merge master
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
index 392bec8..b18b669 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
@@ -54,7 +54,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         NodeControllerService ncs = (NodeControllerService) cs;
         IAsterixAppRuntimeContext appContext =
                 (IAsterixAppRuntimeContext) ncs.getApplicationContext().getApplicationObject();
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
index fc67d3c..e4a57e6 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
@@ -65,7 +65,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         ActiveLifecycleListener.INSTANCE.receive(this);
     }
 
diff --git a/asterixdb/asterix-algebra/pom.xml b/asterixdb/asterix-algebra/pom.xml
index c78e82a..779e0d5 100644
--- a/asterixdb/asterix-algebra/pom.xml
+++ b/asterixdb/asterix-algebra/pom.xml
@@ -161,6 +161,10 @@
       <artifactId>algebricks-compiler</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-rewriter</artifactId>
+    </dependency>
+    <dependency>
       <groupId>org.json</groupId>
       <artifactId>json</artifactId>
     </dependency>
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
index d444aca..f7f2de6 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
@@ -29,6 +29,7 @@
 import org.apache.asterix.om.base.AString;
 import org.apache.asterix.om.constants.AsterixConstantValue;
 import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.om.util.ConstantExpressionUtil;
 import org.apache.asterix.optimizer.base.AnalysisUtil;
 import org.apache.commons.lang3.mutable.Mutable;
@@ -44,6 +45,7 @@
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
 import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
 import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
@@ -79,7 +81,7 @@
             AssignOperator a1 = (AssignOperator) op1;
             ILogicalExpression expr = getFirstExpr(a1);
             if (AnalysisUtil.isAccessToFieldRecord(expr)) {
-                boolean res = findAndEliminateRedundantFieldAccess(a1);
+                boolean res = findAndEliminateRedundantFieldAccess(a1, context);
                 context.addToDontApplySet(this, op1);
                 return res;
             }
@@ -184,8 +186,8 @@
             a2InptList.clear();
             a2InptList.add(topChild);
             // and link it as child in the op. tree
-            topOp.getInputs().set(0, new MutableObject<ILogicalOperator>(a2));
-            findAndEliminateRedundantFieldAccess(a2);
+            topOp.getInputs().set(0, new MutableObject<>(a2));
+            findAndEliminateRedundantFieldAccess(a2, context);
         } else { // e.g., a join
             LinkedList<LogicalVariable> usedInAccess = new LinkedList<LogicalVariable>();
             VariableUtilities.getUsedVariables(a2, usedInAccess);
@@ -231,9 +233,9 @@
             IOptimizationContext context) throws AlgebricksException {
         List<Mutable<ILogicalOperator>> tpInpList = toPush.getInputs();
         tpInpList.clear();
-        tpInpList.add(new MutableObject<ILogicalOperator>(toPushThroughChildRef.getValue()));
+        tpInpList.add(new MutableObject<>(toPushThroughChildRef.getValue()));
         toPushThroughChildRef.setValue(toPush);
-        findAndEliminateRedundantFieldAccess(toPush);
+        findAndEliminateRedundantFieldAccess(toPush, context);
     }
 
     /**
@@ -244,7 +246,8 @@
      * assign $x := Expr
      * assign $y := record-constructor { "field": Expr, ... }
      */
-    private static boolean findAndEliminateRedundantFieldAccess(AssignOperator assign) throws AlgebricksException {
+    private static boolean findAndEliminateRedundantFieldAccess(AssignOperator assign, IOptimizationContext context)
+            throws AlgebricksException {
         ILogicalExpression expr = getFirstExpr(assign);
         AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
         ILogicalExpression arg0 = f.getArguments().get(0).getValue();
@@ -257,15 +260,16 @@
         if (arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
             return false;
         }
+        IVariableTypeEnvironment typeEnvironment = context.getOutputTypeEnvironment(assign);
         ConstantExpression ce = (ConstantExpression) arg1;
         ILogicalExpression fldExpr;
         if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
             String fldName = ((AString) ((AsterixConstantValue) ce.getValue()).getObject()).getStringValue();
-            fldExpr = findFieldExpression(assign, recordVar, fldName,
-                    LoadRecordFieldsRule::findFieldByNameFromRecordConstructor);
+            fldExpr = findFieldExpression(assign, recordVar, fldName, typeEnvironment,
+                    (name, expression, env) -> findFieldByNameFromRecordConstructor(name, expression));
         } else if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
             Integer fldIdx = ((AInt32) ((AsterixConstantValue) ce.getValue()).getObject()).getIntegerValue();
-            fldExpr = findFieldExpression(assign, recordVar, fldIdx,
+            fldExpr = findFieldExpression(assign, recordVar, fldIdx, typeEnvironment,
                     LoadRecordFieldsRule::findFieldByIndexFromRecordConstructor);
         } else if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_NESTED)) {
             return false;
@@ -292,12 +296,14 @@
 
     @FunctionalInterface
     private interface FieldResolver {
-        public ILogicalExpression resolve(Object accessKey, AbstractFunctionCallExpression funcExpr);
+        ILogicalExpression resolve(Object accessKey, AbstractFunctionCallExpression funcExpr,
+                IVariableTypeEnvironment typeEnvironment) throws AlgebricksException;
     }
 
     // Finds a field expression.
     private static ILogicalExpression findFieldExpression(AbstractLogicalOperator op, LogicalVariable recordVar,
-            Object accessKey, FieldResolver resolver) {
+            Object accessKey, IVariableTypeEnvironment typeEnvironment, FieldResolver resolver)
+            throws AlgebricksException {
         for (Mutable<ILogicalOperator> child : op.getInputs()) {
             AbstractLogicalOperator opChild = (AbstractLogicalOperator) child.getValue();
             if (opChild.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
@@ -306,18 +312,19 @@
                 if (i >= 0) {
                     AbstractLogicalExpression constr = (AbstractLogicalExpression) op2.getExpressions().get(i)
                             .getValue();
-                    return resolveFieldExpression(constr, accessKey, resolver);
+                    return resolveFieldExpression(constr, accessKey, typeEnvironment, resolver);
                 }
             } else if (opChild.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
                 NestedTupleSourceOperator nts = (NestedTupleSourceOperator) opChild;
                 AbstractLogicalOperator opBelowNestedPlan = (AbstractLogicalOperator) nts.getDataSourceReference()
                         .getValue().getInputs().get(0).getValue();
-                ILogicalExpression expr1 = findFieldExpression(opBelowNestedPlan, recordVar, accessKey, resolver);
+                ILogicalExpression expr1 = findFieldExpression(opBelowNestedPlan, recordVar, accessKey, typeEnvironment,
+                        resolver);
                 if (expr1 != null) {
                     return expr1;
                 }
             }
-            ILogicalExpression expr2 = findFieldExpression(opChild, recordVar, accessKey, resolver);
+            ILogicalExpression expr2 = findFieldExpression(opChild, recordVar, accessKey, typeEnvironment, resolver);
             if (expr2 != null) {
                 return expr2;
             }
@@ -327,7 +334,7 @@
 
     // Resolves field expression from an access key and a field resolver.
     private static ILogicalExpression resolveFieldExpression(AbstractLogicalExpression constr, Object accessKey,
-            FieldResolver resolver) {
+            IVariableTypeEnvironment typeEnvironment, FieldResolver resolver) throws AlgebricksException {
         if (constr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
             return null;
         }
@@ -336,7 +343,7 @@
                 && !fce.getFunctionIdentifier().equals(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
             return null;
         }
-        return resolver.resolve(accessKey, fce);
+        return resolver.resolve(accessKey, fce, typeEnvironment);
     }
 
     // Resolves field expression by name-based access.
@@ -355,9 +362,12 @@
 
     // Resolves field expression by index-based access.
     private static ILogicalExpression findFieldByIndexFromRecordConstructor(Object index,
-            AbstractFunctionCallExpression fce) {
+            AbstractFunctionCallExpression fce, IVariableTypeEnvironment typeEnvironment) throws AlgebricksException {
         Integer fieldIndex = (Integer) index;
-        return fce.getArguments().size() > fieldIndex ? fce.getArguments().get(2 * fieldIndex + 1).getValue() : null;
+        ARecordType recordType = (ARecordType) typeEnvironment.getType(fce);
+        String[] closedFieldNames = recordType.getFieldNames();
+        return closedFieldNames.length > fieldIndex
+                ? findFieldByNameFromRecordConstructor(closedFieldNames[fieldIndex], fce) : null;
     }
 
     private final class ExtractFieldLoadExpressionVisitor implements ILogicalExpressionReferenceTransform {
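
The rewritten findFieldByIndexFromRecordConstructor above no longer assumes that the i-th constructor argument pair corresponds to field index i; it asks the type environment for the closed ARecordType, translates the index into a declared field name, and reuses the name-based lookup. A minimal standalone sketch of that index-to-name translation, using hypothetical names and a plain list of name/value strings in place of the Algebricks expression tree (not part of the patch):

import java.util.Arrays;
import java.util.List;

// Hypothetical sketch: resolve a field-access-by-index against a record
// constructor by first mapping the index to a declared field name, then
// matching by name, mirroring the patched rule's use of getFieldNames().
final class FieldByIndexSketch {

    static String findByName(List<String> constructorPairs, String fieldName) {
        // constructorPairs alternates name, value, name, value, ...
        for (int i = 0; i < constructorPairs.size(); i += 2) {
            if (constructorPairs.get(i).equals(fieldName)) {
                return constructorPairs.get(i + 1);
            }
        }
        return null;
    }

    static String findByIndex(List<String> constructorPairs, String[] closedFieldNames, int fieldIndex) {
        // Translate the positional index into a field name before looking it up.
        return closedFieldNames.length > fieldIndex
                ? findByName(constructorPairs, closedFieldNames[fieldIndex]) : null;
    }

    public static void main(String[] args) {
        List<String> pairs = Arrays.asList("id", "42", "name", "\"Ann\"");
        String[] declared = { "id", "name" };
        System.out.println(findByIndex(pairs, declared, 1)); // prints the value paired with "name"
    }
}
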
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushLimitIntoOrderByRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushLimitIntoOrderByRule.java
index b573ae4..4c0247f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushLimitIntoOrderByRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushLimitIntoOrderByRule.java
@@ -96,8 +96,6 @@
             return false;
         }
 
-        boolean needToCheckOffsetValue = true;
-
         // Get the LIMIT constant
         if (limitOp.getMaxObjects().getValue().getExpressionTag() == LogicalExpressionTag.CONSTANT) {
             // Currently, we support LIMIT with a constant value.
@@ -107,6 +105,9 @@
             if (topK > Integer.MAX_VALUE) {
                 return false;
             }
+            if (topK < 0) {
+                topK = 0;
+            }
         } else {
             return false;
         }
@@ -116,7 +117,15 @@
         // Final topK will be applied through LIMIT.
         if (limitOp.getOffset().getValue() != null) {
             if (limitOp.getOffset().getValue().getExpressionTag() == LogicalExpressionTag.CONSTANT) {
-                topK = topK + ((int) AccessMethodUtils.getInt64Constant(limitOp.getOffset()));
+                long offset = AccessMethodUtils.getInt64Constant(limitOp.getOffset());
+                if (offset < 0) {
+                    offset = 0;
+                }
+                // Check the overflow case.
+                if (offset >= Integer.MAX_VALUE - topK) {
+                    return false;
+                }
+                topK += offset;
             } else {
                 return false;
             }
@@ -133,7 +142,6 @@
         opRef2.setValue(newOrderOp);
         context.computeAndSetTypeEnvironmentForOperator(newOrderOp);
         context.addToDontApplySet(this, limitOp);
-
         return true;
     }
 
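
The new guards in PushLimitIntoOrderByRule clamp negative LIMIT and OFFSET constants to zero and bail out when adding the offset to topK would overflow an int. A small self-contained sketch of the same guard logic, with a hypothetical method name and long inputs standing in for the constants extracted by the rule (not part of the patch):

// Hypothetical sketch of the clamp-and-overflow guard applied before
// pushing LIMIT k OFFSET o into an ORDER BY as a topK value.
final class TopKGuardSketch {

    /** Returns the combined topK, or -1 if the rewrite should not fire. */
    static int combineTopK(long limit, long offset) {
        if (limit > Integer.MAX_VALUE) {
            return -1;                        // constant too large to push down
        }
        long topK = Math.max(limit, 0);       // negative LIMIT behaves like 0
        long off = Math.max(offset, 0);       // negative OFFSET behaves like 0
        if (off >= Integer.MAX_VALUE - topK) {
            return -1;                        // topK + offset would overflow an int
        }
        return (int) (topK + off);
    }

    public static void main(String[] args) {
        System.out.println(combineTopK(10, 5));                 // 15
        System.out.println(combineTopK(-3, 7));                 // 7
        System.out.println(combineTopK(10, Integer.MAX_VALUE)); // -1 (overflow)
    }
}
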
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
index c7d21f8..72c47c3 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SweepIllegalNonfunctionalFunctions.java
@@ -44,13 +44,12 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -58,6 +57,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -198,15 +198,12 @@
         }
 
         @Override
-        public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
-            for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
-                sweepExpression(expr.getValue(), op);
-            }
+        public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
             return null;
         }
 
         @Override
-        public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+        public Void visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
             return null;
         }
 
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
index 6597006..ead78b7 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
@@ -26,8 +26,8 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.Map.Entry;
+import java.util.Set;
 
 import org.apache.asterix.lang.common.util.FunctionUtil;
 import org.apache.asterix.om.base.AString;
@@ -67,19 +67,19 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.LogicalOperatorDeepCopyWithNewVariablesVisitor;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
 import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
@@ -509,13 +509,12 @@
     }
 
     @Override
-    public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg)
-            throws AlgebricksException {
+    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
         return visitSingleInputOperator(op);
     }
 
     @Override
-    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+    public ILogicalOperator visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
         return visitSingleInputOperator(op);
     }
 
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java
index d5f8fe9..ccdb41d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java
@@ -53,13 +53,13 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -277,13 +277,12 @@
     }
 
     @Override
-    public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg)
-            throws AlgebricksException {
+    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
         return visitSingleInputOperator(op);
     }
 
     @Override
-    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+    public ILogicalOperator visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
         return visitSingleInputOperator(op);
     }
 
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanSpecialFlatteningCheckVisitor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanSpecialFlatteningCheckVisitor.java
index e5c67cc..75a42a5 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanSpecialFlatteningCheckVisitor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanSpecialFlatteningCheckVisitor.java
@@ -33,18 +33,18 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -154,12 +154,12 @@
     }
 
     @Override
-    public Boolean visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
-        return false;
+    public Boolean visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+        return visitInputs(op);
     }
 
     @Override
-    public Boolean visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+    public Boolean visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
         return visitInputs(op);
     }
 
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index 9f2d5d8..1abf6c8 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -65,7 +65,7 @@
                 }
             }
             if (!ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)) {
-                throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE + " state."
+                throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state."
                         + "\n One or more Node Controllers have left or haven't joined yet.\n");
             } else {
                 if (LOGGER.isLoggable(Level.INFO)) {
@@ -75,7 +75,7 @@
         }
 
         if (ClusterStateManager.INSTANCE.getState().equals(ClusterState.UNUSABLE)) {
-            throw new AsterixException(" Asterix Cluster is in " + ClusterState.UNUSABLE + " state."
+            throw new AsterixException("Cluster is in " + ClusterState.UNUSABLE + " state."
                     + "\n One or more Node Controllers have left.\n");
         }
 
@@ -83,8 +83,7 @@
             int maxWaitCycles = AsterixAppContextInfo.INSTANCE.getExternalProperties().getMaxWaitClusterActive();
             int waitCycleCount = 0;
             try {
-                while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()
-                        && waitCycleCount < maxWaitCycles) {
+                while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
                     Thread.sleep(1000);
                     waitCycleCount++;
                 }
@@ -94,7 +93,7 @@
                 }
             }
             if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
-                throw new AsterixException(" Asterix Cluster Global recovery is not yet complete and The system is in "
+                throw new AsterixException("Cluster Global recovery is not yet complete and the system is in "
                         + ClusterState.ACTIVE + " state");
             }
         }
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/FunctionCollection.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/FunctionCollection.java
index e7f74a1..9c93e07 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/FunctionCollection.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/FunctionCollection.java
@@ -157,8 +157,13 @@
 import org.apache.asterix.runtime.evaluators.functions.HashedGramTokensDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.HashedWordTokensDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.InjectFailureDescriptor;
+import org.apache.asterix.runtime.evaluators.functions.IsArrayDescriptor;
+import org.apache.asterix.runtime.evaluators.functions.IsBooleanDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.IsMissingDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.IsNullDescriptor;
+import org.apache.asterix.runtime.evaluators.functions.IsNumberDescriptor;
+import org.apache.asterix.runtime.evaluators.functions.IsObjectDescriptor;
+import org.apache.asterix.runtime.evaluators.functions.IsStringDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.IsSystemNullDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.IsUnknownDescriptor;
 import org.apache.asterix.runtime.evaluators.functions.LenDescriptor;
@@ -672,6 +677,13 @@
         functionsToInjectUnkownHandling.add(GetOverlappingIntervalDescriptor.FACTORY);
         functionsToInjectUnkownHandling.add(DurationFromIntervalDescriptor.FACTORY);
 
+        // Type functions.
+        functionsToInjectUnkownHandling.add(IsBooleanDescriptor.FACTORY);
+        functionsToInjectUnkownHandling.add(IsNumberDescriptor.FACTORY);
+        functionsToInjectUnkownHandling.add(IsStringDescriptor.FACTORY);
+        functionsToInjectUnkownHandling.add(IsArrayDescriptor.FACTORY);
+        functionsToInjectUnkownHandling.add(IsObjectDescriptor.FACTORY);
+
         // Cast function
         functionsToInjectUnkownHandling.add(CastTypeDescriptor.FACTORY);
 
diff --git a/asterixdb/asterix-app/data/tpcds/call_center.csv b/asterixdb/asterix-app/data/tpcds/call_center.csv
new file mode 100644
index 0000000..6aa2c1d
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/call_center.csv
@@ -0,0 +1,5 @@
+1|AAAAAAAABAAAAAAA|1998-01-01|||2450952|NY Metro|large|2|1138|8AM-4PM|Bob Belcher|6|More than other authori|Shared others could not count fully dollars. New members ca|Julius Tran|3|pri|6|cally|730|Ash Hill|Boulevard|Suite 0|Midway|Williamson County|TN|31904|United States|-5|0.11|
+2|AAAAAAAACAAAAAAA|1998-01-01|2000-12-31||2450806|Mid Atlantic|medium|6|2268|8AM-8AM|Felipe Perkins|2|A bit narrow forms matter animals. Consist|Largely blank years put substantially deaf, new others. Question|Julius Durham|5|anti|1|ought|984|Center Hill|Way|Suite 70|Midway|Williamson County|TN|31904|United States|-5|0.12|
+3|AAAAAAAACAAAAAAA|2001-01-01|||2450806|Mid Atlantic|medium|6|4134|8AM-4PM|Mark Hightower|2|Wrong troops shall work sometimes in a opti|Largely blank years put substantially deaf, new others. Question|Julius Durham|1|ought|2|able|984|Center Hill|Way|Suite 70|Midway|Williamson County|TN|31904|United States|-5|0.01|
+4|AAAAAAAAEAAAAAAA|1998-01-01|2000-01-01||2451063|North Midwest|medium|1|649|8AM-4PM|Larry Mccray|2|Dealers make most historical, direct students|Rich groups catch longer other fears; future,|Matthew Clifton|4|ese|3|pri|463|Pine Ridge|RD|Suite U|Midway|Williamson County|TN|31904|United States|-5|0.05|
+5|AAAAAAAAEAAAAAAA|2000-01-02|2001-12-31||2451063|North Midwest|small|3|795|8AM-8AM|Larry Mccray|2|Dealers make most historical, direct students|Blue, due beds come. Politicians would not make far thoughts. Specifically new horses partic|Gary Colburn|4|ese|3|pri|463|Pine Ridge|RD|Suite U|Midway|Williamson County|TN|31904|United States|-5|0.12|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/catalog_page.csv b/asterixdb/asterix-app/data/tpcds/catalog_page.csv
new file mode 100644
index 0000000..afd7618
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/catalog_page.csv
@@ -0,0 +1,5 @@
+1|AAAAAAAABAAAAAAA|2415024|2415028|DEPARTMENT|1|1|In general basic characters welcome. Clearly lively friends conv|bi-annual|
+2|AAAAAAAAGODAAAAA|2415028|2415030|DEPARTMENT|10|26|Inherent members should not save difficult, other prob|monthly|
+3|AAAAAAAAPNHAAAAA|2415023|2415031|DEPARTMENT|19|71|Somehow limited readers rear then cold, local stairs. Systems giv|bi-annual|
+4|AAAAAAAAKKPAAAAA|2415026|2415027|DEPARTMENT|38|14|Magnetic wives cannot collapse now terms. Mad friends want at onc|bi-annual|
+5|AAAAAAAAHGFBAAAA|2415025|2415029|DEPARTMENT|51|79|Inner circumstances shall specify cultures. Stairs can supply then frequent th|monthly|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/catalog_returns.csv b/asterixdb/asterix-app/data/tpcds/catalog_returns.csv
index 539afa9..07e244e 100644
--- a/asterixdb/asterix-app/data/tpcds/catalog_returns.csv
+++ b/asterixdb/asterix-app/data/tpcds/catalog_returns.csv
@@ -1,16 +1,16 @@
-2450926|45816|2|14601|797995|6189|9583|14601|797995|4703|9583|1|106|2|2|30|2|47|3888.31|233.29|4121.60|91.23|1348.90|3577.24|186.64|124.43|1673.42|
-2450946|74710|4|14601|797995|6189|9583|82809|665550|991|14832|1|17|2|5|6|2|49|2490.18|99.60|2589.78|52.54|1867.39|323.72|931.57|1234.89|2019.53|
-2451065|71104|6|25383|3755|2480|5652|2311|700704|5571|12485|4|7|13|2|1|4|12|64.32|4.50|68.82|22.97|78.60|1.28|55.47|7.57|106.07|
-2450954|28638|8|76801|90299|3797|42495|71681|1168758|7154|35197|2|9|12|3|6|5|20|829.40|49.76|879.16|60.00|308.00|622.05|176.24|31.11|417.76|
-2451023|44538|10|31351|528859|5567|23122|33027|737861|54|2647|1|78|12|5|16|6|6|403.08|36.27|439.35|4.58|199.44|354.71|27.57|20.80|240.29|
-2450992|10904|1|31351|528859|5567|23122|48008|1289384|1410|27436|1|54|7|3|4|6|7|23.17|1.85|25.02|19.85|7.07|6.25|9.64|7.28|28.77|
-2451016|60162|3|77674||||14994|||4335||2||||9|3|413.94|0.00||||318.73|35.22|59.99|185.36|
-2450926|30343|5|41731|1046123|5715|32107|88927|865466|4658|2697|1|65|14|3|5|11|17|1283.67|51.34|1335.01|71.97|58.31|449.28|183.56|650.83|181.62|
-2451058|53881|7|41731|1046123|5715|32107|28053|1827904|2936|21968|1|42|11|5|17|11|9|29.79|1.19|30.98|91.16|59.67|0.00|20.85|8.94|152.02|
-2451039|11891|9|36748|83584|2773|8362|53188|1816862|2974|34286|4|11|2|1|13|12|61|2119.75|84.79|2204.54|30.32|271.45|1441.43|529.08|149.24|386.56|
-2451035|37877|2|36022|1502404|6833|19662|14294|1764552|4145|34491|1|18|3|1|17|13|27|54.81|0.00|54.81|7.04|146.07|12.05|40.19|2.57|153.11|
-2450867|64669|4|36022|1502404|6833|19662|16573|1482438|6726|38614|1|43|18|1|34|13|18|100.44|5.02|105.46|70.47|196.92|9.03|82.26|9.15|272.41|
-2450952|64639|6|24957|189998|3839|7327|95061|1639056|2195|3287|2|56|17|1|3|16|40|407.60|32.60|440.20|2.01|296.40|362.76|19.72|25.12|331.01|
-2450940|15515|8|24957|189998|3839|7327|51292|1218513|1645|32377|2|75|13|5|2|16|44|2157.76|43.15|2200.91|46.29|1280.84|1834.09|51.78|271.89|1370.28|
-2450986|44014|1|82003|599432|3708|27937|26895|1075366|3615|41511|4|9|12|1|2|18|10|339.60|30.56|370.16|25.06|157.70|196.96|45.64|97.00|213.32|
-2450997|16008|3|82003|599432|3708|27937|95809|452924|4311|28451|4|33|5|2|7|18|44|2618.44|235.65|2854.09|83.74|5117.64|968.82|65.98|1583.64|5437.03|
+2415025|6|5|3|6|4|4|3|1|1|10|4|106|1|1|1|1|47|3888.31|233.29|4121.6|91.23|1348.9|3577.24|186.64|124.43|1673.42|
+2415025|4|10|7|1|1|2|5|4|5|3|3|17|1|1|4|3|49|2490.18|99.6|2589.78|52.54|1867.39|323.72|931.57|1234.89|2019.53|
+2415024|1|8|5|5|2|2|5|3|1|4|2|7|1|4|1|3|12|64.32|4.5|68.82|22.97|78.6|1.28|55.47|7.57|106.07|
+2415027|8|8|1|1|3|7|8|3|1|8|5|9|5|5|3|7|20|829.4|49.76|879.16|60|308|622.05|176.24|31.11|417.76|
+2415028|6|9|1|2|5|10|8|4|2|9|2|78|1|2|3|9|6|403.08|36.27|439.35|4.58|199.44|354.71|27.57|20.8|240.29|
+2415027|1|9|7|2|4|9|3|1|2|7|1|54|2|3|2|8|7|23.17|1.85|25.02|19.85|7.07|6.25|9.64|7.28|28.77|
+2415023|1|8|4|6|||8|||6||2||||4|3|413.94|0|900.87|||318.73|35.22|59.99|185.36|
+2415030|6|10|2|1|2|6|8|1|5|5|3|65|1|1|3|9|17|1283.67|51.34|1335.01|71.97|58.31|449.28|183.56|650.83|181.62|
+2415031|1|2|10|2|3|1|2|2|3|9|3|42|3|4|3|8|9|29.79|1.19|30.98|91.16|59.67|0|20.85|8.94|152.02|
+2415031|3|1|10|1|2|10|4|2|3|1|1|11|2|4|2|10|61|2119.75|84.79|2204.54|30.32|271.45|1441.43|529.08|149.24|386.56|
+2415029|4|4|10|1|3|10|1|6|3|9|5|18|4|2|2|10|27|54.81|0|54.81|7.04|146.07|12.05|40.19|2.57|153.11|
+2415025|1|1|10|5|2|10|8|6|4|5|3|43|5|4|4|6|18|100.44|5.02|105.46|70.47|196.92|9.03|82.26|9.15|272.41|
+2415027|3|4|9|5|4|7|6|5|5|7|1|56|3|5|3|2|40|407.6|32.6|440.2|2.01|296.4|362.76|19.72|25.12|331.01|
+2415029|1|5|4|5|1|3|3|3|1|9|2|75|3|1|5|3|44|2157.76|43.15|2200.91|46.29|1280.84|1834.09|51.78|271.89|1370.28|
+2415031|4|2|2|4|1|8|3|4|1|8|2|9|2|5|3|2|10|339.6|30.56|370.16|25.06|157.7|196.96|45.64|97|213.32|
+2415023|1|19|10|5|2|10|6|6|4|6|3|43|5|4|4|6|18|100.44|5.02|10.46|70.47|196.92|9.03|82.26|9.15|272.41|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/catalog_sales.csv b/asterixdb/asterix-app/data/tpcds/catalog_sales.csv
index 6ae0773..2c90f04 100644
--- a/asterixdb/asterix-app/data/tpcds/catalog_sales.csv
+++ b/asterixdb/asterix-app/data/tpcds/catalog_sales.csv
@@ -1,19 +1,19 @@
-2450815|38212|2450886|1|1822764|5775|19986|1|1822764|5775|19986|4|62|3|4|1|196|1|47|27.70|44.32|42.99|62.51|2020.53|1301.90|2083.04|101.02|0.00|1041.52|2020.53|2121.55|3062.05|3163.07|718.63|
-2450815|38212|2450846|1|1822764|5775|19986|1|1822764|5775|19986|4|31|8|2|2|270|1|20|87.55|260.89|153.92|2139.40|3078.40|1751.00|5217.80|71.41|1292.92|1356.60|1785.48|1856.89|3142.08|3213.49|34.48|
-2450815|38212|2450868|1|1822764|5775|19986|1|1822764|5775|19986|4|76|2|2|3|97|1|19|69.86|88.72|29.27|1129.55|556.13|1327.34|1685.68|33.36|0.00|168.53|556.13|589.49|724.66|758.02|-771.21|
-2450815|38212|2450851|1|1822764|5775|19986|1|1822764|5775|19986|4|89|15|2|2|284|2|50|70.00|205.10|188.69|820.50|9434.50|3500.00|10255.00|377.38|0.00|4307.00|9434.50|9811.88|13741.50|14118.88|5934.50|
-2450815|29485|2450904|14601|797995|6189|9583|14601|797995|6189|9583|1|64|18|3|4|176|2|56|67.54|166.82|18.35|8314.32|1027.60|3782.24|9341.92|0.00|0.00|3736.32|1027.60|1027.60|4763.92|4763.92|-2754.64|
-2450815|29485|2450890|14601|797995|6189|9583|14601|797995|6189|9583|1|75|8|1|5|278|2|88|20.08|60.03|20.41|3486.56|1796.08|1767.04|5282.64|13.82|1598.51|1056.00|197.57|211.39|1253.57|1267.39|-1569.47|
-2450815|29485|2450849|14601|797995|6189|9583|14601|797995|6189|9583|1|39|4|3|6|207|2|31|40.88|51.91|6.22|1416.39|192.82|1267.28|1609.21|11.56|0.00|321.78|192.82|204.38|514.60|526.16|-1074.46|
-2450815|29485|2450889|14601|797995|6189|9583|14601|797995|6189|9583|1|49|8|2|7|162|2|100|49.56|137.77|5.51|13226.00|551.00|4956.00|13777.00|0.00|0.00|3306.00|551.00|551.00|3857.00|3857.00|-4405.00|
-2450815|29485|2450868|14601|797995|6189|9583|14601|797995|6189|9583|1|69|17|5|8|72|2|30|72.82|88.84|71.07|533.10|2132.10|2184.60|2665.20|21.32|0.00|133.20|2132.10|2153.42|2265.30|2286.62|-52.50|
-2450815|29485|2450831|14601|797995|6189|9583|14601|797995|6189|9583|1|64|1|3|9|52|2|40|94.56|277.06|2.77|10971.60|110.80|3782.40|11082.40|2.21|0.00|664.80|110.80|113.01|775.60|777.81|-3671.60|
-2450815|29485|2450839|14601|797995|6189|9583|14601|797995|6189|9583|1|108|8|4|10|131|3|40|25.96|54.51|15.80|1548.40|632.00|1038.40|2180.40|22.75|176.96|1090.00|455.04|477.79|1545.04|1567.79|-583.36|
-2450815|29485|2450825|14601|797995|6189|9583|14601|797995|6189|9583|1|14|20|3|11|296|3|45|5.95|12.97|8.81|187.20|396.45|267.75|583.65|3.96|0.00|17.10|396.45|400.41|413.55|417.51|128.70|
-2450815|29485|2450837|14601|797995|6189|9583|14601|797995|6189|9583|1|13|7|1|12|271|3|34|43.69|105.29|82.12|787.78|2792.08|1485.46|3579.86|55.84|0.00|930.58|2792.08|2847.92|3722.66|3778.50|1306.62|
-2450815|29485|2450822|14601|797995|6189|9583|14601|797995|6189|9583|1|106|16|5|13|123|3|59|78.90|84.42|82.73|99.71|4881.07|4655.10|4980.78|292.86|0.00|1444.32|4881.07|5173.93|6325.39|6618.25|225.97|
-2450815|29485|2450895|14601|797995|6189|9583|14601|797995|6189|9583|1|17|19|4|14|131|3|94|41.36|105.88|50.82|5175.64|4777.08|3887.84|9952.72|191.08|0.00|497.26|4777.08|4968.16|5274.34|5465.42|889.24|
-2450815|29485|2450892|14601|797995|6189|9583|14601|797995|6189|9583|1|74|18|2|15|290|3|38|26.96|51.22|35.85|584.06|1362.30|1024.48|1946.36|108.98|0.00|642.20|1362.30|1471.28|2004.50|2113.48|337.82|
-2450815|29485|2450862|14601|797995|6189|9583|14601|797995|6189|9583|1|23|16|5|1|272|3|45|9.54|23.37|21.50|84.15|967.50|429.30|1051.65|21.76|725.62|294.30|241.88|263.64|536.18|557.94|-187.42|
-2450815|29485|2450834|14601|797995|6189|9583|14601|797995|6189|9583|1|63|5|3|16|127|3|41|39.04|108.92|75.15|1384.57|3081.15|1600.64|4465.72|215.68|0.00|1964.72|3081.15|3296.83|5045.87|5261.55|1480.51|
-2450815|10687|2450864|67572|437897|6622|46147|67572|437897|6622|46147|1|28|5|5|17|170|3|99|75.88|178.31|156.91|2118.60|15534.09|7512.12|17652.69|1398.06|0.00|6884.46|15534.09|16932.15|22418.55|23816.61|8021.97|
+2415031|8|2415026|10|2|2|6|2|6|6|3|4|1|3|4|3|6|1|47|27.7|44.32|42.99|62.51|2020.53|1301.9|2083.04|101.02|0|1041.52|2020.53|2121.55|3062.05|3163.07|718.63|
+2415026|7|2415026|8|2|2|9|8|4|6|7|4|1|3|2|3|10|2|20|87.55|260.89|153.92|2139.4|3078.4|1751|5217.8|71.41|1292.92|1356.6|1785.48|1856.89|3142.08|3213.49|34.48|
+2415027|3|2415029|3|5|4|4|1|2|4|8|4|2|4|2|5|3|3|19|69.86|88.72|29.27|1129.55|556.13|1327.34|1685.68|33.36|0|168.53|556.13|589.49|724.66|758.02|-771.21|
+2415028|8|2415023|10|1|5|6|1|4|6|3|4|1|1|2|2|1|4|50|70|205.1|188.69|820.5|9434.5|3500|10255|377.38|0|4307|9434.5|9811.88|13741.5|14118.88|5934.5|
+2415023|6|2415027|5|1|4|8|9|4|2|8|1|2|3|3|10|7|5|56|67.54|166.82|18.35|8314.32|1027.6|3782.24|9341.92|0|0|3736.32|1027.6|1027.6|4763.92|4763.92|-2754.64|
+2415031|2|2415025|5|1|3|7|10|4|1|4|1|5|3|1|3|6|6|88|20.08|60.03|20.41|3486.56|1796.08|1767.04|5282.64|13.82|1598.51|1056|197.57|211.39|1253.57|1267.39|-1569.47|
+2415023|4|2415022|2|2|1|2|2|5|1|9|1|3|5|10|6|9|7|31|40.88|51.91|6.22|1416.39|192.82|1267.28|1609.21|11.56|0|321.78|192.82|204.38|514.6|526.16|-1074.46|
+2415034|5|2415030|9|2|3|2|1|1|3|10|1|2|3|3|2|4|8|100|49.56|137.77|5.51|13226|551|4956|13777|0|0|3306|551|551|3857|3857|-4405|
+2415027|2|2415027|3|6|4|10|8|5|2|2|1|2|3|3|1|10|9|30|72.82|88.84|71.07|533.1|2132.1|2184.6|2665.2|21.32|0|133.2|2132.1|2153.42|2265.3|2286.62|-52.5|
+2415028|6|2415024|2|5|5|9|3|1|5|6|1|2|2|10|2|10|10|40|94.56|277.06|2.77|10971.6|110.8|3782.4|11082.4|2.21|0|664.8|110.8|113.01|775.6|777.81|-3671.6|
+2415030|2|2415022|8|5|5|3|3|2|3|4|1|2|1|16|10|7|11|40|25.96|54.51|15.8|1548.4|632|1038.4|2180.4|22.75|176.96|1090|455.04|477.79|1545.04|1567.79|-583.36|
+2415027|8|2415022|2|2|3|8|9|3|5|10|1|3|1|3|2|3|12|45|5.95|12.97|8.81|187.2|396.45|267.75|583.65|3.96|0|17.1|396.45|400.41|413.55|417.51|128.7|
+2415029|3|2415028|8|6|4|5|4|4|6|9|1|3|3|17|3|2|13|34|43.69|105.29|82.12|787.78|2792.08|1485.46|3579.86|55.84|0|930.58|2792.08|2847.92|3722.66|3778.5|1306.62|
+2415022|4|2415022|2|5|5|8|2|4|3|2|1|1|5|3|7|5|14|59|78.9|84.42|82.73|99.71|4881.07|4655.1|4980.78|292.86|0|1444.32|4881.07|5173.93|6325.39|6618.25|225.97|
+2415025|8|2415022|2|4|3|10|1|1|6|1|1|5|2|18|5|3|15|94|41.36|105.88|530.82|5175.64|4777.08|3887.84|9952.72|191.08|0|497.26|4777.08|4968.16|5274.34|5465.42|889.24|
+2415027|4|2415029|3|2|5|5|1|5|2|3|1|4|4|3|1|8|16|38|26.96|51.22|35.85|584.06|1362.3|1024.48|1946.36|108.98|0|642.2|1362.3|1471.28|2004.5|2113.48|337.82|
+2415028|3|2415031|1|3|1|6|10|4|1|7|1|1|2|15|5|10|17|45|9.54|23.37|21.5|84.15|967.5|429.3|1051.65|21.76|725.62|294.3|241.88|263.64|536.18|557.94|-187.42|
+2415030|6|2415025|2|1|3|5|3|1|4|7|1|3|3|3|6|9|18|41|39.04|108.92|75.15|1384.57|3081.15|1600.64|4465.72|215.68|0|1964.72|3081.15|3296.83|5045.87|5261.55|1480.51|
+2415030|6|2415025|1|1|3|5|3|1|4|7|1|3|3|15|6|9|29|41|39.04|108.92|75.15|1384.57|3081.15|1600.64|4465.72|215.68|0|1964.72|3081.15|3296.83|5045.87|5261.55|1480.51|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/customer.csv b/asterixdb/asterix-app/data/tpcds/customer.csv
index c7ee8ef..ba9450b 100644
--- a/asterixdb/asterix-app/data/tpcds/customer.csv
+++ b/asterixdb/asterix-app/data/tpcds/customer.csv
@@ -1,10 +1,10 @@
-1|AAAAAAAABAAAAAAA|980124|7135|13513|2452238|2452208|Mr.|Javier|Lewis|Y|9|12|1936|CHILE||Javier.Lewis@VFAxlnZEvOx.org|2452508|
-2|AAAAAAAACAAAAAAA|819667|1461|13514|2452318|2452288|Dr.|Amy|Moses|Y|9|4|1966|TOGO||Amy.Moses@Ovk9KjHH.com|2452318|
-3|AAAAAAAADAAAAAAA|1473522|6247|13515|2449130|2449100|Miss|Latisha|Hamilton|N|18|9|1979|NIUE||Latisha.Hamilton@V.com|2452313|
-4|AAAAAAAAEAAAAAAA|1703214|3986|13516|2450030|2450000|Dr.|Michael|White|N|7|6|1983|MEXICO||Michael.White@i.org|2452361|
-5|AAAAAAAAFAAAAAAA|953372|4470|13517|2449438|2449408|Sir|Robert|Moran|N|8|5|1956|FIJI||Robert.Moran@Hh.edu|2452469|
-6|AAAAAAAAGAAAAAAA|213219|6374|13518|2451883|2451853|Ms.|Brunilda|Sharp|N|4|12|1925|SURINAME||Brunilda.Sharp@T3pylZEUQjm.org|2452430|
-7|AAAAAAAAHAAAAAAA|68377|3219|13519|2451438|2451408|Ms.|Fonda|Wiles|Y|24|4|1985|GAMBIA||Fonda.Wiles@S9KnyEtz9hv.org|2452360|
-8|AAAAAAAAIAAAAAAA|1215897|2471|13520|2449406|2449376|Sir|Ollie|Shipman|N|26|12|1938|KOREA, REPUBLIC OF||Ollie.Shipman@be.org|2452334|
-9|AAAAAAAAJAAAAAAA|1168667|1404|13521|2452275|2452245|Sir|Karl|Gilbert|N|26|10|1966|MONTSERRAT||Karl.Gilbert@Crg5KyP2IxX9C4d6.edu|2452454|
-10|AAAAAAAAKAAAAAAA|1207553|5143|13522|2451353|2451323|Ms.|Albert|Brunson|N|15|10|1973|JORDAN||Albert.Brunson@62.com|2452641|
+1|AAAAAAAABAAAAAAA|4|5|1|2415022|2415023|Mr.|Javier|Lewis|Y|9|12|1936|CHILE||Javier.Lewis@VFAxlnZEvOx.org|2452508|
+2|AAAAAAAACAAAAAAA|6|1|8|2415025|2415024|Dr.|Amy|Moses|Y|9|4|1966|TOGO||Amy.Moses@Ovk9KjHH.com|2452318|
+3|AAAAAAAADAAAAAAA|1|4|9|2415028|2415026|Miss|Latisha|Hamilton|N|18|9|1979|UNITED STATES||Latisha.Hamilton@V.com|2452313|
+4|AAAAAAAAEAAAAAAA|4|1|10|2415029|2415023|Dr.|Michael|White|N|7|6|1983|MEXICO||Michael.White@i.org|2452361|
+5|AAAAAAAAFAAAAAAA|6|2|7|2415031|2415024|Sir|Robert|Moran|N|8|5|1956|FIJI||Robert.Moran@Hh.edu|2452469|
+6|AAAAAAAAGAAAAAAA|5|3|2|2415027|2415025|Ms.|Brunilda|Sharp|N|4|12|1925|SURINAME||Brunilda.Sharp@T3pylZEUQjm.org|2452430|
+7|AAAAAAAAHAAAAAAA|1|2|6|2415028|2415022|Ms.|Fonda|Wiles|Y|24|4|1985|GAMBIA||Fonda.Wiles@S9KnyEtz9hv.org|2452360|
+8|AAAAAAAAIAAAAAAA|5|1|2|2415023|2415025|Sir|Ollie|Shipman|N|26|12|1938|UNITED STATES||Ollie.Shipman@be.org|2452334|
+9|AAAAAAAAJAAAAAAA|3|4|9|2415022|2415031|Sir|Karl|Gilbert|N|26|10|1966|MONTSERRAT||Karl.Gilbert@Crg5KyP2IxX9C4d6.edu|2452454|
+10|AAAAAAAAKAAAAAAA|1|5|5|2415022|2415023|Ms.|Albert|Brunson|N|15|10|1973|JORDAN||Albert.Brunson@62.com|2452641|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/customer_address.csv b/asterixdb/asterix-app/data/tpcds/customer_address.csv
index a52166e..c5185f3 100644
--- a/asterixdb/asterix-app/data/tpcds/customer_address.csv
+++ b/asterixdb/asterix-app/data/tpcds/customer_address.csv
@@ -1,10 +1,10 @@
-13513|AAAAAAAAJMEDAAAA|545|Meadow |RD|Suite X|Crystal|DeKalb County|MO|65258|United States|-6|condo|
-13514|AAAAAAAAKMEDAAAA|730|Hickory Wilson|Street|Suite 340|Friendship|Hockley County|TX|74536|United States|-6|single family|
-13515|AAAAAAAALMEDAAAA||Lincoln Wilson||||Highlands County|FL|35124||||
-13516|AAAAAAAAMMEDAAAA|114|Wilson |Lane|Suite J|New Hope|Knox County|KY|49431|United States|-5|condo|
-13517|AAAAAAAANMEDAAAA|177|8th Walnut|Ct.|Suite 300|Riverview|Jenkins County|GA|39003|United States|-5|single family|
-13518|AAAAAAAAOMEDAAAA|11|Adams 1st|Cir.|Suite T|Franklin|Essex County|NY|19101|United States|-5|apartment|
-13519|AAAAAAAAPMEDAAAA|261|4th River|ST|Suite T|Green Acres|Sheboygan County|WI|57683|United States|-6|condo|
-13520|AAAAAAAAANEDAAAA|892|Johnson |Pkwy|Suite 320|Fairfield|Mora County|NM|86192|United States|-7|single family|
-13521|AAAAAAAABNEDAAAA|579|Elm |Road|Suite 480|Clifton|Wichita County|KS|68014|United States|-6|single family|
-13522|AAAAAAAACNEDAAAA|139|5th |Wy|Suite P|Springdale|Koochiching County|MN|58883|United States|-6|condo|
+1|AAAAAAAABAAAAAAA|18|Jackson |Parkway|Suite 280|Fairfield|Maricopa County|IL|86192|United States|-7|condo|
+2|AAAAAAAACAAAAAAA|362|Washington 6th|RD|Suite 80|Fairview|Taos County|IL|35709|Suriname|-7|condo|
+3|AAAAAAAADAAAAAAA|585|Dogwood Washington|Circle|Suite Q|Pleasant Valley|York County|PA|31904|United States|-5|single family|
+4|AAAAAAAAEAAAAAAA|111|Smith |Wy|Suite A|Oak Ridge|Kit Carson County|TX|35708|United States|-7|condo|
+5|AAAAAAAAFAAAAAAA|31|College |Blvd|Suite 180|Glendale|Barry County|MO|63951|United States|-6|single family|
+6|AAAAAAAAGAAAAAAA|59|Williams Sixth|Parkway|Suite 100|Lakeview|Chelan County|WA|35709|United States|-8|single family|
+7|AAAAAAAAHAAAAAAA||Hill 7th|Road|Suite U|Farmington|||39145|United States|-7||
+8|AAAAAAAAIAAAAAAA|875|Lincoln |Ct.|Suite Y|Union|Bledsoe County|WA|35708|United States|-5|apartment|
+9|AAAAAAAAJAAAAAAA|819|1st Laurel|Ave|Suite 70|New Hope|Perry County|IL|35709|United States|-6|condo|
+10|AAAAAAAAKAAAAAAA|851|Woodland Poplar|ST|Suite Y|Martinsville|Taos County|TX|90419|United States|-9|condo|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/customer_demographics.csv b/asterixdb/asterix-app/data/tpcds/customer_demographics.csv
new file mode 100644
index 0000000..32ae389
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/customer_demographics.csv
@@ -0,0 +1,6 @@
+1|M|M|Primary|500|Good|0|0|0|
+2|F|W|Primary|500|Good|0|0|0|
+3|F|U|Unknown|500|Good|0|0|0|
+4|F|U|Secondary|1500|Good|0|0|0|
+5|F|D|4 yr Degree|3500|Good|0|0|0|
+6|M|W|Advanced Degree|7500|Good|0|0|0|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/date_dim.csv b/asterixdb/asterix-app/data/tpcds/date_dim.csv
index 5300c68..3e5647d 100644
--- a/asterixdb/asterix-app/data/tpcds/date_dim.csv
+++ b/asterixdb/asterix-app/data/tpcds/date_dim.csv
@@ -1,14 +1,13 @@
-2450815|AAAAAAAAOKJNECAA|1900-01-02|0|1|1|1900|1|1|2|1|1900|1|1|Monday|1900Q1|N|N|Y|2415021|2415020|2414657|2414930|N|N|N|N|N|
-2415023|AAAAAAAAPKJNECAA|1900-01-03|0|1|1|1900|2|1|3|1|1900|1|1|Tuesday|1900Q1|N|N|N|2415021|2415020|2414658|2414931|N|N|N|N|N|
-2415024|AAAAAAAAALJNECAA|1900-01-04|0|1|1|1900|3|1|4|1|1900|1|1|Wednesday|1900Q1|N|N|N|2415021|2415020|2414659|2414932|N|N|N|N|N|
-2415025|AAAAAAAABLJNECAA|1900-01-05|0|1|1|1900|4|1|5|1|1900|1|1|Thursday|1900Q1|N|N|N|2415021|2415020|2414660|2414933|N|N|N|N|N|
-2415026|AAAAAAAACLJNECAA|1900-01-06|0|1|1|1900|5|1|6|1|1900|1|1|Friday|1900Q1|N|Y|N|2415021|2415020|2414661|2414934|N|N|N|N|N|
-2415027|AAAAAAAADLJNECAA|1900-01-07|0|1|1|1900|6|1|7|1|1900|1|1|Saturday|1900Q1|N|Y|N|2415021|2415020|2414662|2414935|N|N|N|N|N|
-2415028|AAAAAAAAELJNECAA|1900-01-08|0|1|1|1900|0|1|8|1|1900|1|1|Sunday|1900Q1|N|N|N|2415021|2415020|2414663|2414936|N|N|N|N|N|
-2415029|AAAAAAAAFLJNECAA|1900-01-09|0|2|1|1900|1|1|9|1|1900|1|2|Monday|1900Q1|N|N|N|2415021|2415020|2414664|2414937|N|N|N|N|N|
-2415030|AAAAAAAAGLJNECAA|1900-01-10|0|2|1|1900|2|1|10|1|1900|1|2|Tuesday|1900Q1|N|N|N|2415021|2415020|2414665|2414938|N|N|N|N|N|
-2415031|AAAAAAAAHLJNECAA|1900-01-11|0|2|1|1900|3|1|11|1|1900|1|2|Wednesday|1900Q1|N|N|N|2415021|2415020|2414666|2414939|N|N|N|N|N|
-2415032|AAAAAAAAILJNECAA|1900-01-12|0|2|1|1900|4|1|12|1|1900|1|2|Thursday|1900Q1|N|N|N|2415021|2415020|2414667|2414940|N|N|N|N|N|
-2415033|AAAAAAAAJLJNECAA|1900-01-13|0|2|1|1900|5|1|13|1|1900|1|2|Friday|1900Q1|N|Y|N|2415021|2415020|2414668|2414941|N|N|N|N|N|
-2451293|AAAAAAAAKLJNECAA|1900-01-14|0|2|1|1900|6|1|14|1|1900|1|2|Saturday|1900Q1|N|Y|N|2415021|2415020|2414669|2414942|N|N|N|N|N|
-2451176|AAAAAAAALLJNECAA|1900-01-15|0|2|1|1900|0|1|15|1|1900|1|2|Sunday|1900Q1|N|N|N|2415021|2415020|2414670|2414943|N|N|N|N|N|
+2415022|AAAAAAAAOKJNECAA|1999-06-29|0|1|1|1999|6|11|2|1|1900|1|1|Monday|1900Q1|N|N|Y|2415021|2415020|2414657|2414930|N|N|N|N|N|
+2415023|AAAAAAAAPKJNECAA|1900-01-02|1222|1|1|1998|0|12|3|1|1900|1|1|Tuesday|1900Q1|N|N|N|2415021|2415020|2414658|2414931|N|N|N|N|N|
+2415024|AAAAAAAAALJNECAA|1998-04-07|1186|1|1|1999|3|11|2|1|1900|1|1|Wednesday|1900Q1|N|N|N|2415021|2415020|2414659|2414932|N|N|N|N|N|
+2415025|AAAAAAAAALJNECAA|2000-01-04|0|1|1|2000|3|9|2|1|1900|1|1|Wednesday|1900Q1|N|N|N|2415021|2415020|2414659|2414932|N|N|N|N|N|
+2415026|AAAAAAAACLJNECAA|1999-06-29|0|1|1|1999|5|5|25|1|1900|1|1|Friday|1900Q1|N|Y|N|2415021|2415020|2414661|2414934|N|N|N|N|N|
+2415027|AAAAAAAADLJNECAA|2001-06-02|0|1|1|2001|6|12|7|1|1900|1|1|Saturday|1900Q1|N|Y|N|2415021|2415020|2414662|2414935|N|N|N|N|N|
+2415028|AAAAAAAAELJNECAA|2002-05-30|0|1|1|2002|0|11|8|1|1900|1|1|Sunday|1900Q1|N|N|N|2415021|2415020|2414663|2414936|N|N|N|N|N|
+2415029|AAAAAAAAFLJNECAA|1998-04-11|0|2|1|2000|1|4|27|3|1900|1|2|Monday|1900Q1|N|N|N|2415021|2415020|2414664|2414937|N|N|N|N|N|
+2415030|AAAAAAAAGLJNECAA|1900-01-10|0|2|1|2000|2|5|28|2|1900|1|2|Tuesday|1900Q1|N|N|N|2415021|2415020|2414665|2414938|N|N|N|N|N|
+2415031|AAAAAAAAHLJNECAA|1998-03-11|0|2|1|2003|3|1|11|1|1900|1|2|Wednesday|1900Q1|N|N|N|2415021|2415020|2414666|2414939|N|N|N|N|N|
+2415032|AAAAAAAADLJNECAA|2001-01-12|1197|53|1|2001|6|12|7|1|1900|1|1|Saturday|1900Q1|N|Y|N|2415021|2415020|2414662|2414935|N|N|N|N|N|
+2415033|AAAAAAAACLJNECAA|1999-06-29|0|1|1|1999|5|4|25|1|1900|1|1|Friday|1900Q1|N|Y|N|2415021|2415020|2414661|2414934|N|N|N|N|N|
+2415034|AAAAAAAAJNJNECAA|1900-02-14|1|7|1|1900|2|2|14|1|1900|1|7|Tuesday|1900Q1|N|N|N|2415052|2415082|2414700|2414973|N|N|N|N|N|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/household_demographics.csv b/asterixdb/asterix-app/data/tpcds/household_demographics.csv
new file mode 100644
index 0000000..655ffe7
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/household_demographics.csv
@@ -0,0 +1,7 @@
+1|4|0-500|5|0|
+2|3|0-500|5|0|
+3|5|501-1000|5|0|
+4|1|>10000|8|4|
+5|2|Unknown|5|3|
+6|2|Unknown|5|4|
+7|2|Unknown|3|4|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/income_band.csv b/asterixdb/asterix-app/data/tpcds/income_band.csv
new file mode 100644
index 0000000..5c86d1b
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/income_band.csv
@@ -0,0 +1,5 @@
+1|0|10000|
+2|10001|20000|
+3|20001|30000|
+4|30001|40000|
+5|40001|50000|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/inventory.csv b/asterixdb/asterix-app/data/tpcds/inventory.csv
new file mode 100644
index 0000000..ca79c34
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/inventory.csv
@@ -0,0 +1,15 @@
+2415029|4|2|300|
+2415022|8|5|235|
+2415027|1|4|500|
+2415024|5|2|200|
+2415022|3|3|891|
+2415023|10|4|834|
+2415028|6|5|591|
+2415027|5|3|579|
+2415022|5|5|622|
+2415024|9|4|745|
+2415024|1|1|405|
+2415022|1|1||
+2415031|7|3|20|
+2415028|7|5|350|
+2415023|3|4|24|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/item.csv b/asterixdb/asterix-app/data/tpcds/item.csv
index bdccbcb..261888a 100644
--- a/asterixdb/asterix-app/data/tpcds/item.csv
+++ b/asterixdb/asterix-app/data/tpcds/item.csv
@@ -1,16 +1,16 @@
-1|AAAAAAABAAAAAAA|1997-10-27||Powers will not get influences. Electoral ports should show low, annual chains. Now young visitors may pose now however final pages. Bitterly right children suit increasing, leading el|27.02|23.23|5003002|exportischolar #2|3|pop|5|Music|52|ableanti|N/A|3663peru009490160959|spring|Tsp|Unknown|6|ought|
+1|AAAAAAAABAAAAAAA|1997-10-27||Powers will not get influences. Electoral ports should show low, annual chains. Now young visitors may pose now however final pages. Bitterly right children suit increasing, leading el|27.02|23.23|5003002|exportischolar #2|3|pop|5|Music|964|ableanti|N/A|3663peru009490160959|spring|Tsp|Unknown|6|ought|
 2|AAAAAAAACAAAAAAA|1997-10-27|2000-10-26|False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, |1.12|0.38|1001001|amalgamalg #1|1|dresses|1|Women|294|esen stable|petite|516steel060826230906|rosy|Bunch|Unknown|98|able|
-3|AAAAAAAACAAAAAAA|2000-10-27||False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, |7.11|0.38|1001001|brandbrand #4|7|decor|7|Home|294|esen stable|N/A|516steel060826230906|sienna|Cup|Unknown|18|pri|
-4|AAAAAAAAEAAAAAAA|1997-10-27|1999-10-27|Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D|1.35|0.85|3002001|importoexporti #1|2|infants|3|Children|479|n stationese|extra large|610157moccasin018327|red|Tbl|Unknown|26|ese|
-5|AAAAAAAAEAAAAAAA|1999-10-28|2001-10-26|Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D|4.00|1.76|2002002|importoimporto #2|2|shirts|2|Men|220|barableable|petite|42214rosy28066558020|pink|Cup|Unknown|27|anti|
-6|AAAAAAAAEAAAAAAA|2001-10-27||Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D|0.85|1.76|2002002|exportiimporto #1|3|pants|2|Men|212|barableable|large|42214rosy28066558020|moccasin|Bundle|Unknown|6|cally|
-7|AAAAAAAAHAAAAAAA|1997-10-27||Anxious accounts must catch also years. Revolutionary, large directors used to embrace then mo|9.94|6.75|3001002|amalgexporti #2|1|newborn|3|Children|214|eseoughtable|petite|6moccasin24027188872|spring|Tsp|Unknown|64|ation|
+3|AAAAAAAACAAAAAAA|2000-10-27||False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, |7.11|0.38|1001001|brandbrand #4|7|decor|7|Home|294|esen stable|N/A|516steel060826230906|chiffon|Cup|Unknown|1|pri|
+4|AAAAAAAAEAAAAAAA|1997-10-27|1999-10-27|Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D|1.35|0.85|3002001|importoexporti #1|2|Jewelry|3|Women|762|n stationese|medium|610157moccasin018327|orchid|Pound|Unknown|1|ese|
+5|AAAAAAAAEAAAAAAA|1999-10-28|2001-10-26|Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D|1.3|1.76|2002002|importoimporto #2|2|shirts|2|Men|220|barableable|petite|42214rosy28066558020|chiffon|Cup|Unknown|27|anti|
+6|AAAAAAAAEAAAAAAA|2001-10-27||Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D|0.85|1.76|2002002|exportiimporto #1|3|pants|2|Men|212|barableable|large|42214rosy28066558020|chiffon|Bundle|Unknown|7|cally|
+7|AAAAAAAAHAAAAAAA|1997-10-27||Anxious accounts must catch also years. Revolutionary, large directors used to embrace then mo|39.94|1.30|3001002|amalgexporti #2|1|newborn|3|Children|129|eseoughtable|petite|6moccasin24027188872|chiffon|Tsp|Unknown|64|ation|
 8|AAAAAAAAIAAAAAAA|1997-10-27|2000-10-26|F|2.76|0.85|3003001|exportiexporti #1|3|toddlers|3|Children|630|barprically|extra large|35123wheat3256343398|turquoise|Carton|Unknown|25|eing|
 9|AAAAAAAAIAAAAAAA|2000-10-27||F|4.46|0.85|1004002|edu packamalg #2|3|swimwear|1|Women|630|barprically|medium|35123wheat3256343398|wheat|Tbl|Unknown|3|n st|
 10|AAAAAAAAKAAAAAAA|1997-10-27|1999-10-27|Classical services go trousers. However great galleries might say needs. Assumptions change very in favour of the notes. Teeth woul|8.94|4.11|10008011|namelessunivamalg #11|8|scanners|10|Electronics|350|barantipri|N/A|8159007505thistle447|pale|Tsp|Unknown|34|barought|
 11|AAAAAAAAKAAAAAAA|1999-10-28|2001-10-26|Correct, fo|54.87|4.11|10008011|edu packbrand #4|14|estate|6|Jewelry|625|antiablecally|N/A|snow1543775706017405|yellow|Bunch|Unknown|26|oughtought|
-12|AAAAAAAAKAAAAAAA|2001-10-27||Corporate, important facilities claim trying, external sides. Elements used to expect home pr|6.54|4.11|10008011|corpnameless #3|14|furniture|7|Home|264|esecallyable|N/A|968467777sky92069287|royal|Tbl|Unknown|19|ableought|
-13|AAAAAAAANAAAAAAA|1997-10-27||Hard, private departments spoil more quickly possible members; clear troops fail only needs. |8.76|7.62|6012006|importobrand #6|12|costume|6|Jewelry|167|ationcallyought|N/A|883208731996blue7862|olive|Bundle|Unknown|51|priought|
+12|AAAAAAAAKAAAAAAA|2001-10-27||Corporate, important facilities claim trying, external sides. Elements used to expect home pr|6.54|4.11|10008011|corpnameless #3|14|furniture|7|Home|436|esecallyable|N/A|968467777sky92069287|royal|Tbl|Unknown|36|ableought|
+13|AAAAAAAANAAAAAAA|1997-10-27||Hard, private departments spoil more quickly possible members; clear troops fail only needs. |8.76|7.62|6012006|importobrand #6|12|costume|6|Jewelry|167|ationcallyought|N/A|883208731996blue7862|orchid|Bundle|Unknown|51|priought|
 14|AAAAAAAAOAAAAAAA|1997-10-27|2000-10-26|Teachers carry by the children; old democrats enco|1.85|0.59|8007005|brandnameless #5|7|hockey|8|Sports|460|barcallyese|N/A|1144670162goldenrod2|red|Dram|Unknown|6|eseought|
 15|AAAAAAAAOAAAAAAA|2000-10-27||Teachers carry by the children; old democrats enco|2.57|0.59|5002002|importoscholar #2|2|country|5|Music|86|barcallyese|N/A|1144670162goldenrod2|royal|Pound|Unknown|11|antiought|
 16|AAAAAAAAABAAAAAA|1997-10-27|1999-10-27|Dominant, christian pp. may not raise|0.31|0.14|1002001|importoamalg #1|2|fragrances|1|Women|117|ationoughtought|large|36933056603steel7373|bisque|Lb|Unknown|23|callyought|
@@ -18,3 +18,6 @@
 18|AAAAAAAAABAAAAAA|2001-10-27||Twin, particular aspects will accept only on|0.87|0.48|1002001|importoamalg #1|2|fragrances|1|Women|117|ationoughtought|medium|452645olive281530722|rosy|Carton|Unknown|63|eingought|
 19|AAAAAAAADBAAAAAA|1997-10-27||Political parents know right; perfec|10.61|4.77|2004002|edu packimporto #2|4|sports-apparel|2|Men|364|esecallypri|large|85seashell1303417084|smoke|Ton|Unknown|13|n stought|
 20|AAAAAAAAEBAAAAAA|1997-10-27|2000-10-26|Legal, foreign days know losses; briefly equivalent arguments will expect today. New, front grounds look hot, other aspects. Actually national husbands show usually of course other stations; huge, k|29.35|18.78|1003001|exportiamalg #1|3|maternity|1|Women|995|antin stn st|extra large|97245417ivory0043452|tan|Gram|Unknown|21|barable|
+21|AAAAAAAAEBAAAAAA|2000-10-27||Legal, foreign days know losses; briefly equivalent arguments will expect today. New, front grounds look hot, other aspects. Actually national husbands show usually of course other stations; huge, k|10.71|8.46|6016006|corpbrand #6|3|consignment|6|Jewelry|995|antin stn st|N/A|1050463678plum205437|sienna|Pallet|Unknown|4|oughtable|
+22|AAAAAAAAGBAAAAAA|1997-10-27|1999-10-27|Members endure already near additional details. Ministers should ignore whole times. Bright, brief beliefs become highly bright men; o|7.11|3.27|10005006|scholarunivamalg #6|5|karoke|10|Electronics|169|n stcallyought|N/A|4900684033pink844758|sandy|Each|Unknown|40|ableable|
+23|AAAAAAAAGBAAAAAA|1999-10-28|2001-10-26|Below long minutes make primarily by a months. Secure effects get much upo|3.73|1.41|10005006|scholarunivamalg #7|5|karoke|10|Electronics|260|n stcallyought|N/A|569seashell149755477|papaya|Bunch|Unknown|44|priable|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/promotion.csv b/asterixdb/asterix-app/data/tpcds/promotion.csv
new file mode 100644
index 0000000..98937d7
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/promotion.csv
@@ -0,0 +1,10 @@
+1|AAAAAAAABAAAAAAA|2415023|2415028|1|1000.00|1|ought|Y|N|N|N|N|N|N|N|Men will not say merely. Old, available |Unknown|N|
+2|AAAAAAAACAAAAAAA|2415022|2415026|9|1000.00|1|able|Y|N|N|N|N|N|N|N|So willing buildings coul|Unknown|N|
+3|AAAAAAAADAAAAAAA|2415025|2415029|8|1000.00|1|pri|Y|N|N|N|N|N|N|N|Companies shall not pr|Unknown|N|
+4|AAAAAAAAEAAAAAAA|2415026|2415031|5|1000.00|1|ese|N|N|N|N|N|N|N|N|High, good shoulders can tell on a proble|Unknown|N|
+5|AAAAAAAAFAAAAAAA|2415028|2415029|10|1000.00|1|anti|N|N|N|N|N|N|N|N|Huge, competent structures can remember different pat|Unknown|N|
+6|AAAAAAAAGAAAAAAA|2415027|2415029|7|1000.00|1|cally|N|N|N|N|N|N|N|N|Boards might not reverse up to a hopes. Now high respon|Unknown|N|
+7|AAAAAAAAHAAAAAAA|2415022|2415024|2|1000.00|1|ation|N|N|N|N|N|N|N|N|Effects used to prefer however new terms. Usually ava|Unknown|N|
+8|AAAAAAAAIAAAAAAA|2415023|2415025|6|1000.00|1|eing|Y|N|N|N|N|N|N|N|Offences feel only on a fees.|Unknown|N|
+9|AAAAAAAAJAAAAAAA|2415027|2415031|3|1000.00|1|n st|Y|N|N|N|N|N|N|N|External forces shall comprehend ideal, disciplinary stud|Unknown|N|
+10|AAAAAAAAKAAAAAAA|2415022|2415030|4|1000.00|1|bar|N|N|N|N|N|N|N|N|Only local achievements used to make t|Unknown|N|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/reason.csv b/asterixdb/asterix-app/data/tpcds/reason.csv
new file mode 100644
index 0000000..f6325d0
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/reason.csv
@@ -0,0 +1,10 @@
+1|AAAAAAAABAAAAAAA|Package was damaged|
+2|AAAAAAAACAAAAAAA|Stopped working|
+3|AAAAAAAADAAAAAAA|Did not get it on time|
+4|AAAAAAAAEAAAAAAA|Not the product that was ordred|
+5|AAAAAAAAFAAAAAAA|Parts missing|
+6|AAAAAAAAGAAAAAAA|Does not work with a product that I have|
+7|AAAAAAAAHAAAAAAA|Gift exchange|
+8|AAAAAAAAIAAAAAAA|Did not like the color|
+9|AAAAAAAAJAAAAAAA|Did not like the model|
+10|AAAAAAAAKAAAAAAA|Did not like the make|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/ship_mode.csv b/asterixdb/asterix-app/data/tpcds/ship_mode.csv
new file mode 100644
index 0000000..6b959ed
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/ship_mode.csv
@@ -0,0 +1,5 @@
+1|AAAAAAAABAAAAAAA|EXPRESS|AIR|UPS|YvxVaJI10|
+2|AAAAAAAACAAAAAAA|NEXT DAY|AIR|FEDEX|ldhM8IvpzHgdbBgDfI|
+3|AAAAAAAADAAAAAAA|OVERNIGHT|AIR|AIRBORNE|6Hzzp4JkzjqD8MGXLCDa|
+4|AAAAAAAAEAAAAAAA|TWO DAY|AIR|USPS|UaAJjKDnL4gTOqbpj|
+5|AAAAAAAAFAAAAAAA|LIBRARY|AIR|DHL|HVDFCcQ|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/store.csv b/asterixdb/asterix-app/data/tpcds/store.csv
new file mode 100644
index 0000000..327f776
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/store.csv
@@ -0,0 +1,5 @@
+1|AAAAAAAABAAAAAAA|1997-03-13||2415025|ought|245|5250760|8AM-4PM|William Ward|7|Unknown|Enough high areas stop expectations. Elaborate, local is|Charles Bartley|1|Unknown|1|Unknown|767|Spring |Wy|Suite 250|Midway|Williamson County|TN|35709|United States|-5|0.03|
+2|AAAAAAAACAAAAAAA|1997-03-13|2000-03-12||able|236|5285950|8AM-4PM|Scott Smith|7|Unknown|Parliamentary candidates wait then heavy, keen mil|David Lamontagne|1|Unknown|1|Unknown|255|Sycamore |Dr.|Suite 410|Midway|Williamson County|TN|31904|United States|-5|0.03|
+3|AAAAAAAACAAAAAAA|2000-03-13|||ese|236|7557959|8AM-4PM|Scott Smith|7|Unknown|Impossible, true arms can treat constant, complete w|David Lamontagne|1|Unknown|1|Unknown|877|Park Laurel|Road|Suite T|Midway|Williamson County|TN|31904|United States|-5|0.03|
+4|AAAAAAAAEAAAAAAA|1997-03-13|1999-03-13|2415031|ese|218|9341467|8AM-4PM|Edwin Adams|7|Unknown|Events would achieve other, eastern hours. Mechanisms must not eat other, new org|Thomas Pollack|1|Unknown|1|Unknown|27|Lake |Ln|Suite 260|Midway|Williamson County|TN|90419|United States|-5|0.03|
+5|AAAAAAAAEAAAAAAA|1999-03-14|2001-03-12|2415025|anti|288|9078805|8AM-4PM|Edwin Adams|7|Unknown|Events would achieve other, eastern hours. Mechanisms must not eat other, new org|Thomas Pollack|1|Unknown|1|Unknown|27|Lee 6th|Court|Suite 80|Fairview|Williamson County|TN|35709|United States|-5|0.03|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/store_returns.csv b/asterixdb/asterix-app/data/tpcds/store_returns.csv
new file mode 100644
index 0000000..578ea53
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/store_returns.csv
@@ -0,0 +1,17 @@
+2415025|3|7|6|4|1|6|4|7|1|51|37.23|3.35|40.58|55.28|714|0.74|17.51|18.98|772.63|
+2415026|5|10|10|6|5|4|2|3|5|43|4009.32|120.27|4129.59|28.23|0|3448.01|5.61|555.7|148.5|
+2415030|7|1|9|3|2|1|1|4|3|7|249.48|2.49|251.97|11.5|6.23|234.51|8.83|6.14|20.22|
+2415026|1|5|6|3|3|1|5|3|1|3|227.37|6.82|234.19|9.23|33.42|125.05|51.16|51.16|49.47|
+2415027|8|7|6|5|5|5|1|2|5|23|1273.05|25.46|1298.51|46.15|406.18|954.78|57.28|260.99|477.79|
+2415030|4|6|8|6|2|5|3|8|6|1|35.14|2.45|37.59|79|0|9.13|5.2|20.81|81.45|
+2415030|1|4|5|3|5|2|1|5|7|18|63.54|1.27|64.81|1.26|10.8|33.67|2.68|27.19|13.33|
+2415031|2|9|7|4|5|6|1|5|8|12|18.6|1.48|20.08|46.48|270.6|10.04|8.04|0.52|318.56|
+2415022|6|2|8|3|5|7|1|9|9|58|347.42|6.94|354.36|97.98|331.18|107.7|14.38|225.34|436.1|
+2415022|8|8|8|1|5|4|2|2|10|1|61.81|3.7|65.51|68.44|44.79|54.39|0.14|7.28|116.93|
+2415027|6|3|5|4|5|4|3|5|11|72|809.28|24.27|833.55|88.17|51.12|194.22|442.84|172.22|163.56|
+2415028|7|10|9|3|1|10|3|1|12|48|3960.48|0|3960.48|39.22|65.76|1980.24|1960.43|19.81|104.98|
+2415027|6|1|2|6|4|3|4|3|13|36|1639.44|81.97|1721.41|96.21|712.8|147.54|1163.68|328.22|890.98|
+2415023|7|13|6|3|1|8|3|5|2|27|417.42|4.17|421.59|62.17|2400.3|375.67|37.57|4.18|2466.64|
+2415029|8|3|8|1|4|7|2|6|15|46|1208.42|48.33|1256.75|69.04|887.8|96.67|355.76|755.99|1005.17|
+2415033|8|15|1|1|4|7|2|6|25|46|1208.42|48.33|1256.75|69.04|887.8|96.67|355.76|755.99|1005.17|
+2415025|8|15|1|1|4|7|2|6|26|46|1208.42|48.33|1256.75|69.04|887.8|96.67|355.76|755.99|1005.17|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/store_sales.csv b/asterixdb/asterix-app/data/tpcds/store_sales.csv
index 7d94449..ac76278 100644
--- a/asterixdb/asterix-app/data/tpcds/store_sales.csv
+++ b/asterixdb/asterix-app/data/tpcds/store_sales.csv
@@ -1,19 +1,29 @@
-2450815|43503|1|1|518725|1359|31593|8|39|239999|20|61.65|61.65|16.64|0.00|332.80|1233.00|1233.00|0.00|0.00|332.80|332.80|-900.20|
-2450815|43503|3|1|518725|1359|31593|8|104|239999|98|55.42|110.84|70.93|0.00|6951.14|5431.16|10862.32|139.02|0.00|6951.14|7090.16|1519.98|
-2450815|43503|5|1|518725|1359|31593|8|294|239999|22|20.66|24.58|9.34|16.43|205.48|454.52|540.76|1.89|16.43|189.05|190.94|-265.47|
-2450815|43503|7|1|518725|1359|31593|8|137|239999|42|12.62|18.29|1.09|0.00|45.78|530.04|768.18|2.28|0.00|45.78|48.06|-484.26|
-2450815||9||518725|||||239999|76||23.04||0.00||1260.08|||0.00|297.16|303.10||
-2450815|43503|11|1|518725|1359|31593|8|256|239999|13|23.87|39.62|5.54|12.96|72.02|310.31|515.06|1.77|12.96|59.06|60.83|-251.25|
-2450815|43503|13|1|518725|1359|31593|8|128|239999|2|88.60|151.50|133.32|0.00|266.64|177.20|303.00|13.33|0.00|266.64|279.97|89.44|
-2450815|43503|15|1|518725|1359|31593|8|266|239999|13|60.52|95.62|13.38|0.00|173.94|786.76|1243.06|12.17|0.00|173.94|186.11|-612.82|
-2451293|43503|17|1|518725|1359|31593|8|179|239999|45|93.14|95.00|4.75|0.00|213.75|4191.30|4275.00|19.23|0.00|213.75|232.98|-3977.55|
-2451176|47181|2|10|1873544|2153|1962|10|92|240000|30|67.43|84.96|37.38|583.12|1121.40|2022.90|2548.80|5.38|583.12|538.28|543.66|-1484.62|
-2451176|47181|4|10|1873544|2153|1962|10|143|240000|14|51.64|66.61|8.65|0.00|121.10|722.96|932.54|8.47|0.00|121.10|129.57|-601.86|
-2451176|47181|6|10|1873544|2153|1962|10|129|240000|64|88.30|174.83|148.60|0.00|9510.40|5651.20|11189.12|855.93|0.00|9510.40|10366.33|3859.20|
-2451176|47181|8|10|1873544|2153|1962|10|71|240000|83|3.29|6.34|5.45|0.00|452.35|273.07|526.22|0.00|0.00|452.35|452.35|179.28|
-2451176|47181|10|10|1873544|2153|1962|10|99|240000|96|8.16|14.44|13.86|0.00|1330.56|783.36|1386.24|79.83|0.00|1330.56|1410.39|547.20|
-2451176|47181|12|10|1873544|2153|1962|10|114|240000|41|18.00|34.02|14.62|0.00|599.42|738.00|1394.82|11.98|0.00|599.42|611.40|-138.58|
-2451176|47181|14|10|1873544|2153|1962|10|15|240000|65|35.22|53.18|35.09|0.00|2280.85|2289.30|3456.70|182.46|0.00|2280.85|2463.31|-8.45|
-2451176|47181|16|10|1873544|2153|1962|10|35|240000|13|61.07|64.12|62.19|0.00|808.47|793.91|833.56|64.67|0.00|808.47|873.14|14.56|
-2451176|47181|18|10|1873544|2153|1962|10|216|240000|86|83.78|150.80|150.80|0.00|12968.80|7205.08|12968.80|0.00|0.00|12968.80|12968.80|5763.72|
-2451176|47181|20|10|1873544|2153|1962|10|59|240000|68|81.95|105.71|1.05|0.00|71.40|5572.60|7188.28|1.42|0.00|71.40|72.82|-5501.20|
+2415022|2|1|6|5|4|3|5|9|1|1|11.41|18.71|2.8|99.54|221.2|901.39|1478.09|6.08|99.54|121.66|127.74|-779.73|
+2415022|3|2|6|1|4|3|3|5|1|2|63.63|101.17|41.47|46.03|1534.39|2354.31|3743.29|59.53|46.03|1488.36|1547.89|-865.95|
+2415023|1|13|6|1|5|3|5|5|2|3|80.52|137.68|83.98|0|8314.02|7971.48|13630.32|0|0|8314.02|8314.02|342.54|
+2415023|2|4|3|1|5|1|5|4|1|4|57.37|76.3|6.1|0|85.4|803.18|1068.2|0|0|85.4|85.4|-717.78|
+2415024|4|5|6|4|7|1|4|10|1|10|25.08|36.86|0.73|0|73|2508|3686|6.57|0|73|79.57|-2435|
+2415024|6|6|6|4|4|10|2|1|1|18|93.48|108.43|93.24|0|8484.84|8506.68|9867.13|254.54|0|8484.84|8739.38|-21.84|
+2415025|7|7|6|5|4|2|1|7|1|25|10.68|15.91|6.68|0|33.4|53.4|79.55|2.33|0|33.4|35.73|-20|
+2415025|8|8|6|5|5|2|1|6|1|72|84.72|111.83|61.5|0|4428|6099.84|8051.76|177.12|0|4428|4605.12|-1671.84|
+2415026|3|9|6|4|5|1|1|2|1|99|11.54|11.77|0|0|0|161.56|164.78|0|0|0|0|-161.56|
+2415026|5|10|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415032|1|11|1|5|1|3|3|10|1|144|31.07|54.99|24.19|0|1064.36|1367.08|2419.56|85.14|0|1064.36|1149.5|-302.72|
+2415027|5|12|1|3|7|5|3|4|2|288|52.41|63.94|49.23|0|4332.24|4612.08|5626.72|259.93|0|4332.24|4592.17|-279.84|
+2415034|3|13|1|1|6|3|5|8|1|350|67.71|126.61|87.36|0|4368|3385.5|6330.5|349.44|0|4368|4717.44|982.5|
+2415028|3|14|1|1|4|7|4|9|1|465|27.16|52.14|41.19|0|2677.35|1765.4|3389.1|214.18|0|2677.35|2891.53|911.95|
+2415029|1|15|7|5|5|3|3|6|2|565|93.86|158.62|34.89|0|12.85|6100.9|10310.3|45.35|0|2267.85|2313.2|-3833.05|
+2415023|5|3|6|2|4|3|2|1|1|630|2.27|3.83|1.11|0|33.3|68.1|114.9|0|0|33.3|33.3|-34.8|
+2415030|5|12|6|4|4|10|4|10|1|758|53.09|58.39|7.59|0|440.22|3079.22|3386.62|0|0|440.22|440.22|-2639|
+2415031|7|10|1|6|1|8|1|5|3|879|88.02|128.5|69.39|4549.9|5481.81|6953.58|10151.5|83.87|4549.9|931.91|1015.78|-6021.67|
+2415025|8|15|6|5|6|3|1|6|1|72|84.72|111.83|61.5|0|4428|6099.84|8051.76|177.12|0|4428|4605.12|-1671.84|
+2415026|5|16|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415026|5|17|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415026|5|18|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415026|5|19|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415026|5|20|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415026|5|21|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415026|5|22|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415026|5|23|6|1|4|8|3|1|1|108|4.57|5.34|3.52|0|204.16|265.06|309.72|0|0|204.16|204.16|-60.9|
+2415033|1|15|1|5|5|3|3|6|25|565|93.86|158.62|34.89|0|12.85|6100.9|10310.3|45.35|0|2267.85|2313.2|-3833.05|
+2415029|1|15|1|5|5|2|3|6|26|565|93.86|158.62|34.89|0|12.85|6100.9|10310.3|45.35|0|2267.85|2313.2|-3833.05|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/time_dim.csv b/asterixdb/asterix-app/data/tpcds/time_dim.csv
new file mode 100644
index 0000000..bc73d37
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/time_dim.csv
@@ -0,0 +1,8 @@
+1|AAAAAAAACAAAAAAA|1|8|31|1|AM|third|night||
+2|AAAAAAAAANOEAAAA|20175|5|36|15|AM|third|night||
+3|AAAAAAAAAODIAAAA|33759|9|22|39|AM|first|morning|breakfast|
+4|AAAAAAAAJAOIAAAA|36360|10|6|0|AM|first|morning||
+5|AAAAAAAAMCMKAAAA|44075|12|14|35|PM|first|afternoon|lunch|
+6|AAAAAAAAGBFABAAA|66837|18|33|57|PM|second|evening|dinner|
+7|AAAAAAAAPIBBBAAA|70030|19|27|10|PM|second|evening|dinner|
+8|AAAAAAAAAIBFBAAA|86399|23|59|59|PM|third|evening||
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/warehouse.csv b/asterixdb/asterix-app/data/tpcds/warehouse.csv
new file mode 100644
index 0000000..7f34543
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/warehouse.csv
@@ -0,0 +1,5 @@
+1|AAAAAAAABAAAAAAA|Conventional childr|977787|651|6th |Parkway|Suite 470|Fairview|Williamson County|TN|35709|United States|-5|
+2|AAAAAAAACAAAAAAA|Important issues liv|138504|600|View First|Avenue|Suite P|Fairview|Williamson County|TN|35709|United States|-5|
+3|AAAAAAAADAAAAAAA|Doors canno|294242|534|Ash Laurel|Dr.|Suite 0|Fairview|Williamson County|TN|35709|United States|-5|
+4|AAAAAAAAEAAAAAAA|Bad cards must make.|621234|368|Wilson Elm|Drive|Suite 80|Fairview|Williamson County|TN|35709|United States|-5|
+5|AAAAAAAAFAAAAAAA|||||||Fairview|Williamson County|TN|35709|United States||
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/web_page.csv b/asterixdb/asterix-app/data/tpcds/web_page.csv
new file mode 100644
index 0000000..2aceb67
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/web_page.csv
@@ -0,0 +1,10 @@
+1|AAAAAAAABAAAAAAA|9/3/97||2415031|2415030|Y|9|http://www.foo.com|welcome|2531|8|3|4|
+2|AAAAAAAACAAAAAAA|9/3/97|9/2/00|2415026|2415025|N||http://www.foo.com|protected|1564|4|3|1|
+3|AAAAAAAACAAAAAAA|9/3/00||2415030|2415029|N||http://www.foo.com|feedback|1564|4|3|4|
+4|AAAAAAAAEAAAAAAA|9/3/97|9/3/99|2415023|2415029|N||http://www.foo.com|general|3732|18|7|1|
+5|AAAAAAAAEAAAAAAA|9/4/99|9/2/01|2415025|2415030|N||http://www.foo.com|welcome|3732|18|3|1|
+6|AAAAAAAAEAAAAAAA|9/3/01||2415031|2415031|N||http://www.foo.com|ad|3732|18|7|4|
+7|AAAAAAAAHAAAAAAA|9/3/97||2415031|2415028|N||http://www.foo.com|feedback|3034|18|7|4|
+8|AAAAAAAAIAAAAAAA|9/3/97|9/2/00|2415029|2415030|Y|3|http://www.foo.com|protected|3128|12|2|4|
+9|AAAAAAAAIAAAAAAA|9/3/00||2415029|2415025|Y|5|http://www.foo.com|welcome|3128|13|5|3|
+10|AAAAAAAAKAAAAAAA|9/3/97|9/3/99||2415028|N||http://www.foo.com||||||
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/web_returns.csv b/asterixdb/asterix-app/data/tpcds/web_returns.csv
new file mode 100644
index 0000000..e89a512
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/web_returns.csv
@@ -0,0 +1,13 @@
+2415029||4||6||2||1|2|4||9|3|11||25.52|||16.72||16.36|165.47||
+2415027|6|3|5|6|1|3|8|6|5|4|9|8|4|33|1820.94|54.62|1875.56|70.16|1000.23|1802.73|14.56|3.65|1125.01|
+2415028|4|5|10|5|3|4|1|2|3|2|7|8|5|18|1700.08|7.38|89.46|35.63|1342.26|38.57|19.14|24.37|1385.27|
+2415027|5|5|1|1|2|10|1|4|2|4|8|4|6|16|1956|156.48|2112.48|22.56|221.76|215.16|504.84|1236|400.8|
+2415022|6|10|10|4|4|9|8|6|1|1|10|1|1|3|134.4|0|134.4|78.06|108.78|108.86|15.06|10.48|186.84|
+2415025|5|5|8|3|2|2|2|3|3|1|4|7|18|43|520.3|20.81|541.11|54.02|3468.38|504.69|14.36|1.25|3543.21|
+2415028|8|1|8|5|2|4|3|6|4|8|1|1|9|4|7.68|2.39|82.07|11.4|54.52|58.16|1.5|20.02|68.31|
+2415025|4|8|2|2|5|9|3|2|5|9|2|9|10|28|707|28.28|735.28|16.84|382.2|424.2|248.86|33.94|427.32|
+2415029|7|6|6|2|1|10|3|2|3|5|4|10|11|6|708.66|21.25|729.91|25.19|146.1|474.8|144.99|88.87|192.54|
+2415027|8|3|5|3|1|5|9|4|5|4|4|1|12|26|528.84|37.01|565.85|90.97|237.12|449.51|23|56.33|365.1|
+2415027|8|10|3|6|1|1|8|6|5|1|10|10|13|16|417.6|25.05|442.65|22.13|539.36|50.11|128.62|238.87|586.54|
+2415028||3|4||1||9|1||8|4||14|4|100.87|||11.84||9.39||||
+2415027|4|9|6|1|1|10|2|6|4|6|10|10|15|5|219.85|15.38|235.23|28.94|233.9|147.29|42.81|29.75|278.22|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/web_sales.csv b/asterixdb/asterix-app/data/tpcds/web_sales.csv
new file mode 100644
index 0000000..63d0514
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/web_sales.csv
@@ -0,0 +1,16 @@
+2415026|7|2415023|4|5|1|4|2|2|3|5|5|8|4|5|5|6|1|57|33.59|59.45|38.04|1220.37|2168.28|1914.63|3388.65|50.95|1149.18|575.7|1019.1|1070.05|1594.8|1645.75|-895.53|
+2415027|2|2415026|5|21|2|4|5|3|6|2|4|9|2|3|8|271|1|38|29.83|48.92|26.41|855.38|1003.58|1133.54|1858.96|30.1|0|910.86|1003.58|1033.68|1914.44|1944.54|-129.96|
+2415027|6|2415024|1|4|5|1|7|4|4|3|8|10|5|5|6|300|3|32|49.72|107.89|97.1|345.28|3107.2|1591.04|3452.48|124.28|0|828.48|3107.2|3231.48|3935.68|4059.96|1516.16|
+2415022|8|2415024|1|3|4|4|1|4|1|2|3|7|2|3|7|63|4|65|69.18|112.07|22.41|5827.9|1456.65|4496.7|7284.55|29.13|0|2476.5|1456.65|1485.78|3933.15|3962.28|-3040.05|
+2415022|4|2415023|9|9|3|5|2|2|5|4|3|10|2|2|1|18|5|58|36.62|41.38|16.13|1464.5|935.54|2123.96|2400.04|84.19|0|167.62|935.54|1019.73|1103.16|1187.35|-1188.42|
+2415030|8|2415027|5|6|5|1|8|9|6|2|4|10|5|2|6|185|6|90|72.05|161.39|27.43|12056.4|2468.7|6484.5|14525.1|74.06|0|4647.6|2468.7|2542.76|7116.3|7190.36|-4015.8|
+2415022|7|2415025|4|9|4|1|1|2|4|4|3|10|4|4|8|293|7|15|83.92|174.55|69.82|1570.95|1047.3|1258.8|2618.25|20.94|0|392.7|1047.3|1068.24|1440|1460.94|-211.5|
+2415026|5|2415022|4|4|1|2|9|8|6|1|10|7|2|5|7|202|8|16|45.27|128.56|68.13|966.88|1090.08|724.32|2056.96|87.2|0|370.24|1090.08|1177.28|1460.32|1547.52|365.76|
+2415024|5|2415027|7|3|1|4|7|5|2|2|10|2|4|2|4|49|9|54|53.45|60.39|26.57|1826.28|1434.78|2886.3|3261.06|48.78|215.21|488.7|1219.57|1268.35|1708.27|1757.05|-1666.73|
+2415029|6|2415022|4|1|6|1|10|2|4|3|9|5|3|2|4|267|10|47|49.64|68.99|12.41|2659.26|583.27|2333.08|3242.53|52.49|0|1134.58|583.27|635.76|1717.85|1770.34|-1749.81|
+2415031|2|2415029|15|5|3|5|7|5|6|5|5|1|2|1|1|221|8|12|7.24|12.16|11.91|3|142.92|86.88|145.92|10|0|29.16|142.92|152.92|172.08|182.08|56.04|
+2415024|3|2415024|3|8|1|1|9|3|4|2|1|7|3|5|6|246|12|6|45.21|67.36|30.31|222.3|181.86|271.26|404.16|16.36|0|185.88|181.86|198.22|367.74|384.1|-89.4|
+2415024|4|2415024|22|5|6|1|9|7|5|2|5|4|2|2|8|237|10|43|9.62|13.08|9.02|174.58|387.86|413.66|562.44|19.39|0|117.82|387.86|407.25|505.68|525.07|-25.8|
+2415029|4|2415024|8|3|6|2|3|2|3|3|7|10|2|1|1|223|14|57|35.95|62.55|43.78|1069.89|2495.46|2049.15|3565.35|224.59|0|962.16|2495.46|2720.05|3457.62|3682.21|446.31|
+2415030|4|2415029|1|2|4|5|10|5|1|3|8|1|1|4|7|72|15|93|65.8|152.65|29|11499.45|2697|6119.4|14196.45|215.76|0|7097.76|2697|2912.76|9794.76|10010.52|-3422.4|
+2415025|4|2415029|1|2|4|5|2|5|1|3|8|1|1|4|7|72|16|93|65.8|152.65|29|11499.45|2697|6119.4|14196.45|215.76|0|7097.76|2697|2912.76|9794.76|10010.52|-3422.4|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/data/tpcds/web_site.csv b/asterixdb/asterix-app/data/tpcds/web_site.csv
new file mode 100644
index 0000000..a698594
--- /dev/null
+++ b/asterixdb/asterix-app/data/tpcds/web_site.csv
@@ -0,0 +1,5 @@
+1|AAAAAAAABAAAAAAA|1997-08-16||site_0|2450807||Unknown|Ronald Shaffer|4|Grey lines ought to result indeed centres. Tod|Well similar decisions used to keep hardly democratic, personal priorities.|Joe George|6|cally|51|Dogwood Sunset|Ln|Suite 330|Midway|Williamson County|TN|31904|United States|-5|0.10|
+2|AAAAAAAACAAAAAAA|1997-08-16|2000-08-15|site_0|2450798|2447148|Unknown|Tommy Jones|6|Completely excellent things ought to pro|Lucky passengers know. Red details will not hang alive, international s|David Myers|4|pri|358|Ridge Wilson|Cir.|Suite 150|Midway|Williamson County|TN|31904|United States|-5|0.00|
+3|AAAAAAAACAAAAAAA|2000-08-16||site_0|2450798|2447148|Unknown|Tommy Jones|3|Completely excellent things ought to pro|Particular, common seasons shall not indicate fully more single decisions; |David Myers|4|ese|753|7th |Pkwy|Suite 210|Midway|Williamson County|TN|31904|United States|-5|0.02|
+4|AAAAAAAAEAAAAAAA|1997-08-16|1999-08-16|site_0|2450781|2448956|Unknown|Harold Wilson|5|As strong notes deal questions. Proud visito|Deeply small books cannot extend all similar, clear historians. Free, new camp|James Harris|5|anti|805|Hill Hill|Dr.|Suite R|Midway|Williamson County|TN|31904|United States|-5|0.08|
+5|AAAAAAAAEAAAAAAA|1999-08-17|2001-08-15|site_0|2450781|2448956|Unknown|Harold Wilson|5|Wide, final representat|Deeply small books cannot extend all similar, clear historians. Free, new camp|Edward George|1|ought|805|2nd 3rd|Road|Suite R|Fairview|Williamson County|TN|31904|United States|-5|0.08|
\ No newline at end of file
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index cf96893..08a3d81 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -303,5 +303,19 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-hdfs-2.x</artifactId>
+      <version>0.2.18-SNAPSHOT</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop.version}</version>
+      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-app/rttest/results/temporal/interval_joins/interval_overlapping/interval_overlapping.3.adm b/asterixdb/asterix-app/rttest/results/temporal/interval_joins/interval_overlapping/interval_overlapping.3.adm
deleted file mode 100644
index 4ecd143..0000000
--- a/asterixdb/asterix-app/rttest/results/temporal/interval_joins/interval_overlapping/interval_overlapping.3.adm
+++ /dev/null
@@ -1,35 +0,0 @@
-{ "staff": "Alex", "student": "Charles" }
-{ "staff": "Alex", "student": "Frank" }
-{ "staff": "Alex", "student": "Karen" }
-{ "staff": "Alex", "student": "Mary" }
-{ "staff": "Alex", "student": "Steve" }
-{ "staff": "Elisabeth", "student": "Charles" }
-{ "staff": "Elisabeth", "student": "Frank" }
-{ "staff": "Elisabeth", "student": "Karen" }
-{ "staff": "Elisabeth", "student": "Mary" }
-{ "staff": "Elisabeth", "student": "Olga" }
-{ "staff": "Elisabeth", "student": "Steve" }
-{ "staff": "Elisabeth", "student": "Tess" }
-{ "staff": "Franklin", "student": "Karen" }
-{ "staff": "Franklin", "student": "Mary" }
-{ "staff": "Franklin", "student": "Steve" }
-{ "staff": "Franklin", "student": "Tess" }
-{ "staff": "Henry", "student": "Charles" }
-{ "staff": "Henry", "student": "Frank" }
-{ "staff": "Henry", "student": "Karen" }
-{ "staff": "Henry", "student": "Mary" }
-{ "staff": "Henry", "student": "Steve" }
-{ "staff": "Maryann", "student": "Karen" }
-{ "staff": "Maryann", "student": "Steve" }
-{ "staff": "Maryann", "student": "Tess" }
-{ "staff": "Vicky", "student": "Charles" }
-{ "staff": "Vicky", "student": "Frank" }
-{ "staff": "Vicky", "student": "Karen" }
-{ "staff": "Vicky", "student": "Mary" }
-{ "staff": "Vicky", "student": "Olga" }
-{ "staff": "Vicky", "student": "Steve" }
-{ "staff": "Vicky", "student": "Tess" }
-{ "staff": "Zack", "student": "Charles" }
-{ "staff": "Zack", "student": "Frank" }
-{ "staff": "Zack", "student": "Mary" }
-{ "staff": "Zack", "student": "Olga" }
diff --git a/asterixdb/asterix-app/rttest/results/temporal/interval_joins/interval_overlapping/interval_overlapping.4.adm b/asterixdb/asterix-app/rttest/results/temporal/interval_joins/interval_overlapping/interval_overlapping.4.adm
deleted file mode 100644
index 4ecd143..0000000
--- a/asterixdb/asterix-app/rttest/results/temporal/interval_joins/interval_overlapping/interval_overlapping.4.adm
+++ /dev/null
@@ -1,35 +0,0 @@
-{ "staff": "Alex", "student": "Charles" }
-{ "staff": "Alex", "student": "Frank" }
-{ "staff": "Alex", "student": "Karen" }
-{ "staff": "Alex", "student": "Mary" }
-{ "staff": "Alex", "student": "Steve" }
-{ "staff": "Elisabeth", "student": "Charles" }
-{ "staff": "Elisabeth", "student": "Frank" }
-{ "staff": "Elisabeth", "student": "Karen" }
-{ "staff": "Elisabeth", "student": "Mary" }
-{ "staff": "Elisabeth", "student": "Olga" }
-{ "staff": "Elisabeth", "student": "Steve" }
-{ "staff": "Elisabeth", "student": "Tess" }
-{ "staff": "Franklin", "student": "Karen" }
-{ "staff": "Franklin", "student": "Mary" }
-{ "staff": "Franklin", "student": "Steve" }
-{ "staff": "Franklin", "student": "Tess" }
-{ "staff": "Henry", "student": "Charles" }
-{ "staff": "Henry", "student": "Frank" }
-{ "staff": "Henry", "student": "Karen" }
-{ "staff": "Henry", "student": "Mary" }
-{ "staff": "Henry", "student": "Steve" }
-{ "staff": "Maryann", "student": "Karen" }
-{ "staff": "Maryann", "student": "Steve" }
-{ "staff": "Maryann", "student": "Tess" }
-{ "staff": "Vicky", "student": "Charles" }
-{ "staff": "Vicky", "student": "Frank" }
-{ "staff": "Vicky", "student": "Karen" }
-{ "staff": "Vicky", "student": "Mary" }
-{ "staff": "Vicky", "student": "Olga" }
-{ "staff": "Vicky", "student": "Steve" }
-{ "staff": "Vicky", "student": "Tess" }
-{ "staff": "Zack", "student": "Charles" }
-{ "staff": "Zack", "student": "Frank" }
-{ "staff": "Zack", "student": "Mary" }
-{ "staff": "Zack", "student": "Olga" }
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
index dc0087b..7750ab0 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -64,7 +64,7 @@
 
     public void init(boolean deleteOldInstanceData) throws Exception {
         ncs = new NodeControllerService[0]; // ensure that ncs is not null
-        propertiesAccessor = new AsterixPropertiesAccessor();
+        propertiesAccessor = AsterixPropertiesAccessor.getInstance();
         if (deleteOldInstanceData) {
             deleteTransactionLogs();
             removeTestStorageFiles();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java
index f32e451..4e9bb25 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/APIServlet.java
@@ -177,7 +177,17 @@
             InputStreamReader isr = new InputStreamReader(is);
             StringBuilder sb = new StringBuilder();
             BufferedReader br = new BufferedReader(isr);
-            String line = br.readLine();
+            String line;
+            try {
+                line = br.readLine();
+            } catch (NullPointerException e) {
+                LOGGER.log(Level.WARNING, "NPE reading resource " + resourcePath
+                        + ", assuming JDK-8080094; returning 404", e);
+                // work around the JDK bug where a broken InputStream is returned when the resourcePath is a
+                // directory; see https://bugs.openjdk.java.net/browse/JDK-8080094
+                response.sendError(HttpServletResponse.SC_NOT_FOUND);
+                return;
+            }
 
             while (line != null) {
                 sb.append(line);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryServiceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryServiceServlet.java
index 427e177..0f88f48 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryServiceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/QueryServiceServlet.java
@@ -54,6 +54,7 @@
 import org.apache.asterix.translator.IStatementExecutorFactory;
 import org.apache.asterix.translator.SessionConfig;
 import org.apache.commons.io.IOUtils;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.core.algebra.prettyprint.AlgebricksAppendable;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.dataset.IHyracksDataset;
@@ -284,11 +285,19 @@
     }
 
     private static SessionConfig createSessionConfig(RequestParameters param, PrintWriter resultWriter) {
-        SessionConfig.ResultDecorator resultPrefix = (AlgebricksAppendable app) -> {
-            app.append("\t\"");
-            app.append(ResultFields.RESULTS.str());
-            app.append("\": ");
-            return app;
+        SessionConfig.ResultDecorator resultPrefix = new SessionConfig.ResultDecorator() {
+            int resultNo = -1;
+            @Override
+            public AlgebricksAppendable append(AlgebricksAppendable app) throws AlgebricksException {
+                app.append("\t\"");
+                app.append(ResultFields.RESULTS.str());
+                if (resultNo >= 0) {
+                    app.append('-').append(String.valueOf(resultNo));
+                }
+                ++resultNo;
+                app.append("\": ");
+                return app;
+            }
         };
 
         SessionConfig.ResultDecorator resultPostfix = (AlgebricksAppendable app) -> {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java
index ed081b5..c2c214c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/AsterixNCAppRuntimeContext.java
@@ -75,7 +75,6 @@
 import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
 import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepositoryFactory;
 import org.apache.asterix.transaction.management.service.transaction.TransactionSubsystem;
-import org.apache.hyracks.api.application.IApplicationConfig;
 import org.apache.hyracks.api.application.INCApplicationContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.io.IIOManager;
@@ -134,26 +133,17 @@
     private IReplicationManager replicationManager;
     private IRemoteRecoveryManager remoteRecoveryManager;
     private IReplicaResourcesManager replicaResourcesManager;
-    private final int metadataRmiPort;
 
     private final ILibraryManager libraryManager;
     private final NCExtensionManager ncExtensionManager;
 
-    public AsterixNCAppRuntimeContext(INCApplicationContext ncApplicationContext, int metadataRmiPort,
-            List<AsterixExtension> extensions) throws AsterixException, InstantiationException, IllegalAccessException,
+    public AsterixNCAppRuntimeContext(INCApplicationContext ncApplicationContext, List<AsterixExtension> extensions)
+            throws AsterixException, InstantiationException, IllegalAccessException,
             ClassNotFoundException, IOException {
         List<AsterixExtension> allExtensions = new ArrayList<>();
         this.ncApplicationContext = ncApplicationContext;
-        // Determine whether to use old-style asterix-configuration.xml or new-style configuration.
-        // QQQ strip this out eventually
-        AsterixPropertiesAccessor propertiesAccessor;
-        IApplicationConfig cfg = ncApplicationContext.getAppConfig();
-        // QQQ this is NOT a good way to determine whether the config is valid
-        if (cfg.getString("cc", "cluster.address") != null) {
-            propertiesAccessor = new AsterixPropertiesAccessor(cfg);
-        } else {
-            propertiesAccessor = new AsterixPropertiesAccessor();
-        }
+        AsterixPropertiesAccessor propertiesAccessor =
+                AsterixPropertiesAccessor.getInstance(ncApplicationContext.getAppConfig());
         compilerProperties = new AsterixCompilerProperties(propertiesAccessor);
         externalProperties = new AsterixExternalProperties(propertiesAccessor);
         metadataProperties = new AsterixMetadataProperties(propertiesAccessor);
@@ -163,7 +153,6 @@
         buildProperties = new AsterixBuildProperties(propertiesAccessor);
         replicationProperties = new AsterixReplicationProperties(propertiesAccessor);
         messagingProperties = new MessagingProperties(propertiesAccessor);
-        this.metadataRmiPort = metadataRmiPort;
         libraryManager = new ExternalLibraryManager();
         if (extensions != null) {
             allExtensions.addAll(extensions);
@@ -458,7 +447,7 @@
         // This is a special case, we just give the metadataNode directly.
         // This way we can delay the registration of the metadataNode until
         // it is completely initialized.
-        MetadataManager.instantiate(new MetadataManager(proxy, MetadataNode.INSTANCE));
+        MetadataManager.initialize(proxy, MetadataNode.INSTANCE);
         MetadataBootstrap.startUniverse(this, ncApplicationContext, newUniverse);
         MetadataBootstrap.startDDLRecovery();
         ncExtensionManager.initializeMetadata();
@@ -470,7 +459,8 @@
 
     @Override
     public void exportMetadataNodeStub() throws RemoteException {
-        IMetadataNode stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE, metadataRmiPort);
+        IMetadataNode stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE,
+                getMetadataProperties().getMetadataPort());
         ((IAsterixStateProxy) ncApplicationContext.getDistributedState()).setMetadataNode(stub);
     }
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index 91b33ee..f4ef1a1 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -513,9 +513,9 @@
         String dataverseName = getActiveDataverse(dd.getDataverse());
         String datasetName = dd.getName().getValue();
         DatasetType dsType = dd.getDatasetType();
-        String itemTypeDataverseName = dd.getItemTypeDataverse().getValue();
+        String itemTypeDataverseName = getActiveDataverse(dd.getItemTypeDataverse());
         String itemTypeName = dd.getItemTypeName().getValue();
-        String metaItemTypeDataverseName = dd.getMetaItemTypeDataverse().getValue();
+        String metaItemTypeDataverseName = getActiveDataverse(dd.getMetaItemTypeDataverse());
         String metaItemTypeName = dd.getMetaItemTypeName().getValue();
         Identifier ngNameId = dd.getNodegroupName();
         String nodegroupName = getNodeGroupName(ngNameId, dd, dataverseName);
@@ -3101,7 +3101,7 @@
         return (dataverse != null) ? dataverse : activeDefaultDataverse.getDataverseName();
     }
 
-    protected String getActiveDataverse(Identifier dataverse) throws AlgebricksException {
+    protected String getActiveDataverse(Identifier dataverse) {
         return getActiveDataverseName(dataverse != null ? dataverse.getValue() : null);
     }
 
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index 9120aa5..764b559 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -71,6 +71,7 @@
 import org.apache.hyracks.api.lifecycle.LifeCycleComponentManager;
 import org.apache.hyracks.api.messages.IMessageBroker;
 import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.hyracks.control.common.controllers.CCConfig;
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
@@ -88,7 +89,8 @@
 
     @Override
     public void start(ICCApplicationContext ccAppCtx, String[] args) throws Exception {
-        IMessageBroker messageBroker = new CCMessageBroker((ClusterControllerService) ccAppCtx.getControllerService());
+        final ClusterControllerService controllerService = (ClusterControllerService) ccAppCtx.getControllerService();
+        IMessageBroker messageBroker = new CCMessageBroker(controllerService);
         this.appCtx = ccAppCtx;
 
         if (LOGGER.isLoggable(Level.INFO)) {
@@ -101,20 +103,21 @@
         AsterixResourceIdManager resourceIdManager = new AsterixResourceIdManager();
         ExternalLibraryUtils.setUpExternaLibraries(libraryManager, false);
         AsterixAppContextInfo.initialize(appCtx, getNewHyracksClientConnection(), GlobalRecoveryManager.instance(),
-                libraryManager, resourceIdManager);
+                libraryManager, resourceIdManager, () -> MetadataManager.INSTANCE);
         ccExtensionManager = new CompilerExtensionManager(getExtensions());
         AsterixAppContextInfo.INSTANCE.setExtensionManager(ccExtensionManager);
 
-        if (System.getProperty("java.rmi.server.hostname") == null) {
-            System.setProperty("java.rmi.server.hostname",
-                    ((ClusterControllerService) ccAppCtx.getControllerService()).getCCConfig().clusterNetIpAddress);
-        }
+        final CCConfig ccConfig = controllerService.getCCConfig();
 
-        setAsterixStateProxy(AsterixStateProxy.registerRemoteObject());
+        if (System.getProperty("java.rmi.server.hostname") == null) {
+            System.setProperty("java.rmi.server.hostname", ccConfig.clusterNetIpAddress);
+        }
+        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.INSTANCE.getMetadataProperties();
+
+        setAsterixStateProxy(AsterixStateProxy.registerRemoteObject(metadataProperties.getMetadataCallbackPort()));
         appCtx.setDistributedState(proxy);
 
-        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.INSTANCE.getMetadataProperties();
-        MetadataManager.instantiate(new MetadataManager(proxy, metadataProperties));
+        MetadataManager.initialize(proxy, metadataProperties);
 
         AsterixAppContextInfo.INSTANCE.getCCApplicationContext()
                 .addJobLifecycleListener(ActiveLifecycleListener.INSTANCE);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
index 4eaab2d..75cbe44 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -42,6 +42,7 @@
 import org.apache.asterix.metadata.cluster.RemoveNodeWorkResponse;
 import org.apache.asterix.runtime.util.ClusterStateManager;
 import org.apache.hyracks.api.application.IClusterLifecycleListener;
+import org.apache.hyracks.api.exceptions.HyracksException;
 
 public class ClusterLifecycleListener implements IClusterLifecycleListener {
 
@@ -64,13 +65,16 @@
     }
 
     @Override
-    public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) {
+    public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) throws HyracksException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("NC: " + nodeId + " joined");
         }
         ClusterStateManager.INSTANCE.addNCConfiguration(nodeId, ncConfiguration);
+
         //if metadata node rejoining, we need to rebind the proxy connection when it is active again.
-        MetadataManager.INSTANCE.rebindMetadataNode = !ClusterStateManager.INSTANCE.isMetadataNodeActive();
+        if (!ClusterStateManager.INSTANCE.isMetadataNodeActive()) {
+            MetadataManager.INSTANCE.rebindMetadataNode();
+        }
 
         Set<String> nodeAddition = new HashSet<String>();
         nodeAddition.add(nodeId);
@@ -88,7 +92,7 @@
     }
 
     @Override
-    public void notifyNodeFailure(Set<String> deadNodeIds) {
+    public void notifyNodeFailure(Set<String> deadNodeIds) throws HyracksException {
         for (String deadNode : deadNodeIds) {
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("NC: " + deadNode + " left");
@@ -96,7 +100,9 @@
             ClusterStateManager.INSTANCE.removeNCConfiguration(deadNode);
 
             //if metadata node failed, we need to rebind the proxy connection when it is active again
-            MetadataManager.INSTANCE.rebindMetadataNode = !ClusterStateManager.INSTANCE.isMetadataNodeActive();
+            if (!ClusterStateManager.INSTANCE.isMetadataNodeActive()) {
+                MetadataManager.INSTANCE.rebindMetadataNode();
+            }
         }
         updateProgress(ClusterEventType.NODE_FAILURE, deadNodeIds);
         Set<IClusterEventsSubscriber> subscribers = ClusterManager.INSTANCE.getRegisteredClusterEventSubscribers();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
index f26afa8..ea1f714 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -39,6 +39,7 @@
 import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 import org.apache.asterix.common.transactions.IRecoveryManager;
 import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
+import org.apache.asterix.common.utils.PrintUtil;
 import org.apache.asterix.common.utils.StoragePathUtil;
 import org.apache.asterix.event.schema.cluster.Cluster;
 import org.apache.asterix.event.schema.cluster.Node;
@@ -61,10 +62,6 @@
 public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
     private static final Logger LOGGER = Logger.getLogger(NCApplicationEntryPoint.class.getName());
 
-    @Option(name = "-metadata-port", usage = "IP port to bind metadata listener (default: random port)",
-            required = false)
-    public int metadataRmiPort = 0;
-
     @Option(name = "-initial-run",
             usage = "A flag indicating if it's the first time the NC is started (default: false)", required = false)
     public boolean initialRun = false;
@@ -94,7 +91,6 @@
             parser.printUsage(System.err);
             throw e;
         }
-
         ncAppCtx.setThreadFactory(new AsterixThreadFactory(ncAppCtx.getThreadFactory(),
                 ncAppCtx.getLifeCycleComponentManager()));
         ncApplicationContext = ncAppCtx;
@@ -103,11 +99,13 @@
             LOGGER.info("Starting Asterix node controller: " + nodeId);
         }
 
+        final NodeControllerService controllerService = (NodeControllerService) ncAppCtx.getControllerService();
+
         if (System.getProperty("java.rmi.server.hostname") == null) {
-            System.setProperty("java.rmi.server.hostname", ((NodeControllerService) ncAppCtx.getControllerService())
+            System.setProperty("java.rmi.server.hostname", (controllerService)
                     .getConfiguration().clusterNetPublicIPAddress);
         }
-        runtimeContext = new AsterixNCAppRuntimeContext(ncApplicationContext, metadataRmiPort, getExtensions());
+        runtimeContext = new AsterixNCAppRuntimeContext(ncApplicationContext, getExtensions());
         AsterixMetadataProperties metadataProperties = ((IAsterixPropertiesProvider) runtimeContext)
                 .getMetadataProperties();
         if (!metadataProperties.getNodeNames().contains(ncApplicationContext.getNodeId())) {
@@ -120,8 +118,7 @@
         ncApplicationContext.setApplicationObject(runtimeContext);
         MessagingProperties messagingProperties = ((IAsterixPropertiesProvider) runtimeContext)
                 .getMessagingProperties();
-        messageBroker = new NCMessageBroker((NodeControllerService) ncAppCtx.getControllerService(),
-                messagingProperties);
+        messageBroker = new NCMessageBroker(controllerService, messagingProperties);
         ncApplicationContext.setMessageBroker(messageBroker);
         MessagingChannelInterfaceFactory interfaceFactory = new MessagingChannelInterfaceFactory(
                 (NCMessageBroker) messageBroker, messagingProperties);
@@ -213,7 +210,7 @@
             if (LOGGER.isLoggable(Level.INFO)) {
                 LOGGER.info("System state: " + SystemState.NEW_UNIVERSE);
                 LOGGER.info("Node ID: " + nodeId);
-                LOGGER.info("Stores: " + metadataProperties.getStores());
+                LOGGER.info("Stores: " + PrintUtil.toString(metadataProperties.getStores()));
                 LOGGER.info("Root Metadata Store: " + metadataProperties.getStores().get(nodeId)[0]);
             }
 
diff --git a/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml b/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml
index 142b93a..ef0cdf2 100644
--- a/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml
+++ b/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml
@@ -89,7 +89,10 @@
     <name>storage.memorycomponent.numpages</name>
     <value>8</value>
     <description>The number of pages to allocate for a memory component.
-      (Default = 8)
+      This budget is shared by all the memory components of the primary
+      index and all its secondary indexes across all I/O devices on a node.
+      Note: in-memory components usually have a fill factor of 75%, since
+      the pages are 75% full and the remaining 25% is unutilized. (Default = 256)
     </description>
   </property>
   <property>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.1.ddl.aql
similarity index 85%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.1.ddl.aql
index af2f691..732363f 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.1.ddl.aql
@@ -16,4 +16,10 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+drop type UserType if exists;
+
+create type UserType as {
+  id: bigint
+};
+
+create dataset GleambookUsers(UserType) primary key id;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.2.query.aql
similarity index 86%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.2.query.aql
index af2f691..eb73925 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.2.query.aql
@@ -16,4 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+let $count := count(
+for $v in dataset Metadata.Dataset
+where $v.DatasetName = 'GleambookUsers'
+return $v)
+return $count
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.3.ddl.aql
similarity index 91%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.3.ddl.aql
index af2f691..272c659 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/metadata/queries/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.3.ddl.aql
@@ -16,4 +16,5 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+drop dataset GleambookUsers if exists;
+drop type UserType if exists;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/metadata/results/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.2.adm b/asterixdb/asterix-app/src/test/resources/metadata/results/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.2.adm
new file mode 100644
index 0000000..56a6051
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/metadata/results/basic/dataset_with_default_dataverse_type/dataset_with_default_dataverse_type.2.adm
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
index 251e2e4..7b0e015 100644
--- a/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -19,6 +19,11 @@
 <test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
   <test-group name="basic">
     <test-case FilePath="basic">
+      <compilation-unit name="dataset_with_default_dataverse_type">
+        <output-dir compare="Text">dataset_with_default_dataverse_type</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="basic">
       <compilation-unit name="dataset_with_meta-1">
         <output-dir compare="Text">dataset_with_meta-1</output-dir>
       </compilation-unit>
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multi-indexes/skip-one-index.aql b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multi-indexes/skip-one-index.aql
new file mode 100644
index 0000000..7383edc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multi-indexes/skip-one-index.aql
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type tTweet as closed {
+  id: int32,
+  location: point,
+  message: string,
+  create_at: datetime,
+  misc: string
+}
+
+create dataset dsTweet(tTweet) primary key id;
+
+create index ngram_index on dsTweet(message) type ngram(3);
+create index time_index on dsTweet(create_at) type btree;
+create index location_index on dsTweet(location) type rtree;
+
+write output to nc1:"rttest/btree-rtree-ngram-intersect.adm";
+
+let $region := create-rectangle(create-point(-128.43007812500002,20.298506037222175), create-point(-64.26992187500002,54.56902589732035))
+let $ts_start := datetime("2015-11-11T00:00:00Z")
+let $ts_end := datetime("2015-12-18T23:59:59Z")
+let $keyword := "hello"
+for $t in dataset dsTweet
+where $t.create_at >= $ts_start and $t.create_at < $ts_end
+  and /* +skip-index */ spatial-intersect($t.location, $region)
+  and contains($t.message, $keyword)
+return $t
+
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multi-indexes/skip-two-index.aql b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multi-indexes/skip-two-index.aql
new file mode 100644
index 0000000..de0807f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/multi-indexes/skip-two-index.aql
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description    : Tests that skip-index hints on two of the three secondary indexes leave only one applicable index, so the intersection rule is not triggered
+ * Success        : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type tTweet as closed {
+  id: int32,
+  location: point,
+  message: string,
+  create_at: datetime,
+  misc: string
+}
+
+create dataset dsTweet(tTweet) primary key id;
+
+create index ngram_index on dsTweet(message) type ngram(3);
+create index time_index on dsTweet(create_at) type btree;
+create index location_index on dsTweet(location) type rtree;
+
+write output to nc1:"rttest/btree-rtree-ngram-intersect.adm";
+
+let $region := create-rectangle(create-point(-128.43007812500002,20.298506037222175), create-point(-64.26992187500002,54.56902589732035))
+let $ts_start := datetime("2015-11-11T00:00:00Z")
+let $ts_end := datetime("2015-12-18T23:59:59Z")
+let $keyword := "hello"
+for $t in dataset dsTweet
+where $t.create_at /* +skip-index */ >= $ts_start and $t.create_at /* +skip-index */ < $ts_end
+  and  /* +skip-index */ spatial-intersect($t.location, $region)
+  and contains($t.message, $keyword)
+return $t
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/query-ASTERIXDB-1671.sqlpp
similarity index 66%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/optimizerts/queries/query-ASTERIXDB-1671.sqlpp
index af2f691..33b40b2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries/query-ASTERIXDB-1671.sqlpp
@@ -16,4 +16,25 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+DROP DATAVERSE tpch IF EXISTS;
+CREATE DATAVERSE tpch;
+
+USE tpch;
+
+
+CREATE TYPE LineItemType AS {
+  l_orderkey : bigint,
+  l_linenumber : bigint
+}
+
+CREATE DATASET LineItem(LineItemType) PRIMARY KEY l_orderkey,l_linenumber;
+
+-- Tests that the query does not do a complete sort over the data.
+
+SELECT  *
+FROM  LineItem l
+ORDER BY l.l_returnflag, l.l_linestatus
+LIMIT -1;
+
+DROP DATAVERSE tpch;
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/multi-indexes/skip-one-index.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multi-indexes/skip-one-index.plan
new file mode 100644
index 0000000..d4e03b8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multi-indexes/skip-one-index.plan
@@ -0,0 +1,26 @@
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    -- STREAM_PROJECT  |PARTITIONED|
+      -- STREAM_SELECT  |PARTITIONED|
+        -- ASSIGN  |PARTITIONED|
+          -- STREAM_PROJECT  |PARTITIONED|
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              -- BTREE_SEARCH  |PARTITIONED|
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  -- INTERSECT  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- STABLE_SORT [$$29(ASC)]  |PARTITIONED|
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          -- STREAM_PROJECT  |PARTITIONED|
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              -- BTREE_SEARCH  |PARTITIONED|
+                                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                  -- ASSIGN  |PARTITIONED|
+                                    -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- STABLE_SORT [$$31(ASC)]  |PARTITIONED|
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          -- LENGTH_PARTITIONED_INVERTED_INDEX_SEARCH  |PARTITIONED|
+                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                              -- ASSIGN  |PARTITIONED|
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/multi-indexes/skip-two-index.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multi-indexes/skip-two-index.plan
new file mode 100644
index 0000000..05b78b8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/multi-indexes/skip-two-index.plan
@@ -0,0 +1,15 @@
+-- DISTRIBUTE_RESULT  |PARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+    -- STREAM_PROJECT  |PARTITIONED|
+      -- STREAM_SELECT  |PARTITIONED|
+        -- ASSIGN  |PARTITIONED|
+          -- STREAM_PROJECT  |PARTITIONED|
+            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+              -- BTREE_SEARCH  |PARTITIONED|
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  -- STABLE_SORT [$$27(ASC)]  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- LENGTH_PARTITIONED_INVERTED_INDEX_SEARCH  |PARTITIONED|
+                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                          -- ASSIGN  |PARTITIONED|
+                            -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/optimizerts/results/query-ASTERIXDB-1671.plan b/asterixdb/asterix-app/src/test/resources/optimizerts/results/query-ASTERIXDB-1671.plan
new file mode 100644
index 0000000..6b370af
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/results/query-ASTERIXDB-1671.plan
@@ -0,0 +1,17 @@
+-- DISTRIBUTE_RESULT  |UNPARTITIONED|
+  -- ONE_TO_ONE_EXCHANGE  |UNPARTITIONED|
+    -- STREAM_LIMIT  |UNPARTITIONED|
+      -- STREAM_PROJECT  |PARTITIONED|
+        -- ASSIGN  |PARTITIONED|
+          -- STREAM_PROJECT  |PARTITIONED|
+            -- SORT_MERGE_EXCHANGE [$$11(ASC), $$12(ASC) ]  |PARTITIONED|
+              -- STREAM_LIMIT  |PARTITIONED|
+                -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                  -- STABLE_SORT [topK: 0] [$$11(ASC), $$12(ASC)]  |PARTITIONED|
+                    -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                      -- ASSIGN  |PARTITIONED|
+                        -- STREAM_PROJECT  |PARTITIONED|
+                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                            -- DATASOURCE_SCAN  |PARTITIONED|
+                              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
+                                -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.1.ddl.aql
similarity index 71%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.1.ddl.aql
index af2f691..faf6082 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.1.ddl.aql
@@ -16,4 +16,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+/*
+* Description  : Verifies the fix for corrupted polygon values constructed from an open list of doubles
+* Expected Res : Success
+* Issue        : 1627
+* Date         : 07th Oct. 2016
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type StateType as {
+id: int32
+}
+
+create dataset States(StateType)
+primary key id
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.2.update.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.2.update.aql
new file mode 100644
index 0000000..5beb303
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.2.update.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+* Description  : Verifies the fix for corrupted polygon values constructed from an open list of doubles
+* Expected Res : Success
+* Issue        : 1627
+* Date         : 07th Oct. 2016
+*/
+use dataverse test;
+insert into dataset States (
+{
+"id":0,
+"coordinates":[ -87.359296, 35.00118, -85.606675, 34.984749, -85.431413, 34.124869, -85.184951, 32.859696, -85.069935, 32.580372, -84.960397, 32.421541, -85.004212, 32.322956, -84.889196, 32.262709, -85.058981, 32.13674, -85.053504, 32.01077, -85.141136, 31.840985, -85.042551, 31.539753, -85.113751, 31.27686, -85.004212, 31.003013, -85.497137, 30.997536, -87.600282, 30.997536, -87.633143, 30.86609, -87.408589, 30.674397, -87.446927, 30.510088, -87.37025, 30.427934, -87.518128, 30.280057, -87.655051, 30.247195, -87.90699, 30.411504, -87.934375, 30.657966, -88.011052, 30.685351, -88.10416, 30.499135, -88.137022, 30.318396, -88.394438, 30.367688, -88.471115, 31.895754, -88.241084, 33.796253, -88.098683, 34.891641, -88.202745, 34.995703, -87.359296, 35.00118, -94.473842, 36.501861, -90.152536, 36.496384, -90.064905, 36.304691, -90.218259, 36.184199, -90.377091, 35.997983, -89.730812, 35.997983, -89.763673, 35.811767, -89.911551, 35.756997, -89.944412, 35.603643, -90.130628, 35.439335, -90.114197, 35.198349, -90.212782, 35.023087, -90.311367, 34.995703, -90.251121, 34.908072, -90.409952, 34.831394, -90.481152, 34.661609, -90.585214, 34.617794, -90.568783, 34.420624, -90.749522, 34.365854, -90.744046, 34.300131, -90.952169, 34.135823, -90.891923, 34.026284, -91.072662, 33.867453, -91.231493, 33.560744, -91.056231, 33.429298, -91.143862, 33.347144, -91.089093, 33.13902, -91.16577, 33.002096, -93.608485, 33.018527, -94.041164, 33.018527, -94.041164, 33.54979, -94.183564, 33.593606, -94.380734, 33.544313, -94.484796, 33.637421, -94.430026, 35.395519, -94.616242, 36.501861, -94.473842, 36.501861, -109.042503, 37.000263, -109.04798, 31.331629, -111.074448, 31.331629, -112.246513, 31.704061, -114.815198, 32.492741, -114.72209, 32.717295, -114.524921, 32.755634, -114.470151, 32.843265, -114.524921, 33.029481, -114.661844, 33.034958, -114.727567, 33.40739, -114.524921, 33.54979, -114.497536, 33.697668, -114.535874, 33.933176, -114.415382, 34.108438, -114.256551, 34.174162, -114.136058, 34.305608, -114.333228, 34.448009, -114.470151, 34.710902, -114.634459, 34.87521, -114.634459, 35.00118, -114.574213, 35.138103, -114.596121, 35.324319, -114.678275, 35.516012, -114.738521, 36.102045, -114.371566, 36.140383, -114.251074, 36.01989, -114.152489, 36.025367, -114.048427, 36.195153, -114.048427, 37.000263, -110.499369, 37.00574, -109.042503, 37.000263, -123.233256, 42.006186, -122.378853, 42.011663, -121.037003, 41.995232, -120.001861, 41.995232, -119.996384, 40.264519, -120.001861, 38.999346, -118.71478, 38.101128, -117.498899, 37.21934, -116.540435, 36.501861, -115.85034, 35.970598, -114.634459, 35.00118, -114.634459, 34.87521, -114.470151, 34.710902, -114.333228, 34.448009, -114.136058, 34.305608, -114.256551, 34.174162, -114.415382, 34.108438, -114.535874, 33.933176, -114.497536, 33.697668, -114.524921, 33.54979, -114.727567, 33.40739, -114.661844, 33.034958, -114.524921, 33.029481, -114.470151, 32.843265, -114.524921, 32.755634, -114.72209, 32.717295, -116.04751, 32.624187, -117.126467, 32.536556, -117.24696, 32.668003, -117.252437, 32.876127, -117.329114, 33.122589, -117.471515, 33.297851, -117.7837, 33.538836, -118.183517, 33.763391, -118.260194, 33.703145, -118.413548, 33.741483, -118.391641, 33.840068, -118.566903, 34.042715, -118.802411, 33.998899, -119.218659, 34.146777, -119.278905, 34.26727, -119.558229, 34.415147, -119.875891, 34.40967, -120.138784, 34.475393, -120.472878, 34.448009, -120.64814, 34.579455, -120.609801, 34.858779, -120.670048, 34.902595, -120.631709, 35.099764, -120.894602, 
35.247642, -120.905556, 35.450289, -121.004141, 35.461243, -121.168449, 35.636505, -121.283465, 35.674843, -121.332757, 35.784382, -121.716143, 36.195153, -121.896882, 36.315645, -121.935221, 36.638785, -121.858544, 36.6114, -121.787344, 36.803093, -121.929744, 36.978355, -122.105006, 36.956447, -122.335038, 37.115279, -122.417192, 37.241248, -122.400761, 37.361741, -122.515777, 37.520572, -122.515777, 37.783465, -122.329561, 37.783465, -122.406238, 38.15042, -122.488392, 38.112082, -122.504823, 37.931343, -122.701993, 37.893004, -122.937501, 38.029928, -122.97584, 38.265436, -123.129194, 38.451652, -123.331841, 38.566668, -123.44138, 38.698114, -123.737134, 38.95553, -123.687842, 39.032208, -123.824765, 39.366301, -123.764519, 39.552517, -123.85215, 39.831841, -124.109566, 40.105688, -124.361506, 40.259042, -124.410798, 40.439781, -124.158859, 40.877937, -124.109566, 41.025814, -124.158859, 41.14083, -124.065751, 41.442061, -124.147905, 41.715908, -124.257444, 41.781632, -124.213628, 42.000709, -123.233256, 42.006186, -107.919731, 41.003906, -105.728954, 40.998429, -104.053011, 41.003906, -102.053927, 41.003906, -102.053927, 40.001626, -102.042974, 36.994786, -103.001438, 37.000263, -104.337812, 36.994786, -106.868158, 36.994786, -107.421329, 37.000263, -109.042503, 37.000263, -109.042503, 38.166851, -109.058934, 38.27639, -109.053457, 39.125316, -109.04798, 40.998429, -107.919731, 41.003906, -73.053528, 42.039048, -71.799309, 42.022617, -71.799309, 42.006186, -71.799309, 41.414677, -71.859555, 41.321569, -71.947186, 41.338, -72.385341, 41.261322, -72.905651, 41.28323, -73.130205, 41.146307, -73.371191, 41.102491, -73.655992, 40.987475, -73.727192, 41.102491, -73.48073, 41.21203, -73.55193, 41.294184, -73.486206, 42.050002, -73.053528, 42.039048, -77.035264, 38.993869, -76.909294, 38.895284, -77.040741, 38.791222, -77.117418, 38.933623, -77.035264, 38.993869, -75.414089, 39.804456, -75.507197, 39.683964, -75.611259, 39.61824, -75.589352, 39.459409, -75.441474, 39.311532, -75.403136, 39.065069, -75.189535, 38.807653, -75.09095, 38.796699, -75.047134, 38.451652, -75.693413, 38.462606, -75.786521, 39.722302, -75.616736, 39.831841, -75.414089, 39.804456, -85.497137, 30.997536, -85.004212, 31.003013, -84.867289, 30.712735, -83.498053, 30.647012, -82.216449, 30.570335, -82.167157, 30.356734, -82.046664, 30.362211, -82.002849, 30.564858, -82.041187, 30.751074, -81.948079, 30.827751, -81.718048, 30.745597, -81.444201, 30.707258, -81.383954, 30.27458, -81.257985, 29.787132, -80.967707, 29.14633, -80.524075, 28.461713, -80.589798, 28.41242, -80.56789, 28.094758, -80.381674, 27.738757, -80.091397, 27.021277, -80.03115, 26.796723, -80.036627, 26.566691, -80.146166, 25.739673, -80.239274, 25.723243, -80.337859, 25.465826, -80.304997, 25.383672, -80.49669, 25.197456, -80.573367, 25.241272, -80.759583, 25.164595, -81.077246, 25.120779, -81.170354, 25.224841, -81.126538, 25.378195, -81.351093, 25.821827, -81.526355, 25.903982, -81.679709, 25.843735, -81.800202, 26.090198, -81.833064, 26.292844, -82.041187, 26.517399, -82.09048, 26.665276, -82.057618, 26.878877, -82.172634, 26.917216, -82.145249, 26.791246, -82.249311, 26.758384, -82.566974, 27.300601, -82.692943, 27.437525, -82.391711, 27.837342, -82.588881, 27.815434, -82.720328, 27.689464, -82.851774, 27.886634, -82.676512, 28.434328, -82.643651, 28.888914, -82.764143, 28.998453, -82.802482, 29.14633, -82.994175, 29.179192, -83.218729, 29.420177, -83.399469, 29.518762, -83.410422, 29.66664, -83.536392, 29.721409, -83.640454, 29.885717, -84.02384, 
30.104795, -84.357933, 30.055502, -84.341502, 29.902148, -84.451041, 29.929533, -84.867289, 29.743317, -85.310921, 29.699501, -85.299967, 29.80904, -85.404029, 29.940487, -85.924338, 30.236241, -86.29677, 30.362211, -86.630863, 30.395073, -86.910187, 30.373165, -87.518128, 30.280057, -87.37025, 30.427934, -87.446927, 30.510088, -87.408589, 30.674397, -87.633143, 30.86609, -87.600282, 30.997536, -85.497137, 30.997536, -83.109191, 35.00118, -83.322791, 34.787579, -83.339222, 34.683517, -83.005129, 34.469916, -82.901067, 34.486347, -82.747713, 34.26727, -82.714851, 34.152254, -82.55602, 33.94413, -82.325988, 33.81816, -82.194542, 33.631944, -81.926172, 33.462159, -81.937125, 33.347144, -81.761863, 33.160928, -81.493493, 33.007573, -81.42777, 32.843265, -81.416816, 32.629664, -81.279893, 32.558464, -81.121061, 32.290094, -81.115584, 32.120309, -80.885553, 32.032678, -81.132015, 31.693108, -81.175831, 31.517845, -81.279893, 31.364491, -81.290846, 31.20566, -81.400385, 31.13446, -81.444201, 30.707258, -81.718048, 30.745597, -81.948079, 30.827751, -82.041187, 30.751074, -82.002849, 30.564858, -82.046664, 30.362211, -82.167157, 30.356734, -82.216449, 30.570335, -83.498053, 30.647012, -84.867289, 30.712735, -85.004212, 31.003013, -85.113751, 31.27686, -85.042551, 31.539753, -85.141136, 31.840985, -85.053504, 32.01077, -85.058981, 32.13674, -84.889196, 32.262709, -85.004212, 32.322956, -84.960397, 32.421541, -85.069935, 32.580372, -85.184951, 32.859696, -85.431413, 34.124869, -85.606675, 34.984749, -84.319594, 34.990226, -83.618546, 34.984749, -83.109191, 35.00118 ]}
+)
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.3.query.aql
similarity index 76%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.3.query.aql
index af2f691..3ae56a4 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.3.query.aql
@@ -16,4 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+/*
+* Description  : Verifies the fix for corrupted polygon values constructed from an open list of doubles
+* Expected Res : Success
+* Issue        : 1627
+* Date         : 07th Oct. 2016
+*/
+use dataverse test;
+
+for $x in dataset States
+return create-polygon($x.coordinates)
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.5.query.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.5.pollquery.aql
similarity index 98%
rename from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.5.query.aql
rename to asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.5.pollquery.aql
index 8879fa8..f095804 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.5.query.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.5.pollquery.aql
@@ -25,6 +25,7 @@
  * Expected Res : Success
  * Date         : 23rd Apr 2013
  */
+// polltimeoutsecs=30
 use dataverse externallibtest;
 
 for $x in dataset TweetsFeedIngest
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.1.ddl.sqlpp
new file mode 100644
index 0000000..aa21431
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.1.ddl.sqlpp
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  dataverse tpch if exists;
+create  dataverse tpch;
+
+use tpch;
+
+
+create type LineItemType as
+ closed {
+  l_orderkey : bigint,
+  l_partkey : bigint,
+  l_suppkey : bigint,
+  l_linenumber : bigint,
+  l_quantity : double,
+  l_extendedprice : double,
+  l_discount : double,
+  l_tax : double,
+  l_returnflag : string,
+  l_linestatus : string,
+  l_shipdate : string,
+  l_commitdate : string,
+  l_receiptdate : string,
+  l_shipinstruct : string,
+  l_shipmode : string,
+  l_comment : string
+}
+
+create  dataset LineItem(LineItemType) primary key l_orderkey,l_linenumber;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.2.update.sqlpp
similarity index 82%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.2.update.sqlpp
index af2f691..5fe734c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.2.update.sqlpp
@@ -16,4 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpch;
+
+
+load  dataset LineItem using localfs ((`path`=`asterix_nc1://data/tpch0.001/lineitem.tbl`),(`format`=`delimited-text`),(`delimiter`=`|`)) pre-sorted;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.3.query.sqlpp
index af2f691..51c4ceb 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671-2/query-ASTERIXDB-1671-2.3.query.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+USE tpch;
+
+-- Tests that the query with LIMIT -1 can be executed correctly.
+
+SELECT  *
+FROM  LineItem l
+ORDER BY l.l_returnflag, l.l_linestatus
+LIMIT -1;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.1.ddl.sqlpp
new file mode 100644
index 0000000..aa21431
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.1.ddl.sqlpp
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  dataverse tpch if exists;
+create  dataverse tpch;
+
+use tpch;
+
+
+create type LineItemType as
+ closed {
+  l_orderkey : bigint,
+  l_partkey : bigint,
+  l_suppkey : bigint,
+  l_linenumber : bigint,
+  l_quantity : double,
+  l_extendedprice : double,
+  l_discount : double,
+  l_tax : double,
+  l_returnflag : string,
+  l_linestatus : string,
+  l_shipdate : string,
+  l_commitdate : string,
+  l_receiptdate : string,
+  l_shipinstruct : string,
+  l_shipmode : string,
+  l_comment : string
+}
+
+create  dataset LineItem(LineItemType) primary key l_orderkey,l_linenumber;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.2.update.sqlpp
similarity index 82%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.2.update.sqlpp
index af2f691..5fe734c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.2.update.sqlpp
@@ -16,4 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpch;
+
+
+load  dataset LineItem using localfs ((`path`=`asterix_nc1://data/tpch0.001/lineitem.tbl`),(`format`=`delimited-text`),(`delimiter`=`|`)) pre-sorted;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.3.query.sqlpp
similarity index 83%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.3.query.sqlpp
index af2f691..5a51067 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.3.query.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+USE tpch;
+
+-- Tests that the query with LIMIT 0 can be executed correctly.
+
+SELECT  *
+FROM  LineItem l
+ORDER BY l.l_returnflag, l.l_linestatus
+LIMIT 0;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/null-missing/query-ASTERIXDB-1689/query-ASTERIXDB-1689.1.query.sqlpp
similarity index 91%
rename from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
rename to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/null-missing/query-ASTERIXDB-1689/query-ASTERIXDB-1689.1.query.sqlpp
index af2f691..51d6df8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/null-missing/query-ASTERIXDB-1689/query-ASTERIXDB-1689.1.query.sqlpp
@@ -16,4 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+SELECT compare
+FROM [ {"a":1} ] t
+LET compare = (t.b is null);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_04/everysat_04.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_04/everysat_04.3.query.sqlpp
index 06eb088..113bedd 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_04/everysat_04.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_04/everysat_04.3.query.sqlpp
@@ -23,7 +23,7 @@
 
 use test;
 
-with  x as [every x in [false,false] satisfies x,every x in [true,false] satisfies x,every x in [false,true] satisfies x,every x in [true,true] satisfies x,every x in [false,false] satisfies not x,every x in [true,false] satisfies not x,every x in [false,true] satisfies not x,every x in [true,true] satisfies not x]
+with  x as [every x in [false,false] satisfies x end,every x in [true,false] satisfies x,every x in [false,true] satisfies x,every x in [true,true] satisfies x,every x in [false,false] satisfies not x,every x in [true,false] satisfies not x,every x in [false,true] satisfies not x,every x in [true,true] satisfies not x]
 select element i
 from  x as i
 ;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.ddl.sqlpp
similarity index 68%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.ddl.sqlpp
index af2f691..5f8e5a9 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.ddl.sqlpp
@@ -16,4 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+drop  dataverse TinySocial if exists;
+create  dataverse TinySocial;
+
+use TinySocial;
+
+
+create type FacebookUserType as
+ open {
+  id : bigint
+}
+
+create type FacebookMessageType as
+ open {
+  `message-id` : bigint
+}
+
+create  dataset FacebookUsers(FacebookUserType) primary key id;
+
+create  dataset FacebookMessages(FacebookMessageType) primary key `message-id`;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.2.update.sqlpp
similarity index 76%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.2.update.sqlpp
index af2f691..b49fa5b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.2.update.sqlpp
@@ -16,4 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use TinySocial;
+
+load  dataset FacebookUsers using localfs ((`path`=`asterix_nc1://data/tinysocial/fbu.adm`),(`format`=`adm`));
+
+load  dataset FacebookMessages using localfs ((`path`=`asterix_nc1://data/tinysocial/fbm.adm`),(`format`=`adm`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.3.query.sqlpp
similarity index 66%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.3.query.sqlpp
index af2f691..dfa2098 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.3.query.sqlpp
@@ -16,4 +16,24 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+
+USE TinySocial;
+
+WITH nested_msgs AS
+(
+  SELECT fu.name name,
+         (
+           SELECT fu.name name, fm.message msg
+           FROM FacebookMessages fm
+           WHERE fm.`author-id` = fu.id
+           ORDER BY fm.`message-id` DESC
+         ) AS msgs
+  FROM FacebookUsers fu
+)
+
+SELECT VALUE nm
+FROM nested_msgs nm
+WHERE (SOME msg IN nm.msgs SATISFIES msg.name LIKE '%Emory%')
+LIMIT 2;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_06/somesat_06.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_06/somesat_06.3.query.sqlpp
index 8186a43..f8e9bf8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_06/somesat_06.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_06/somesat_06.3.query.sqlpp
@@ -26,7 +26,7 @@
 
 set `import-private-functions` `true`;
 
-with  x as [some x in [false,false] satisfies x,some x in [true,false] satisfies x,some x in [false,true] satisfies x,some x in [true,true] satisfies x,some x in [false,false] satisfies NOT x,some x in [true,false] satisfies not x,some x in [false,true] satisfies NOT x,some x in [true,true] satisfies not x]
+with  x as [some x in [false,false] satisfies x,some x in [true,false] satisfies x,some x in [false,true] satisfies x,some x in [true,true] satisfies x,some x in [false,false] satisfies NOT x,some x in [true,false] satisfies not x,some x in [false,true] satisfies NOT x,some x in [true,true] satisfies not x end]
 select element i
 from  x as i
 ;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.ddl.sqlpp
similarity index 68%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.ddl.sqlpp
index af2f691..5f8e5a9 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.ddl.sqlpp
@@ -16,4 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+drop  dataverse TinySocial if exists;
+create  dataverse TinySocial;
+
+use TinySocial;
+
+
+create type FacebookUserType as
+ open {
+  id : bigint
+}
+
+create type FacebookMessageType as
+ open {
+  `message-id` : bigint
+}
+
+create  dataset FacebookUsers(FacebookUserType) primary key id;
+
+create  dataset FacebookMessages(FacebookMessageType) primary key `message-id`;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.2.update.sqlpp
similarity index 76%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.2.update.sqlpp
index af2f691..b49fa5b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.2.update.sqlpp
@@ -16,4 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use TinySocial;
+
+load  dataset FacebookUsers using localfs ((`path`=`asterix_nc1://data/tinysocial/fbu.adm`),(`format`=`adm`));
+
+load  dataset FacebookMessages using localfs ((`path`=`asterix_nc1://data/tinysocial/fbm.adm`),(`format`=`adm`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.3.query.sqlpp
similarity index 66%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.3.query.sqlpp
index af2f691..76f8501 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.3.query.sqlpp
@@ -16,4 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+
+USE TinySocial;
+
+WITH nested_msgs AS
+(
+  SELECT fu.id id, fu.name name,
+         (
+           SELECT fu.name name, fm.message msg
+           FROM FacebookMessages fm
+           WHERE fm.`author-id` = fu.id
+         ) AS msgs
+  FROM FacebookUsers fu
+)
+
+SELECT VALUE nm
+FROM nested_msgs nm
+WHERE NOT EXISTS (SELECT * FROM FacebookMessages fm WHERE fm.`author-id` = nm.id)
+ORDER BY nm.id;
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.1.ddl.sqlpp
new file mode 100644
index 0000000..acd3953
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.1.ddl.sqlpp
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.2.update.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.2.update.sqlpp
index af2f691..dfdaad1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.2.update.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.3.query.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.3.query.sqlpp
index af2f691..67f4a14 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q03/q03.3.query.sqlpp
@@ -16,4 +16,25 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+USE tpcds;
+
+
+SELECT dt.d_year
+       ,item.i_brand_id brand_id
+       ,item.i_brand brand
+       ,sum(ss_ext_sales_price) sum_agg
+FROM date_dim dt
+    ,store_sales
+    ,item
+WHERE dt.d_date_sk = store_sales.ss_sold_date_sk
+AND store_sales.ss_item_sk = item.i_item_sk
+AND item.i_manufact_id = 436
+AND dt.d_moy=12
+GROUP BY dt.d_year
+        ,item.i_brand
+        ,item.i_brand_id
+ORDER BY dt.d_year
+         ,sum_agg desc
+         ,brand_id
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.1.ddl.sqlpp
new file mode 100644
index 0000000..5b5281f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.1.ddl.sqlpp
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.customer_demographics_type as
+ closed {
+  cd_demo_sk : bigint,
+  cd_gender : string?,
+  cd_marital_status : string?,
+  cd_education_status : string?,
+  cd_purchase_estimate : bigint?,
+  cd_credit_rating : string?,
+  cd_dep_count : bigint?,
+  cd_dep_employed_count : bigint?,
+  cd_dep_college_count : bigint?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.promotion_type as
+ closed {
+  p_promo_sk : bigint,
+  p_promo_id : string,
+  p_start_date_sk : bigint?,
+  p_end_date_sk : bigint?,
+  p_item_sk : bigint?,
+  p_cost : double?,
+  p_response_target : bigint?,
+  p_promo_name : string?,
+  p_channel_dmail : string?,
+  p_channel_email : string?,
+  p_channel_catalog : string?,
+  p_channel_tv : string?,
+  p_channel_radio : string?,
+  p_channel_press : string?,
+  p_channel_event : string?,
+  p_channel_demo :  string?,
+  p_channel_details : string?,
+  p_purpose : string?,
+  p_discount_active : string?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset customer_demographics(customer_demographics_type) primary key cd_demo_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset promotion (promotion_type) primary key p_promo_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.2.update.sqlpp
new file mode 100644
index 0000000..00091e6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/customer_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset promotion using localfs ((`path`=`asterix_nc1://data/tpcds/promotion.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.3.query.sqlpp
new file mode 100644
index 0000000..14c8840
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q07/q07.3.query.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE tpcds;
+
+
+SELECT  i_item_id,
+        avg(ss_quantity) agg1,
+        avg(ss_list_price) agg2,
+        avg(ss_coupon_amt) agg3,
+        avg(ss_sales_price) agg4
+FROM store_sales, customer_demographics, date_dim, item, promotion
+WHERE ss_sold_date_sk = d_date_sk
+AND   ss_item_sk = i_item_sk
+AND   ss_cdemo_sk = cd_demo_sk
+AND   ss_promo_sk = p_promo_sk
+AND   cd_gender = 'F'
+AND   cd_marital_status = 'W'
+AND   cd_education_status = 'Primary'
+AND   (p_channel_email = 'N' OR p_channel_event = 'N')
+AND   d_year = 1998
+GROUP BY i_item_id
+ORDER BY i_item_id
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.1.ddl.sqlpp
new file mode 100644
index 0000000..5a9dea9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.1.ddl.sqlpp
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.catalog_sales_type as
+ closed {
+  cs_sold_date_sk:           bigint?,
+  cs_sold_time_sk:           bigint?,
+  cs_ship_date_sk:           bigint?,
+  cs_bill_customer_sk:       bigint?,
+  cs_bill_cdemo_sk:          bigint?,
+  cs_bill_hdemo_sk:          bigint?,
+  cs_bill_addr_sk:           bigint?,
+  cs_ship_customer_sk:       bigint?,
+  cs_ship_cdemo_sk:          bigint?,
+  cs_ship_hdemo_sk:          bigint?,
+  cs_ship_addr_sk:           bigint?,
+  cs_call_center_sk:         bigint?,
+  cs_ship_mode_sk:           bigint?,
+  cs_warehouse_sk:           bigint?,
+  cs_item_sk:                bigint,
+  cs_promo_sk:               bigint?,
+  cs_order_number:           bigint,
+  cs_quantity:               bigint?,
+  cs_wholesale_cost:         double?,
+  cs_list_price:             double?,
+  cs_sales_price:            double?,
+  cs_ext_discount_amt:       double?,
+  cs_ext_sales_price:        double?,
+  cs_ext_wholesale_cost:     double?,
+  cs_ext_list_price:         double?,
+  cs_ext_tax:                double?,
+  cs_coupon_amt:             double?,
+  cs_ext_ship_cost:          double?,
+  cs_net_paid:               double?,
+  cs_net_paid_inc_tax:       double?,
+  cs_net_paid_inc_ship:      double?,
+  cs_net_paid_inc_ship_tax:  double?,
+  cs_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset catalog_sales (catalog_sales_type) primary key cs_item_sk, cs_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.2.update.sqlpp
new file mode 100644
index 0000000..5159a42
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset catalog_sales using localfs ((`path`=`asterix_nc1://data/tpcds/catalog_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.3.query.sqlpp
similarity index 61%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.3.query.sqlpp
index af2f691..ceb7134 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q15/q15.3.query.sqlpp
@@ -16,4 +16,25 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+USE tpcds;
+
+
+SELECT ca_zip
+      ,sum(cs_sales_price)
+FROM catalog_sales
+    ,customer
+    ,customer_address
+    ,date_dim
+WHERE cs_bill_customer_sk = c_customer_sk
+AND c_current_addr_sk = ca_address_sk
+AND ( substr(ca_zip,1,5) in ['85669', '86197','88274','83405','86475',
+                             '85392', '85460', '80348', '81792']
+OR ca_state in ['CA','WA','GA']
+OR cs_sales_price > 500)
+AND cs_sold_date_sk = d_date_sk
+AND d_qoy = 2
+AND d_year = 2000
+GROUP BY ca_zip
+ORDER BY ca_zip
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.1.ddl.sqlpp
new file mode 100644
index 0000000..1b30bcd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.1.ddl.sqlpp
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.2.update.sqlpp
new file mode 100644
index 0000000..b59d644
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.2.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.3.query.sqlpp
new file mode 100644
index 0000000..12ca91f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q19/q19.3.query.sqlpp
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE tpcds;
+
+
+SELECT i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact,
+       SUM(ss_ext_sales_price) ext_price
+FROM date_dim, store_sales, item,customer,customer_address,store
+WHERE d_date_sk = ss_sold_date_sk
+AND ss_item_sk = i_item_sk
+AND i_manager_id=7
+AND d_moy=11
+AND d_year=1999
+AND ss_customer_sk = c_customer_sk
+AND c_current_addr_sk = ca_address_sk
+AND substr(ca_zip,1,5) != substr(s_zip,1,5)
+AND ss_store_sk = s_store_sk
+GROUP BY i_brand
+        ,i_brand_id
+        ,i_manufact_id
+        ,i_manufact
+ORDER BY ext_price desc
+         ,i_brand
+         ,i_brand_id
+         ,i_manufact_id
+         ,i_manufact
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.1.ddl.sqlpp
new file mode 100644
index 0000000..de454ba
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.1.ddl.sqlpp
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.inventory_type as
+ closed {
+  inv_date_sk : bigint,
+  inv_item_sk : bigint,
+  inv_warehouse_sk : bigint,
+  inv_quantity_on_hand : bigint?
+}
+
+create type tpcds.warehouse_type as
+ closed {
+  w_warehouse_sk : bigint,
+  w_warehouse_id : string,
+  w_warehouse_name : string?,
+  w_warehouse_sq_ft : bigint?,
+  w_street_number : string?,
+  w_street_name : string?,
+  w_street_type : string?,
+  w_suite_number : string?,
+  w_city : string?,
+  w_county : string?,
+  w_state : string?,
+  w_zip : string?,
+  w_country : string?,
+  w_gmt_offset : double?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset inventory (inventory_type) primary key inv_date_sk, inv_item_sk, inv_warehouse_sk;
+
+create dataset warehouse(warehouse_type) primary key w_warehouse_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.2.update.sqlpp
new file mode 100644
index 0000000..74f893d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset inventory using localfs ((`path`=`asterix_nc1://data/tpcds/inventory.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset warehouse using localfs ((`path`=`asterix_nc1://data/tpcds/warehouse.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.3.query.sqlpp
new file mode 100644
index 0000000..d39c5bd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q21/q21.3.query.sqlpp
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE tpcds;
+
+
+SELECT  *
+FROM (SELECT w_warehouse_name
+            ,i_item_id
+            ,SUM(CASE WHEN date(d_date) < date('1998-04-08')
+                 THEN inv_quantity_on_hand
+                 ELSE 0 END) AS inv_before
+            ,SUM(CASE WHEN date(d_date) >= date('1998-04-08')
+                      THEN inv_quantity_on_hand
+                      ELSE 0 END) AS inv_after
+   FROM inventory
+       ,warehouse
+       ,item
+       ,date_dim
+   WHERE i_current_price >= 0.99
+     AND i_current_price <= 1.49
+     AND i_item_sk          = inv_item_sk
+     AND inv_warehouse_sk   = w_warehouse_sk
+     AND inv_date_sk    = d_date_sk
+     AND date(d_date) >= date('1998-03-09')
+     AND date(d_date) <= date('1998-05-08')
+   GROUP BY w_warehouse_name, i_item_id) x
+   WHERE (CASE WHEN inv_before > 0
+          THEN inv_after / inv_before
+          ELSE null
+          END) >= 2.0/3.0
+   AND (CASE WHEN inv_before > 0
+             THEN inv_after / inv_before
+             ELSE null
+             END) <= 3.0/2.0
+ ORDER BY w_warehouse_name, i_item_id
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.1.ddl.sqlpp
new file mode 100644
index 0000000..b7b449f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.1.ddl.sqlpp
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.store_returns_type as
+ closed {
+  sr_returned_date_sk : bigint?,
+  sr_return_time_sk : bigint?,
+  sr_item_sk : bigint,
+  sr_customer_sk : bigint?,
+  sr_cdemo_sk : bigint?,
+  sr_hdemo_sk : bigint?,
+  sr_addr_sk : bigint?,
+  sr_store_sk : bigint?,
+  sr_reason_sk : bigint?,
+  sr_ticket_number : bigint,
+  sr_return_quantity : bigint?,
+  sr_return_amt : double,
+  sr_return_tax : double?,
+  sr_return_amt_inc_tax : double?,
+  sr_fee : double?,
+  sr_return_ship_cost : double?,
+  sr_refunded_cash : double?,
+  sr_reversed_charge : double?,
+  sr_store_credit : double?,
+  sr_net_loss : double?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset store_returns (store_returns_type) primary key sr_item_sk, sr_ticket_number;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.2.update.sqlpp
new file mode 100644
index 0000000..a423018
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.2.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_returns using localfs ((`path`=`asterix_nc1://data/tpcds/store_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.3.query.sqlpp
new file mode 100644
index 0000000..1286f34
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24a/q24a.3.query.sqlpp
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+with ssales as
+(SELECT c_last_name
+      ,c_first_name
+      ,s_store_name
+      ,ca_state
+      ,s_state
+      ,i_color
+      ,i_current_price
+      ,i_manager_id
+      ,i_units
+      ,i_size
+      ,SUM(ss_sales_price) netpaid
+FROM store_sales
+    ,store_returns
+    ,store
+    ,item
+    ,customer
+    ,customer_address
+WHERE ss_ticket_number = sr_ticket_number
+  AND ss_item_sk = sr_item_sk
+  AND ss_customer_sk = c_customer_sk
+  AND ss_item_sk = i_item_sk
+  AND ss_store_sk = s_store_sk
+  AND c_birth_country = UPPERCASE(ca_country)
+  AND s_zip = ca_zip
+AND s_market_id=7
+GROUP BY c_last_name
+        ,c_first_name
+        ,s_store_name
+        ,ca_state
+        ,s_state
+        ,i_color
+        ,i_current_price
+        ,i_manager_id
+        ,i_units
+        ,i_size)
+SELECT c_last_name
+      ,c_first_name
+      ,s_store_name
+      ,SUM(netpaid) paid
+FROM ssales
+WHERE i_color = 'orchid'
+GROUP BY c_last_name
+        ,c_first_name
+        ,s_store_name
+HAVING paid > (SELECT value (0.05*avg(netpaid))
+                                 FROM ssales)[0]
+;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.1.ddl.sqlpp
new file mode 100644
index 0000000..b7b449f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.1.ddl.sqlpp
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.store_returns_type as
+ closed {
+  sr_returned_date_sk : bigint?,
+  sr_return_time_sk : bigint?,
+  sr_item_sk : bigint,
+  sr_customer_sk : bigint?,
+  sr_cdemo_sk : bigint?,
+  sr_hdemo_sk : bigint?,
+  sr_addr_sk : bigint?,
+  sr_store_sk : bigint?,
+  sr_reason_sk : bigint?,
+  sr_ticket_number : bigint,
+  sr_return_quantity : bigint?,
+  sr_return_amt : double,
+  sr_return_tax : double?,
+  sr_return_amt_inc_tax : double?,
+  sr_fee : double?,
+  sr_return_ship_cost : double?,
+  sr_refunded_cash : double?,
+  sr_reversed_charge : double?,
+  sr_store_credit : double?,
+  sr_net_loss : double?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset store_returns (store_returns_type) primary key sr_item_sk, sr_ticket_number;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.2.update.sqlpp
new file mode 100644
index 0000000..a423018
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.2.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_returns using localfs ((`path`=`asterix_nc1://data/tpcds/store_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.3.query.sqlpp
new file mode 100644
index 0000000..07fa88f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q24b/q24b.3.query.sqlpp
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+WITH ssales AS
+(SELECT c_last_name
+      ,c_first_name
+      ,s_store_name
+      ,ca_state
+      ,s_state
+      ,i_color
+      ,i_current_price
+      ,i_manager_id
+      ,i_units
+      ,i_size
+      ,SUM(ss_sales_price) netpaid
+FROM store_sales
+    ,store_returns
+    ,store
+    ,item
+    ,customer
+    ,customer_address
+WHERE ss_ticket_number = sr_ticket_number
+  AND ss_item_sk = sr_item_sk
+  AND ss_customer_sk = c_customer_sk
+  AND ss_item_sk = i_item_sk
+  AND ss_store_sk = s_store_sk
+  AND c_birth_country = uppercase(ca_country)
+  AND s_zip = ca_zip
+  AND s_market_id = 7
+GROUP BY c_last_name
+        ,c_first_name
+        ,s_store_name
+        ,ca_state
+        ,s_state
+        ,i_color
+        ,i_current_price
+        ,i_manager_id
+        ,i_units
+        ,i_size)
+SELECT c_last_name
+      ,c_first_name
+      ,s_store_name
+      ,SUM(netpaid) paid
+FROM ssales
+WHERE i_color = 'chiffon'
+GROUP BY c_last_name
+        ,c_first_name
+        ,s_store_name
+HAVING SUM(netpaid) > (SELECT value (0.05*avg(netpaid))
+                           FROM ssales)[0]
+;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.1.ddl.sqlpp
new file mode 100644
index 0000000..418a756
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.1.ddl.sqlpp
@@ -0,0 +1,215 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.store_returns_type as
+ closed {
+  sr_returned_date_sk : bigint?,
+  sr_return_time_sk : bigint?,
+  sr_item_sk : bigint,
+  sr_customer_sk : bigint?,
+  sr_cdemo_sk : bigint?,
+  sr_hdemo_sk : bigint?,
+  sr_addr_sk : bigint?,
+  sr_store_sk : bigint?,
+  sr_reason_sk : bigint?,
+  sr_ticket_number : bigint,
+  sr_return_quantity : bigint?,
+  sr_return_amt : double,
+  sr_return_tax : double?,
+  sr_return_amt_inc_tax : double?,
+  sr_fee : double?,
+  sr_return_ship_cost : double?,
+  sr_refunded_cash : double?,
+  sr_reversed_charge : double?,
+  sr_store_credit : double?,
+  sr_net_loss : double?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.catalog_sales_type as
+ closed {
+  cs_sold_date_sk:           bigint?,
+  cs_sold_time_sk:           bigint?,
+  cs_ship_date_sk:           bigint?,
+  cs_bill_customer_sk:       bigint?,
+  cs_bill_cdemo_sk:          bigint?,
+  cs_bill_hdemo_sk:          bigint?,
+  cs_bill_addr_sk:           bigint?,
+  cs_ship_customer_sk:       bigint?,
+  cs_ship_cdemo_sk:          bigint?,
+  cs_ship_hdemo_sk:          bigint?,
+  cs_ship_addr_sk:           bigint?,
+  cs_call_center_sk:         bigint?,
+  cs_ship_mode_sk:           bigint?,
+  cs_warehouse_sk:           bigint?,
+  cs_item_sk:                bigint,
+  cs_promo_sk:               bigint?,
+  cs_order_number:           bigint,
+  cs_quantity:               bigint?,
+  cs_wholesale_cost:         double?,
+  cs_list_price:             double?,
+  cs_sales_price:            double?,
+  cs_ext_discount_amt:       double?,
+  cs_ext_sales_price:        double?,
+  cs_ext_wholesale_cost:     double?,
+  cs_ext_list_price:         double?,
+  cs_ext_tax:                double?,
+  cs_coupon_amt:             double?,
+  cs_ext_ship_cost:          double?,
+  cs_net_paid:               double?,
+  cs_net_paid_inc_tax:       double?,
+  cs_net_paid_inc_ship:      double?,
+  cs_net_paid_inc_ship_tax:  double?,
+  cs_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset store_returns (store_returns_type) primary key sr_item_sk, sr_ticket_number;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset catalog_sales (catalog_sales_type) primary key cs_item_sk, cs_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.2.update.sqlpp
new file mode 100644
index 0000000..3bf9f8d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.2.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_returns using localfs ((`path`=`asterix_nc1://data/tpcds/store_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset catalog_sales using localfs ((`path`=`asterix_nc1://data/tpcds/catalog_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.3.query.sqlpp
new file mode 100644
index 0000000..8245836
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q25/q25.3.query.sqlpp
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT
+ i_item_id
+ ,i_item_desc
+ ,s_store_id
+ ,s_store_name
+ ,SUM(ss_net_profit) as store_sales_profit
+ ,SUM(sr_net_loss) as store_returns_loss
+ ,SUM(cs_net_profit) as catalog_sales_profit
+ FROM
+ store_sales
+ ,store_returns
+ ,catalog_sales
+ ,date_dim d1
+ ,date_dim d2
+ ,date_dim d3
+ ,store
+ ,item
+ WHERE
+ d1.d_moy = 4
+ AND d1.d_year = 2000
+ AND d1.d_date_sk = ss_sold_date_sk
+ AND i_item_sk = ss_item_sk
+ AND s_store_sk = ss_store_sk
+ AND ss_customer_sk = sr_customer_sk
+ AND ss_item_sk = sr_item_sk
+ AND ss_ticket_number = sr_ticket_number
+ AND sr_returned_date_sk = d2.d_date_sk
+ AND d2.d_moy >= 4
+ AND d2.d_moy <= 10
+ AND d2.d_year = 2000
+ AND sr_customer_sk = cs_bill_customer_sk
+ AND sr_item_sk = cs_item_sk
+ AND cs_sold_date_sk = d3.d_date_sk
+ AND d3.d_moy >= 4
+ AND d3.d_moy <= 10
+ AND d3.d_year = 2000
+ GROUP BY
+ i_item_id
+ ,i_item_desc
+ ,s_store_id
+ ,s_store_name
+ ORDER BY
+ i_item_id
+ ,i_item_desc
+ ,s_store_id
+ ,s_store_name
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.1.ddl.sqlpp
new file mode 100644
index 0000000..2a668dd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.1.ddl.sqlpp
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.customer_demographics_type as
+ closed {
+  cd_demo_sk : bigint,
+  cd_gender : string?,
+  cd_marital_status : string?,
+  cd_education_status : string?,
+  cd_purchase_estimate : bigint?,
+  cd_credit_rating : string?,
+  cd_dep_count : bigint?,
+  cd_dep_employed_count : bigint?,
+  cd_dep_college_count : bigint?
+}
+create type tpcds.catalog_sales_type as
+ closed {
+  cs_sold_date_sk:           bigint?,
+  cs_sold_time_sk:           bigint?,
+  cs_ship_date_sk:           bigint?,
+  cs_bill_customer_sk:       bigint?,
+  cs_bill_cdemo_sk:          bigint?,
+  cs_bill_hdemo_sk:          bigint?,
+  cs_bill_addr_sk:           bigint?,
+  cs_ship_customer_sk:       bigint?,
+  cs_ship_cdemo_sk:          bigint?,
+  cs_ship_hdemo_sk:          bigint?,
+  cs_ship_addr_sk:           bigint?,
+  cs_call_center_sk:         bigint?,
+  cs_ship_mode_sk:           bigint?,
+  cs_warehouse_sk:           bigint?,
+  cs_item_sk:                bigint,
+  cs_promo_sk:               bigint?,
+  cs_order_number:           bigint,
+  cs_quantity:               bigint?,
+  cs_wholesale_cost:         double?,
+  cs_list_price:             double?,
+  cs_sales_price:            double?,
+  cs_ext_discount_amt:       double?,
+  cs_ext_sales_price:        double?,
+  cs_ext_wholesale_cost:     double?,
+  cs_ext_list_price:         double?,
+  cs_ext_tax:                double?,
+  cs_coupon_amt:             double?,
+  cs_ext_ship_cost:          double?,
+  cs_net_paid:               double?,
+  cs_net_paid_inc_tax:       double?,
+  cs_net_paid_inc_ship:      double?,
+  cs_net_paid_inc_ship_tax:  double?,
+  cs_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.promotion_type as
+ closed {
+  p_promo_sk : bigint,
+  p_promo_id : string,
+  p_start_date_sk : bigint?,
+  p_end_date_sk : bigint?,
+  p_item_sk : bigint?,
+  p_cost : double?,
+  p_response_target : bigint?,
+  p_promo_name : string?,
+  p_channel_dmail : string?,
+  p_channel_email : string?,
+  p_channel_catalog : string?,
+  p_channel_tv : string?,
+  p_channel_radio : string?,
+  p_channel_press : string?,
+  p_channel_event : string?,
+  p_channel_demo : string?,
+  p_channel_details : string?,
+  p_purpose : string?,
+  p_discount_active : string?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset customer_demographics(customer_demographics_type) primary key cd_demo_sk;
+
+create dataset catalog_sales (catalog_sales_type) primary key cs_item_sk, cs_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset promotion (promotion_type) primary key p_promo_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.2.update.sqlpp
new file mode 100644
index 0000000..bcbd71f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/customer_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset catalog_sales using localfs ((`path`=`asterix_nc1://data/tpcds/catalog_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset promotion using localfs ((`path`=`asterix_nc1://data/tpcds/promotion.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.3.query.sqlpp
new file mode 100644
index 0000000..d5c38ce
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q26/q26.3.query.sqlpp
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT  i_item_id,
+        avg(cs_quantity) agg1,
+        avg(cs_list_price) agg2,
+        avg(cs_coupon_amt) agg3,
+        avg(cs_sales_price) agg4
+ FROM catalog_sales, customer_demographics, date_dim, item, promotion
+ WHERE cs_sold_date_sk = d_date_sk AND
+       cs_item_sk = i_item_sk AND
+       cs_bill_cdemo_sk = cd_demo_sk AND
+       cs_promo_sk = p_promo_sk AND
+       cd_gender = 'F' AND
+       cd_marital_status = 'W' AND
+       cd_education_status = 'Primary' AND
+       (p_channel_email = 'N' OR p_channel_event = 'N') AND
+       d_year = 1998
+ GROUP BY i_item_id
+ ORDER BY i_item_id
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.1.ddl.sqlpp
new file mode 100644
index 0000000..418a756
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.1.ddl.sqlpp
@@ -0,0 +1,215 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.store_returns_type as
+ closed {
+  sr_returned_date_sk : bigint?,
+  sr_return_time_sk : bigint?,
+  sr_item_sk : bigint,
+  sr_customer_sk : bigint?,
+  sr_cdemo_sk : bigint?,
+  sr_hdemo_sk : bigint?,
+  sr_addr_sk : bigint?,
+  sr_store_sk : bigint?,
+  sr_reason_sk : bigint?,
+  sr_ticket_number : bigint,
+  sr_return_quantity : bigint?,
+  sr_return_amt : double,
+  sr_return_tax : double?,
+  sr_return_amt_inc_tax : double?,
+  sr_fee : double?,
+  sr_return_ship_cost : double?,
+  sr_refunded_cash : double?,
+  sr_reversed_charge : double?,
+  sr_store_credit : double?,
+  sr_net_loss : double?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.catalog_sales_type as
+ closed {
+  cs_sold_date_sk:           bigint?,
+  cs_sold_time_sk:           bigint?,
+  cs_ship_date_sk:           bigint?,
+  cs_bill_customer_sk:       bigint?,
+  cs_bill_cdemo_sk:          bigint?,
+  cs_bill_hdemo_sk:          bigint?,
+  cs_bill_addr_sk:           bigint?,
+  cs_ship_customer_sk:       bigint?,
+  cs_ship_cdemo_sk:          bigint?,
+  cs_ship_hdemo_sk:          bigint?,
+  cs_ship_addr_sk:           bigint?,
+  cs_call_center_sk:         bigint?,
+  cs_ship_mode_sk:           bigint?,
+  cs_warehouse_sk:           bigint?,
+  cs_item_sk:                bigint,
+  cs_promo_sk:               bigint?,
+  cs_order_number:           bigint,
+  cs_quantity:               bigint?,
+  cs_wholesale_cost:         double?,
+  cs_list_price:             double?,
+  cs_sales_price:            double?,
+  cs_ext_discount_amt:       double?,
+  cs_ext_sales_price:        double?,
+  cs_ext_wholesale_cost:     double?,
+  cs_ext_list_price:         double?,
+  cs_ext_tax:                double?,
+  cs_coupon_amt:             double?,
+  cs_ext_ship_cost:          double?,
+  cs_net_paid:               double?,
+  cs_net_paid_inc_tax:       double?,
+  cs_net_paid_inc_ship:      double?,
+  cs_net_paid_inc_ship_tax:  double?,
+  cs_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset store_returns (store_returns_type) primary key sr_item_sk, sr_ticket_number;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset catalog_sales (catalog_sales_type) primary key cs_item_sk, cs_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.2.update.sqlpp
new file mode 100644
index 0000000..3bf9f8d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.2.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_returns using localfs ((`path`=`asterix_nc1://data/tpcds/store_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset catalog_sales using localfs ((`path`=`asterix_nc1://data/tpcds/catalog_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.3.query.sqlpp
new file mode 100644
index 0000000..e2ba54d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q29/q29.3.query.sqlpp
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE tpcds;
+
+
+SELECT
+     i_item_id
+    ,i_item_desc
+    ,s_store_id
+    ,s_store_name
+    ,SUM(ss_quantity)        as store_sales_quantity
+    ,SUM(sr_return_quantity) as store_returns_quantity
+    ,SUM(cs_quantity)        as catalog_sales_quantity
+ FROM
+    store_sales
+   ,store_returns
+   ,catalog_sales
+   ,date_dim             d1
+   ,date_dim             d2
+   ,date_dim             d3
+   ,store
+   ,item
+ WHERE
+     d1.d_moy               = 4
+ AND d1.d_year              = 1999
+ AND d1.d_date_sk           = ss_sold_date_sk
+ AND i_item_sk              = ss_item_sk
+ AND s_store_sk             = ss_store_sk
+ AND ss_customer_sk         = sr_customer_sk
+ AND ss_item_sk             = sr_item_sk
+ AND ss_ticket_number       = sr_ticket_number
+ AND sr_returned_date_sk    = d2.d_date_sk
+ AND d2.d_moy >= 4 AND  d2.d_moy <= 4 + 3
+ AND d2.d_year              = 1999
+ AND sr_customer_sk         = cs_bill_customer_sk
+ AND sr_item_sk             = cs_item_sk
+ AND cs_sold_date_sk        = d3.d_date_sk
+ AND d3.d_year              IN [1999,1999+1,1999+2]
+ GROUP BY
+    i_item_id
+   ,i_item_desc
+   ,s_store_id
+   ,s_store_name
+ ORDER BY
+    i_item_id
+   ,i_item_desc
+   ,s_store_id
+   ,s_store_name
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.1.ddl.sqlpp
new file mode 100644
index 0000000..86c6792
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.1.ddl.sqlpp
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.web_returns_type as
+ closed {
+  wr_returned_date_sk : bigint?,
+  wr_returned_time_sk : bigint?,
+  wr_item_sk : bigint,
+  wr_refunded_customer_sk : bigint?,
+  wr_refunded_cdemo_sk : bigint?,
+  wr_refunded_hdemo_sk : bigint?,
+  wr_refunded_addr_sk : bigint?,
+  wr_returning_customer_sk : bigint?,
+  wr_returning_cdemo_sk : bigint?,
+  wr_returning_hdemo_sk : bigint?,
+  wr_returning_addr_sk : bigint?,
+  wr_web_page_sk : bigint?,
+  wr_reason_sk : bigint?,
+  wr_order_number : bigint,
+  wr_return_quantity : bigint?,
+  wr_return_amt : double?,
+  wr_return_tax : double?,
+  wr_return_amt_inc_tax : double?,
+  wr_fee : double?,
+  wr_return_ship_cost: double?,
+  wr_refunded_cash: double?,
+  wr_reversed_charge: double?,
+  wr_account_credit: double?,
+  wr_net_loss: double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset web_returns (web_returns_type) primary key wr_item_sk, wr_order_number;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.2.update.sqlpp
new file mode 100644
index 0000000..190a142
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_returns using localfs ((`path`=`asterix_nc1://data/tpcds/web_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.3.query.sqlpp
new file mode 100644
index 0000000..0793006
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q30/q30.3.query.sqlpp
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+WITH customer_total_return as
+ (SELECT wr_returning_customer_sk as ctr_customer_sk
+        ,ca_state as ctr_state,
+  sum(wr_return_amt) as ctr_total_return
+ FROM web_returns
+     ,date_dim
+     ,customer_address
+ where wr_returned_date_sk = d_date_sk
+   AND d_year =2002
+   AND wr_returning_addr_sk = ca_address_sk
+ group by wr_returning_customer_sk
+         ,ca_state)
+  SELECT  c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag
+       ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address
+       ,c_last_review_date,ctr_total_return
+ FROM customer_total_return ctr1
+     ,customer_address
+     ,customer
+ where ctr1.ctr_total_return > (SELECT VALUE (AVG(ctr2.ctr_total_return)*1.2)
+      FROM customer_total_return ctr2
+                     where ctr1.ctr_state = ctr2.ctr_state)[0]
+       AND ca_address_sk = c_current_addr_sk
+       AND ca_state = 'IL'
+       AND ctr1.ctr_customer_sk = c_customer_sk
+ ORDER BY c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag
+                  ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address
+                  ,c_last_review_date,ctr_total_return
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.1.ddl.sqlpp
new file mode 100644
index 0000000..3762222
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.1.ddl.sqlpp
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.web_sales_type as
+ closed {
+  ws_sold_date_sk : bigint?,
+  ws_sold_time_sk : bigint?,
+  ws_ship_date_sk : bigint?,
+  ws_item_sk : bigint,
+  ws_bill_customer_sk : bigint?,
+  ws_bill_cdemo_sk : bigint?,
+  ws_bill_hdemo_sk : bigint?,
+  ws_bill_addr_sk : bigint?,
+  ws_ship_customer_sk : bigint?,
+  ws_ship_cdemo_sk : bigint?,
+  ws_ship_hdemo_sk : bigint?,
+  ws_ship_addr_sk : bigint?,
+  ws_web_page_sk : bigint?,
+  ws_web_site_sk : bigint?,
+  ws_ship_mode_sk : bigint?,
+  ws_warehouse_sk : bigint?,
+  ws_promo_sk : bigint?,
+  ws_order_number : bigint,
+  ws_quantity : bigint?,
+  ws_wholesale_cost : double?,
+  ws_list_price : double?,
+  ws_sales_price : double?,
+  ws_ext_discount_amt : double?,
+  ws_ext_sales_price : double?,
+  ws_ext_wholesale_cost : double?,
+  ws_ext_list_price : double?,
+  ws_ext_tax : double?,
+  ws_coupon_amt : double?,
+  ws_ext_ship_cost : double?,
+  ws_net_paid : double?,
+  ws_net_paid_inc_tax : double?,
+  ws_net_paid_inc_ship : double?,
+  ws_net_paid_inc_ship_tax : double?,
+  ws_net_profit : double?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset web_sales (web_sales_type) primary key ws_item_sk, ws_order_number;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.2.update.sqlpp
new file mode 100644
index 0000000..b25c37b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_sales using localfs ((`path`=`asterix_nc1://data/tpcds/web_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.3.query.sqlpp
new file mode 100644
index 0000000..b869d5e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q31/q31.3.query.sqlpp
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+WITH ss AS
+ (SELECT ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales
+ FROM store_sales,date_dim,customer_address
+ WHERE ss_sold_date_sk = d_date_sk
+  AND ss_addr_sk=ca_address_sk
+ GROUP BY ca_county,d_qoy, d_year),
+ ws AS
+ (SELECT ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales
+ FROM web_sales,date_dim,customer_address
+ WHERE ws_sold_date_sk = d_date_sk
+  AND ws_bill_addr_sk=ca_address_sk
+ GROUP BY ca_county,d_qoy, d_year)
+ SELECT /* tt */
+        ss1.ca_county
+       ,ss1.d_year
+       ,ws2.web_sales/ws1.web_sales web_q1_q2_increase
+       ,ss2.store_sales/ss1.store_sales store_q1_q2_increase
+       ,ws3.web_sales/ws2.web_sales web_q2_q3_increase
+       ,ss3.store_sales/ss2.store_sales store_q2_q3_increase
+ FROM
+        ss ss1
+       ,ss ss2
+       ,ss ss3
+       ,ws ws1
+       ,ws ws2
+       ,ws ws3
+ WHERE
+    ss1.d_qoy = 1
+    AND ss1.d_year = 2000
+    AND ss1.ca_county = ss2.ca_county
+    AND ss2.d_qoy = 2
+    AND ss2.d_year = 2000
+    AND ss2.ca_county = ss3.ca_county
+    AND ss3.d_qoy = 3
+    AND ss3.d_year = 2000
+    AND ss1.ca_county = ws1.ca_county
+    AND ws1.d_qoy = 1
+    AND ws1.d_year = 2000
+    AND ws1.ca_county = ws2.ca_county
+    AND ws2.d_qoy = 2
+    AND ws2.d_year = 2000
+    AND ws1.ca_county = ws3.ca_county
+    AND ws3.d_qoy = 3
+    AND ws3.d_year =2000
+    AND (CASE WHEN ws1.web_sales > 0 THEN ws2.web_sales/ws1.web_sales ELSE null END)
+       > (CASE WHEN ss1.store_sales > 0 THEN ss2.store_sales/ss1.store_sales ELSE null END)
+    AND (CASE WHEN ws2.web_sales > 0 THEN ws3.web_sales/ws2.web_sales ELSE null END)
+       > (CASE WHEN ss2.store_sales > 0 THEN ss3.store_sales/ss2.store_sales ELSE null END)
+ORDER BY ss1.d_year;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.1.ddl.sqlpp
new file mode 100644
index 0000000..f7a6453
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.1.ddl.sqlpp
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.2.update.sqlpp
new file mode 100644
index 0000000..97ea2e8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.3.query.sqlpp
new file mode 100644
index 0000000..e81d182
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q34/q34.3.query.sqlpp
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT  c_last_name
+       ,c_first_name
+       ,c_salutation
+       ,c_preferred_cust_flag
+       ,ss_ticket_number
+       ,cnt
+       FROM
+       (select ss_ticket_number
+          ,ss_customer_sk
+          ,LEN(dngrp) cnt
+     FROM store_sales,date_dim,store,household_demographics
+     WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk
+     AND store_sales.ss_store_sk = store.s_store_sk
+     AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND (date_dim.d_dom >= 1
+      AND date_dim.d_dom <= 3
+     OR date_dim.d_dom >= 25
+      AND date_dim.d_dom <= 28)
+     AND (household_demographics.hd_buy_potential = '>10000'
+  OR household_demographics.hd_buy_potential = 'unknown')
+     AND household_demographics.hd_vehicle_count > 0
+     AND (CASE WHEN household_demographics.hd_vehicle_count > 0
+ THEN household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count
+ ELSE null
+ END)  > 1.2
+     AND date_dim.d_year in [1998,1998+1,1998+2]
+     AND store.s_county in ['Williamson County','Williamson County','Williamson County','Williamson County',
+                           'Williamson County','Williamson County','Williamson County','Williamson County']
+GROUP BY ss_ticket_number,ss_customer_sk GROUP AS dngrp) dn, customer
+WHERE dn.ss_customer_sk = c_customer_sk
+      AND cnt >= 15
+      AND cnt <= 20
+ORDER BY c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.1.ddl.sqlpp
new file mode 100644
index 0000000..cb90a52
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.1.ddl.sqlpp
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.catalog_sales_type as
+ closed {
+  cs_sold_date_sk:           bigint?,
+  cs_sold_time_sk:           bigint?,
+  cs_ship_date_sk:           bigint?,
+  cs_bill_customer_sk:       bigint?,
+  cs_bill_cdemo_sk:          bigint?,
+  cs_bill_hdemo_sk:          bigint?,
+  cs_bill_addr_sk:           bigint?,
+  cs_ship_customer_sk:       bigint?,
+  cs_ship_cdemo_sk:          bigint?,
+  cs_ship_hdemo_sk:          bigint?,
+  cs_ship_addr_sk:           bigint?,
+  cs_call_center_sk:         bigint?,
+  cs_ship_mode_sk:           bigint?,
+  cs_warehouse_sk:           bigint?,
+  cs_item_sk:                bigint,
+  cs_promo_sk:               bigint?,
+  cs_order_number:           bigint,
+  cs_quantity:               bigint?,
+  cs_wholesale_cost:         double?,
+  cs_list_price:             double?,
+  cs_sales_price:            double?,
+  cs_ext_discount_amt:       double?,
+  cs_ext_sales_price:        double?,
+  cs_ext_wholesale_cost:     double?,
+  cs_ext_list_price:         double?,
+  cs_ext_tax:                double?,
+  cs_coupon_amt:             double?,
+  cs_ext_ship_cost:          double?,
+  cs_net_paid:               double?,
+  cs_net_paid_inc_tax:       double?,
+  cs_net_paid_inc_ship:      double?,
+  cs_net_paid_inc_ship_tax:  double?,
+  cs_net_profit:             double?
+}
+
+create type tpcds.inventory_type as
+ closed {
+  inv_date_sk : bigint,
+  inv_item_sk : bigint,
+  inv_warehouse_sk : bigint,
+  inv_quantity_on_hand : bigint?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset catalog_sales (catalog_sales_type) primary key cs_item_sk, cs_order_number;
+
+create dataset inventory (inventory_type) primary key inv_date_sk, inv_item_sk, inv_warehouse_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.2.update.sqlpp
new file mode 100644
index 0000000..729df27
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset catalog_sales using localfs ((`path`=`asterix_nc1://data/tpcds/catalog_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset inventory using localfs ((`path`=`asterix_nc1://data/tpcds/inventory.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.3.query.sqlpp
new file mode 100644
index 0000000..df1b580
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q37/q37.3.query.sqlpp
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT  i_item_id
+       ,i_item_desc
+       ,i_current_price
+ FROM item, inventory, date_dim, catalog_sales
+ WHERE i_current_price >= 22 AND i_current_price <= 22 + 30
+ AND inv_item_sk = i_item_sk
+ AND d_date_sk=inv_date_sk
+ AND date(d_date) >= date('2001-06-02') AND date(d_date) <= date('2001-08-01')
+ AND i_manufact_id in [678,964,918,849]
+ AND inv_quantity_on_hand >= 100 AND inv_quantity_on_hand <= 500
+ AND cs_item_sk = i_item_sk
+ GROUP BY i_item_id,i_item_desc,i_current_price
+ ORDER BY i_item_id
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.1.ddl.sqlpp
new file mode 100644
index 0000000..8bc86f0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.1.ddl.sqlpp
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create dataset item (item_type) primary key i_item_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.2.update.sqlpp
similarity index 84%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.2.update.sqlpp
index af2f691..a04c6dd 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.2.update.sqlpp
@@ -16,4 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.3.query.sqlpp
new file mode 100644
index 0000000..f1fbe45
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q41/q41.3.query.sqlpp
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT  distinct(i_product_name)
+ FROM item i1
+ WHERE i1.i_manufact_id >= 742 AND i1.i_manufact_id <= 742+40
+   AND array_count((select 1 as item_cnt
+        from item i2
+        where (i2.i_manufact = i1.i_manufact AND
+        ((i2.i_category = 'Women' AND
+        (i2.i_color = 'orchid' OR i2.i_color = 'papaya') AND
+        (i2.i_units = 'Pound' OR i2.i_units = 'Lb') AND
+        (i2.i_size = 'petite' OR i2.i_size = 'medium')
+        ) or
+        (i2.i_category = 'Women' AND
+        (i2.i_color = 'burlywood' OR i2.i_color = 'navy') AND
+        (i2.i_units = 'Bundle' OR i2.i_units = 'Each') AND
+        (i2.i_size = 'N/A' OR i2.i_size = 'extra large')
+        ) or
+        (i2.i_category = 'Men' AND
+        (i2.i_color = 'bisque' OR i2.i_color = 'azure') AND
+        (i2.i_units = 'N/A' OR i2.i_units = 'Tsp') AND
+        (i2.i_size = 'small' OR i2.i_size = 'large')
+        ) or
+        (i2.i_category = 'Men' AND
+        (i2.i_color = 'chocolate' OR i2.i_color = 'cornflower') AND
+        (i2.i_units = 'Bunch' OR i2.i_units = 'Gross') AND
+        (i2.i_size = 'petite' OR i2.i_size = 'medium')
+        ))) or
+       (i2.i_manufact = i1.i_manufact AND
+        ((i2.i_category = 'Women' AND
+        (i2.i_color = 'salmon' OR i2.i_color = 'midnight') AND
+        (i2.i_units = 'Oz' OR i2.i_units = 'Box') AND
+        (i2.i_size = 'petite' OR i2.i_size = 'medium')
+        ) or
+        (i2.i_category = 'Women' AND
+        (i2.i_color = 'snow' OR i2.i_color = 'steel') AND
+        (i2.i_units = 'Carton' OR i2.i_units = 'Tbl') AND
+        (i2.i_size = 'N/A' OR i2.i_size = 'extra large')
+        ) or
+        (i2.i_category = 'Men' AND
+        (i2.i_color = 'purple' OR i2.i_color = 'gainsboro') AND
+        (i2.i_units = 'Dram' OR i2.i_units = 'Unknown') AND
+        (i2.i_size = 'small' OR i2.i_size = 'large')
+        ) or
+        (i2.i_category = 'Men' AND
+        (i2.i_color = 'metallic' OR i2.i_color = 'forest') AND
+        (i2.i_units = 'Gram' OR i2.i_units = 'Ounce') AND
+        (i2.i_size = 'petite' OR i2.i_size = 'medium')
+        ))))) > 0
+ ORDER BY i1.i_product_name
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.1.ddl.sqlpp
new file mode 100644
index 0000000..acd3953
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.1.ddl.sqlpp
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.2.update.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.2.update.sqlpp
index af2f691..dfdaad1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.2.update.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.3.query.sqlpp
similarity index 62%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.3.query.sqlpp
index af2f691..cee34eb 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q42/q42.3.query.sqlpp
@@ -16,4 +16,26 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+
+USE tpcds;
+
+SELECT  dt.d_year
+  ,item.i_category_id
+  ,item.i_category
+  ,sum(ss_ext_sales_price)
+ FROM  date_dim dt
+  ,store_sales
+  ,item
+ WHERE dt.d_date_sk = store_sales.ss_sold_date_sk
+  AND store_sales.ss_item_sk = item.i_item_sk
+  AND item.i_manager_id = 1
+  AND dt.d_moy=12
+  AND dt.d_year=1998
+ GROUP BY  dt.d_year
+   ,item.i_category_id
+   ,item.i_category
+ ORDER BY SUM(ss_ext_sales_price) desc,dt.d_year
+   ,item.i_category_id
+   ,item.i_category
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.1.ddl.sqlpp
new file mode 100644
index 0000000..1c86ec3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.1.ddl.sqlpp
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset store (store_type) primary key s_store_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.2.update.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.2.update.sqlpp
index af2f691..bbf1838 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.2.update.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.3.query.sqlpp
new file mode 100644
index 0000000..6e77ed4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q43/q43.3.query.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT  s_store_name, s_store_id,
+        SUM(CASE WHEN (d_day_name='Sunday') THEN ss_sales_price ELSE null END) sun_sales,
+        SUM(CASE WHEN (d_day_name='Monday') THEN ss_sales_price ELSE null END) mon_sales,
+        SUM(CASE WHEN (d_day_name='Tuesday') THEN ss_sales_price ELSE  null END) tue_sales,
+        SUM(CASE WHEN (d_day_name='Wednesday') THEN ss_sales_price ELSE null END) wed_sales,
+        SUM(CASE WHEN (d_day_name='Thursday') THEN ss_sales_price ELSE null END) thu_sales,
+        SUM(CASE WHEN (d_day_name='Friday') THEN ss_sales_price ELSE null END) fri_sales,
+        SUM(CASE WHEN (d_day_name='Saturday') THEN ss_sales_price ELSE null END) sat_sales
+ FROM date_dim, store_sales, store
+ where d_date_sk = ss_sold_date_sk and
+       s_store_sk = ss_store_sk and
+       s_gmt_offset = -5 and
+       d_year = 1998
+ GROUP BY s_store_name, s_store_id
+ ORDER BY s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.1.ddl.sqlpp
new file mode 100644
index 0000000..4eebc51
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.1.ddl.sqlpp
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as  closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.web_sales_type as
+ closed {
+  ws_sold_date_sk : bigint?,
+  ws_sold_time_sk : bigint?,
+  ws_ship_date_sk : bigint?,
+  ws_item_sk : bigint,
+  ws_bill_customer_sk : bigint?,
+  ws_bill_cdemo_sk : bigint?,
+  ws_bill_hdemo_sk : bigint?,
+  ws_bill_addr_sk : bigint?,
+  ws_ship_customer_sk : bigint?,
+  ws_ship_cdemo_sk : bigint?,
+  ws_ship_hdemo_sk : bigint?,
+  ws_ship_addr_sk : bigint?,
+  ws_web_page_sk : bigint?,
+  ws_web_site_sk : bigint?,
+  ws_ship_mode_sk : bigint?,
+  ws_warehouse_sk : bigint?,
+  ws_promo_sk : bigint?,
+  ws_order_number : bigint,
+  ws_quantity : bigint?,
+  ws_wholesale_cost : double?,
+  ws_list_price : double?,
+  ws_sales_price : double?,
+  ws_ext_discount_amt : double?,
+  ws_ext_sales_price : double?,
+  ws_ext_wholesale_cost : double?,
+  ws_ext_list_price : double?,
+  ws_ext_tax : double?,
+  ws_coupon_amt : double?,
+  ws_ext_ship_cost : double?,
+  ws_net_paid : double?,
+  ws_net_paid_inc_tax : double?,
+  ws_net_paid_inc_ship : double?,
+  ws_net_paid_inc_ship_tax : double?,
+  ws_net_profit : double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset web_sales (web_sales_type) primary key ws_item_sk, ws_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.2.update.sqlpp
new file mode 100644
index 0000000..cd8b95d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_sales using localfs ((`path`=`asterix_nc1://data/tpcds/web_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.3.query.sqlpp
new file mode 100644
index 0000000..83acd4a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q45/q45.3.query.sqlpp
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+SELECT  ca_zip, ca_county, SUM(ws_sales_price)
+ FROM web_sales, customer, customer_address, date_dim, item i2
+ WHERE ws_bill_customer_sk = c_customer_sk
+  AND c_current_addr_sk = ca_address_sk
+  AND ws_item_sk = i2.i_item_sk
+  AND ( substr(ca_zip,1,5) IN ['85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792']
+        OR
+        i2.i_item_id IN (SELECT VALUE i1.i_item_id
+                         FROM item i1
+                         WHERE i1.i_item_sk in [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
+                         )
+      )
+  AND ws_sold_date_sk = d_date_sk
+  AND d_qoy = 2 and d_year = 2000
+ GROUP BY ca_zip, ca_county
+ ORDER BY ca_zip, ca_county
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.1.ddl.sqlpp
new file mode 100644
index 0000000..67e2de3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.1.ddl.sqlpp
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as  closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim (date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.2.update.sqlpp
new file mode 100644
index 0000000..6ab1d16
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.2.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.3.query.sqlpp
new file mode 100644
index 0000000..873d78b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q46/q46.3.query.sqlpp
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT  c_last_name
+       ,c_first_name
+       ,ca_city
+       ,bought_city
+       ,ss_ticket_number
+       ,amt,profit
+ FROM
+   (SELECT ss_ticket_number
+          ,ss_customer_sk
+          ,ca_city bought_city
+          ,sum(ss_coupon_amt) amt
+          ,sum(ss_net_profit) profit
+    FROM store_sales,date_dim,store,household_demographics,customer_address
+    WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk
+    AND store_sales.ss_store_sk = store.s_store_sk
+    AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk
+    AND store_sales.ss_addr_sk = customer_address.ca_address_sk
+    AND (household_demographics.hd_dep_count = 5 or
+         household_demographics.hd_vehicle_count= 3)
+    AND date_dim.d_dow in [6,0]
+    AND date_dim.d_year in [1999,1999+1,1999+2]
+    AND store.s_city in ['Midway','Fairview','Fairview','Fairview','Fairview']
+    GROUP BY ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr
+    WHERE ss_customer_sk = c_customer_sk
+      AND customer.c_current_addr_sk = current_addr.ca_address_sk
+      AND current_addr.ca_city != bought_city
+  ORDER BY c_last_name
+          ,c_first_name
+          ,ca_city
+          ,bought_city
+          ,ss_ticket_number
+  LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.1.ddl.sqlpp
new file mode 100644
index 0000000..8779419
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.1.ddl.sqlpp
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.store_returns_type as
+ closed {
+  sr_returned_date_sk : bigint?,
+  sr_return_time_sk : bigint?,
+  sr_item_sk : bigint,
+  sr_customer_sk : bigint?,
+  sr_cdemo_sk : bigint?,
+  sr_hdemo_sk : bigint?,
+  sr_addr_sk : bigint?,
+  sr_store_sk : bigint?,
+  sr_reason_sk : bigint?,
+  sr_ticket_number : bigint,
+  sr_return_quantity : bigint?,
+  sr_return_amt : double,
+  sr_return_tax : double?,
+  sr_return_amt_inc_tax : double?,
+  sr_fee : double?,
+  sr_return_ship_cost : double?,
+  sr_refunded_cash : double?,
+  sr_reversed_charge : double?,
+  sr_store_credit : double?,
+  sr_net_loss : double?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create dataset store_returns (store_returns_type) primary key sr_item_sk, sr_ticket_number;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.2.update.sqlpp
new file mode 100644
index 0000000..d50d706
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset store_returns using localfs ((`path`=`asterix_nc1://data/tpcds/store_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.3.query.sqlpp
new file mode 100644
index 0000000..5bf8683
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q50/q50.3.query.sqlpp
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT
+   s_store_name
+  ,s_company_id
+  ,s_street_number
+  ,s_street_name
+  ,s_street_type
+  ,s_suite_number
+  ,s_city
+  ,s_county
+  ,s_state
+  ,s_zip
+  ,SUM((CASE WHEN (sr_returned_date_sk - ss_sold_date_sk) <= 30  THEN 1 ELSE 0 END))  AS c30_days
+  ,SUM((CASE WHEN ((sr_returned_date_sk - ss_sold_date_sk) > 30 AND
+                 (sr_returned_date_sk - ss_sold_date_sk) <= 60) THEN 1 ELSE 0 END ))  AS c31_60_days
+  ,SUM((CASE WHEN ((sr_returned_date_sk - ss_sold_date_sk) > 60 AND
+                 (sr_returned_date_sk - ss_sold_date_sk) <= 90) THEN 1 ELSE 0 END))  AS c61_90_days
+  ,SUM((CASE WHEN ((sr_returned_date_sk - ss_sold_date_sk) > 90 AND
+                 (sr_returned_date_sk - ss_sold_date_sk) <= 120) THEN 1 ELSE 0 END))  AS c91_120_days
+  ,SUM((CASE WHEN (sr_returned_date_sk - ss_sold_date_sk)  > 120 THEN 1 ELSE 0 END))  AS gt120_days
+FROM
+   store_sales
+  ,store_returns
+  ,store
+  ,date_dim d1
+  ,date_dim d2
+WHERE
+    d2.d_year = 2000
+AND d2.d_moy  = 9
+AND ss_ticket_number = sr_ticket_number
+AND ss_item_sk = sr_item_sk
+AND ss_sold_date_sk   = d1.d_date_sk
+AND sr_returned_date_sk   = d2.d_date_sk
+AND ss_customer_sk = sr_customer_sk
+AND ss_store_sk = s_store_sk
+GROUP BY
+   s_store_name
+  ,s_company_id
+  ,s_street_number
+  ,s_street_name
+  ,s_street_type
+  ,s_suite_number
+  ,s_city
+  ,s_county
+  ,s_state
+  ,s_zip
+ORDER BY s_store_name
+        ,s_company_id
+        ,s_street_number
+        ,s_street_name
+        ,s_street_type
+        ,s_suite_number
+        ,s_city
+        ,s_county
+        ,s_state
+        ,s_zip
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.1.ddl.sqlpp
new file mode 100644
index 0000000..acd3953
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.1.ddl.sqlpp
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint?,
+  d_dow : bigint?,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.2.update.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.2.update.sqlpp
index af2f691..dfdaad1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.2.update.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.3.query.sqlpp
similarity index 63%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.3.query.sqlpp
index af2f691..6c3ad96 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q52/q52.3.query.sqlpp
@@ -16,4 +16,26 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+
+USE tpcds;
+
+SELECT  dt.d_year
+  ,item.i_brand_id brand_id
+  ,item.i_brand brand
+  ,SUM(ss_ext_sales_price) ext_price
+ FROM date_dim dt
+     ,store_sales
+     ,item
+ WHERE dt.d_date_sk = store_sales.ss_sold_date_sk
+    AND store_sales.ss_item_sk = item.i_item_sk
+    AND item.i_manager_id = 1
+    AND dt.d_moy=12
+    AND dt.d_year=1998
+ GROUP BY dt.d_year
+  ,item.i_brand
+  ,item.i_brand_id
+ ORDER BY dt.d_year
+  ,ext_price desc
+  ,brand_id
+LIMIT 100 ;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.1.ddl.sqlpp
new file mode 100644
index 0000000..acd3953
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.1.ddl.sqlpp
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.2.update.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.2.update.sqlpp
index af2f691..dfdaad1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.2.update.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.3.query.sqlpp
similarity index 70%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.3.query.sqlpp
index af2f691..4928de3 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q55/q55.3.query.sqlpp
@@ -16,4 +16,18 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+
+USE tpcds;
+
+SELECT  i_brand_id brand_id, i_brand brand,
+  SUM(ss_ext_sales_price) ext_price
+ FROM date_dim, store_sales, item
+ WHERE d_date_sk = ss_sold_date_sk
+  AND ss_item_sk = i_item_sk
+  AND i_manager_id=36
+  AND d_moy=12
+  AND d_year=2001
+ GROUP BY i_brand, i_brand_id
+ ORDER BY ext_price desc, i_brand_id
+LIMIT 100 ;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.1.ddl.sqlpp
new file mode 100644
index 0000000..fb4779f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.1.ddl.sqlpp
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.2.update.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.2.update.sqlpp
index af2f691..4cadf19 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.2.update.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.3.query.sqlpp
new file mode 100644
index 0000000..ff45e4d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q59/q59.3.query.sqlpp
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+WITH wss AS
+ (SELECT d_week_seq,
+        ss_store_sk,
+        SUM(CASE WHEN d_day_name='Sunday' THEN ss_sales_price ELSE null END) sun_sales,
+        SUM(CASE WHEN d_day_name='Monday' THEN ss_sales_price ELSE null END) mon_sales,
+        SUM(CASE WHEN d_day_name='Tuesday' THEN ss_sales_price ELSE  null END) tue_sales,
+        SUM(CASE WHEN d_day_name='Wednesday' THEN ss_sales_price ELSE null END) wed_sales,
+        SUM(CASE WHEN d_day_name='Thursday' THEN ss_sales_price ELSE null END) thu_sales,
+        SUM(CASE WHEN d_day_name='Friday' THEN ss_sales_price ELSE null END) fri_sales,
+        SUM(CASE WHEN d_day_name='Saturday' THEN ss_sales_price ELSE null END) sat_sales
+ FROM store_sales,date_dim
+ WHERE d_date_sk = ss_sold_date_sk
+ GROUP BY d_week_seq,ss_store_sk
+ )
+  SELECT  s_store_name1,s_store_id1,d_week_seq1
+       ,sun_sales1/sun_sales2,mon_sales1/mon_sales2
+       ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2
+       ,fri_sales1/fri_sales2,sat_sales1/sat_sales2
+ FROM
+ (SELECT s_store_name s_store_name1,wss.d_week_seq d_week_seq1
+        ,s_store_id s_store_id1,sun_sales sun_sales1
+        ,mon_sales mon_sales1,tue_sales tue_sales1
+        ,wed_sales wed_sales1,thu_sales thu_sales1
+        ,fri_sales fri_sales1,sat_sales sat_sales1
+  FROM wss,store,date_dim d
+  WHERE d.d_week_seq = wss.d_week_seq AND
+        ss_store_sk = s_store_sk AND
+        d_month_seq >= 1185 AND d_month_seq <= 1185 + 11) y,
+ (SELECT s_store_name s_store_name2,wss.d_week_seq d_week_seq2
+        ,s_store_id s_store_id2,sun_sales sun_sales2
+        ,mon_sales mon_sales2,tue_sales tue_sales2
+        ,wed_sales wed_sales2,thu_sales thu_sales2
+        ,fri_sales fri_sales2,sat_sales sat_sales2
+  FROM wss,store,date_dim d
+  WHERE d.d_week_seq = wss.d_week_seq AND
+        ss_store_sk = s_store_sk AND
+        d_month_seq >= 1185+ 12 AND d_month_seq <= 1185 + 23) x
+  WHERE s_store_id1=s_store_id2
+  AND d_week_seq1=d_week_seq2-52
+ ORDER BY s_store_name1,s_store_id1,d_week_seq1
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.1.ddl.sqlpp
new file mode 100644
index 0000000..f9d0500
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.1.ddl.sqlpp
@@ -0,0 +1,162 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.web_sales_type as
+ closed {
+  ws_sold_date_sk : bigint?,
+  ws_sold_time_sk : bigint?,
+  ws_ship_date_sk : bigint?,
+  ws_item_sk : bigint,
+  ws_bill_customer_sk : bigint?,
+  ws_bill_cdemo_sk : bigint?,
+  ws_bill_hdemo_sk : bigint?,
+  ws_bill_addr_sk : bigint?,
+  ws_ship_customer_sk : bigint?,
+  ws_ship_cdemo_sk : bigint?,
+  ws_ship_hdemo_sk : bigint?,
+  ws_ship_addr_sk : bigint?,
+  ws_web_page_sk : bigint?,
+  ws_web_site_sk : bigint?,
+  ws_ship_mode_sk : bigint?,
+  ws_warehouse_sk : bigint?,
+  ws_promo_sk : bigint?,
+  ws_order_number : bigint,
+  ws_quantity : bigint?,
+  ws_wholesale_cost : double?,
+  ws_list_price : double?,
+  ws_sales_price : double?,
+  ws_ext_discount_amt : double?,
+  ws_ext_sales_price : double?,
+  ws_ext_wholesale_cost : double?,
+  ws_ext_list_price : double?,
+  ws_ext_tax : double?,
+  ws_coupon_amt : double?,
+  ws_ext_ship_cost : double?,
+  ws_net_paid : double?,
+  ws_net_paid_inc_tax : double?,
+  ws_net_paid_inc_ship : double?,
+  ws_net_paid_inc_ship_tax : double?,
+  ws_net_profit : double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.web_site_type as
+ closed {
+  web_site_sk:               bigint,
+  web_site_id:               string,
+  web_rec_start_date:        string?,
+  web_rec_end_date:          string?,
+  web_name:                  string?,
+  web_open_date_sk:          bigint?,
+  web_close_date_sk:         bigint?,
+  web_class:                 string?,
+  web_manager:               string?,
+  web_mkt_id:                bigint?,
+  web_mkt_class:             string?,
+  web_mkt_desc:              string?,
+  web_market_manager:        string?,
+  web_company_id:            bigint?,
+  web_company_name:          string?,
+  web_street_number:         string?,
+  web_street_name:           string?,
+  web_street_type:           string?,
+  web_suite_number:          string?,
+  web_city:                  string?,
+  web_county:                string?,
+  web_state:                 string?,
+  web_zip:                   string?,
+  web_country:               string?,
+  web_gmt_offset:            double?,
+  web_tax_percentage:        double?
+}
+
+create type tpcds.warehouse_type as
+ closed {
+  w_warehouse_sk : bigint,
+  w_warehouse_id : string,
+  w_warehouse_name : string?,
+  w_warehouse_sq_ft : bigint?,
+  w_street_number : string?,
+  w_street_name : string?,
+  w_street_type : string?,
+  w_suite_number : string?,
+  w_city : string?,
+  w_county : string?,
+  w_state : string?,
+  w_zip : string?,
+  w_country : string?,
+  w_gmt_offset : double?
+}
+
+create type tpcds.ship_mode_type as
+ closed {
+  sm_ship_mode_sk : bigint,
+  sm_ship_mode_id : string,
+  sm_type : string?,
+  sm_code : string?,
+  sm_carrier : string?,
+  sm_contract : string?
+}
+
+create dataset web_sales (web_sales_type) primary key ws_item_sk, ws_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset web_site (web_site_type) primary key web_site_sk;
+
+create dataset warehouse(warehouse_type) primary key w_warehouse_sk;
+
+create dataset ship_mode(ship_mode_type) primary key sm_ship_mode_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.2.update.sqlpp
new file mode 100644
index 0000000..2aab7e4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset web_sales using localfs ((`path`=`asterix_nc1://data/tpcds/web_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_site using localfs ((`path`=`asterix_nc1://data/tpcds/web_site.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset warehouse using localfs ((`path`=`asterix_nc1://data/tpcds/warehouse.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset ship_mode using localfs ((`path`=`asterix_nc1://data/tpcds/ship_mode.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.3.query.sqlpp
new file mode 100644
index 0000000..4d45927
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q62/q62.3.query.sqlpp
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT
+   SUBSTR(w_warehouse_name,1,20)
+  ,sm_type
+  ,web_name
+  ,SUM(CASE WHEN (ws_ship_date_sk - ws_sold_date_sk) <= 30 THEN 1 ELSE 0 END)  AS c30_days
+  ,SUM(CASE WHEN (ws_ship_date_sk - ws_sold_date_sk) > 30 AND
+                 (ws_ship_date_sk - ws_sold_date_sk) <= 60 THEN 1 ELSE 0 END )  AS c31_60_days
+  ,SUM(CASE WHEN (ws_ship_date_sk - ws_sold_date_sk) > 60 AND
+                 (ws_ship_date_sk - ws_sold_date_sk) <= 90 THEN 1 ELSE 0 END)  AS c61_90_days
+  ,SUM(CASE WHEN (ws_ship_date_sk - ws_sold_date_sk) > 90 AND
+                 (ws_ship_date_sk - ws_sold_date_sk) <= 120 THEN 1 ELSE 0 END)  AS c91_120_days
+  ,SUM(CASE WHEN (ws_ship_date_sk - ws_sold_date_sk)  > 120 THEN 1 ELSE 0 END)  AS gt120_days
+FROM
+   web_sales
+  ,warehouse
+  ,ship_mode
+  ,web_site
+  ,date_dim
+WHERE
+    d_month_seq >= 1212 AND d_month_seq <= 1212 + 11
+AND ws_ship_date_sk   = d_date_sk
+AND ws_warehouse_sk   = w_warehouse_sk
+AND ws_ship_mode_sk   = sm_ship_mode_sk
+AND ws_web_site_sk    = web_site_sk
+GROUP BY
+   SUBSTR(w_warehouse_name,1,20)
+  ,sm_type
+  ,web_name
+ORDER BY SUBSTR(w_warehouse_name,1,20)
+        ,sm_type
+       ,web_name
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.1.ddl.sqlpp
new file mode 100644
index 0000000..3d99b67
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.1.ddl.sqlpp
@@ -0,0 +1,175 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as  closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.2.update.sqlpp
new file mode 100644
index 0000000..6ab1d16
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.2.update.sqlpp
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.3.query.sqlpp
new file mode 100644
index 0000000..5e5833b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q68/q68.3.query.sqlpp
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+ SELECT  c_last_name
+       ,c_first_name
+       ,ca_city
+       ,bought_city
+       ,ss_ticket_number
+       ,extended_price
+       ,extended_tax
+       ,list_price
+ FROM (SELECT ss_ticket_number
+             ,ss_customer_sk
+             ,ca_city bought_city
+             ,sum(ss_ext_sales_price) extended_price
+             ,sum(ss_ext_list_price) list_price
+             ,sum(ss_ext_tax) extended_tax
+       FROM store_sales
+           ,date_dim
+           ,store
+           ,household_demographics
+           ,customer_address
+        WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk
+        AND store_sales.ss_store_sk = store.s_store_sk
+        AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk
+        AND store_sales.ss_addr_sk = customer_address.ca_address_sk
+        AND date_dim.d_dom >= 1 AND date_dim.d_dom <= 2
+        AND (household_demographics.hd_dep_count = 5 or
+             household_demographics.hd_vehicle_count= 3)
+        AND date_dim.d_year in [1999,1999+1,1999+2]
+        AND store.s_city IN ['Midway','Fairview']
+        GROUP BY ss_ticket_number
+               ,ss_customer_sk
+               ,ss_addr_sk,ca_city) dn
+      ,customer
+      ,customer_address current_addr
+ WHERE ss_customer_sk = c_customer_sk
+   AND customer.c_current_addr_sk = current_addr.ca_address_sk
+   AND current_addr.ca_city != bought_city
+ ORDER BY c_last_name
+         ,ss_ticket_number
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.1.ddl.sqlpp
new file mode 100644
index 0000000..f7a6453
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.1.ddl.sqlpp
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.2.update.sqlpp
new file mode 100644
index 0000000..97ea2e8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.3.query.sqlpp
new file mode 100644
index 0000000..0036755
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q73/q73.3.query.sqlpp
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT c_last_name
+       ,c_first_name
+       ,c_salutation
+       ,c_preferred_cust_flag
+       ,ss_ticket_number
+       ,cnt FROM
+   (SELECT ss_ticket_number
+          ,ss_customer_sk
+          ,len(djgrp) cnt
+    FROM store_sales,date_dim,store,household_demographics
+    WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk
+    AND store_sales.ss_store_sk = store.s_store_sk
+    AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk
+    AND date_dim.d_dom >= 1 AND date_dim.d_dom <= 2
+    AND (household_demographics.hd_buy_potential = '>10000' OR
+         household_demographics.hd_buy_potential = 'unknown')
+    AND household_demographics.hd_vehicle_count > 0
+    AND (CASE WHEN household_demographics.hd_vehicle_count > 0 THEN
+             household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count ELSE null END) > 1
+    AND date_dim.d_year in [1998,1998+1,1998+2]
+    AND store.s_county in ['Williamson County','Williamson County','Williamson County','Williamson County']
+    GROUP BY ss_ticket_number,ss_customer_sk
+    GROUP AS djgrp) dj,customer
+    WHERE ss_customer_sk = c_customer_sk
+      AND cnt >= 1 AND cnt <= 5
+    ORDER BY cnt desc;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.1.ddl.sqlpp
new file mode 100644
index 0000000..f7a6453
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.1.ddl.sqlpp
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store (store_type) primary key s_store_sk;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.2.update.sqlpp
new file mode 100644
index 0000000..97ea2e8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.3.query.sqlpp
new file mode 100644
index 0000000..f9a9a5c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q79/q79.3.query.sqlpp
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT
+  c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit
+  FROM
+   (SELECT ss_ticket_number
+          ,ss_customer_sk
+          ,store.s_city
+          ,sum(ss_coupon_amt) amt
+          ,sum(ss_net_profit) profit
+    FROM store_sales,date_dim,store,household_demographics
+    WHERE store_sales.ss_sold_date_sk = date_dim.d_date_sk
+    AND store_sales.ss_store_sk = store.s_store_sk
+    AND store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk
+    AND (household_demographics.hd_dep_count = 8 or household_demographics.hd_vehicle_count > 0)
+    AND date_dim.d_dow = 1
+    AND date_dim.d_year IN [1998,1998+1,1998+2]
+    AND store.s_number_employees >= 200 AND store.s_number_employees <= 295
+    group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer
+    WHERE ss_customer_sk = c_customer_sk
+ ORDER BY c_last_name,c_first_name,SUBSTR(s_city,1,30), profit
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.1.ddl.sqlpp
new file mode 100644
index 0000000..724dde8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.1.ddl.sqlpp
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as  closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.catalog_returns_type as
+ closed {
+  cr_returned_date_sk : bigint?,
+  cr_returned_time_sk : bigint?,
+  cr_item_sk : bigint,
+  cr_refunded_customer_sk : bigint?,
+  cr_refunded_cdemo_sk : bigint?,
+  cr_refunded_hdemo_sk : bigint?,
+  cr_refunded_addr_sk : bigint?,
+  cr_returning_customer_sk : bigint?,
+  cr_returning_cdemo_sk : bigint?,
+  cr_returning_hdemo_sk : bigint?,
+  cr_returning_addr_sk : bigint?,
+  cr_call_center_sk : bigint?,
+  cr_catalog_page_sk : bigint?,
+  cr_ship_mode_sk : bigint?,
+  cr_warehouse_sk : bigint?,
+  cr_reason_sk : bigint?,
+  cr_order_number : bigint,
+  cr_return_quantity : bigint?,
+  cr_return_amount : double?,
+  cr_return_tax : double?,
+  cr_return_amt_inc_tax : double?,
+  cr_fee : double?,
+  cr_return_ship_cost : double?,
+  cr_refunded_cash : double?,
+  cr_reversed_charge : double?,
+  cr_store_credit : double?,
+  cr_net_loss : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset catalog_returns (catalog_returns_type) primary key cr_item_sk, cr_order_number;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.2.update.sqlpp
new file mode 100644
index 0000000..b7e0e29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset catalog_returns using localfs ((`path`=`asterix_nc1://data/tpcds/catalog_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.3.query.sqlpp
new file mode 100644
index 0000000..1dbe2ef
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q81/q81.3.query.sqlpp
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
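+-- customer_total_return: per-customer, per-state totals of catalog return
+-- amounts (incl. tax) for 1998. The outer query keeps Illinois customers whose
+-- total exceeds 1.2x their state's average; the correlated subquery yields a
+-- one-element collection, so [0] extracts the scalar average.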
+WITH customer_total_return AS
+ (SELECT cr_returning_customer_sk as ctr_customer_sk
+        ,ca_state as ctr_state,
+  SUM(cr_return_amt_inc_tax) as ctr_total_return
+  FROM catalog_returns
+     ,date_dim
+     ,customer_address
+ WHERE cr_returned_date_sk = d_date_sk
+   AND d_year =1998
+   AND cr_returning_addr_sk = ca_address_sk
+ GROUP BY cr_returning_customer_sk
+         ,ca_state )
+  SELECT  c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name
+                   ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset
+                  ,ca_location_type,ctr_total_return
+ FROM customer_total_return ctr1
+     ,customer_address
+     ,customer
+ WHERE ctr1.ctr_total_return > (SELECT VALUE (AVG(ctr2.ctr_total_return)*1.2)
+      FROM customer_total_return ctr2
+                     WHERE ctr1.ctr_state = ctr2.ctr_state)[0]
+       AND ca_address_sk = c_current_addr_sk
+       AND ca_state = 'IL'
+       AND ctr1.ctr_customer_sk = c_customer_sk
+ ORDER BY c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name
+                   ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset
+                  ,ca_location_type,ctr_total_return
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.1.ddl.sqlpp
new file mode 100644
index 0000000..c2cc51b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.1.ddl.sqlpp
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.inventory_type as
+ closed {
+  inv_date_sk : bigint,
+  inv_item_sk : bigint,
+  inv_warehouse_sk : bigint,
+  inv_quantity_on_hand : bigint?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset inventory (inventory_type) primary key inv_date_sk, inv_item_sk, inv_warehouse_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.2.update.sqlpp
new file mode 100644
index 0000000..037dc26
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset inventory using localfs ((`path`=`asterix_nc1://data/tpcds/inventory.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.3.query.sqlpp
similarity index 60%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.3.query.sqlpp
index af2f691..7463682 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q82/q82.3.query.sqlpp
@@ -16,4 +16,21 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+
+USE tpcds;
+
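+-- Items from the four listed manufacturers priced between 30 and 60 that had
+-- 100-500 units on hand in inventory during 2002-05-30 .. 2002-07-01 and at
+-- least one store sale; GROUP BY collapses the result to one row per item.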
+SELECT  i_item_id
+       ,i_item_desc
+       ,i_current_price
+ FROM item, inventory, date_dim, store_sales
+ WHERE i_current_price >= 30 AND i_current_price <= 30+30
+ AND inv_item_sk = i_item_sk
+ AND d_date_sk=inv_date_sk
+ AND date(d_date) >= date('2002-05-30') AND date(d_date) <= date('2002-07-01')
+ AND i_manufact_id in [437,129,727,663]
+ AND inv_quantity_on_hand >= 100 AND inv_quantity_on_hand <= 500
+ AND ss_item_sk = i_item_sk
+ GROUP BY i_item_id,i_item_desc,i_current_price
+ ORDER BY i_item_id
+ LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.1.ddl.sqlpp
new file mode 100644
index 0000000..342fecd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.1.ddl.sqlpp
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+create type tpcds.time_dim_type as
+ closed {
+  t_time_sk : bigint,
+  t_time_id : string,
+  t_time : bigint?,
+  t_hour : bigint?,
+  t_minute : bigint?,
+  t_second : bigint?,
+  t_am_pm : string?,
+  t_shift : string?,
+  t_sub_shift : string?,
+  t_meal_time : string?
+}
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset tpcds.time_dim(time_dim_type) primary key t_time_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset store (store_type) primary key s_store_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.2.update.sqlpp
new file mode 100644
index 0000000..a673f5b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset time_dim using localfs ((`path`=`asterix_nc1://data/tpcds/time_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.3.query.sqlpp
new file mode 100644
index 0000000..455eab7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q88/q88.3.query.sqlpp
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
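+-- Builds a single record with eight half-hour sale counts (8:30 through 12:30)
+-- for store 'ese'. Each field repeats the same store_sales / household_demographics
+-- / time_dim / store join, varying only the t_hour / t_minute window, and
+-- COLL_COUNT reduces each subquery's collection to a count.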
+SELECT VALUE {"h8_30_to_9" : COLL_COUNT((select 1
+     FROM store_sales ss1, household_demographics , time_dim, store s1
+     WHERE ss1.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss1.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss1.ss_store_sk = s1.s_store_sk
+     AND time_dim.t_hour = 8
+     AND time_dim.t_minute >= 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s1.s_store_name = 'ese')), "h9_to_9_30": COLL_COUNT((select 1
+     FROM store_sales ss2, household_demographics , time_dim, store s2
+     WHERE ss2.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss2.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss2.ss_store_sk = s2.s_store_sk
+     AND time_dim.t_hour = 9
+     AND time_dim.t_minute < 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s2.s_store_name = 'ese')), "h9_30_to_10": COLL_COUNT((select 1
+     FROM store_sales ss3, household_demographics , time_dim, store s3
+     WHERE ss3.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss3.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss3.ss_store_sk = s3.s_store_sk
+     AND time_dim.t_hour = 9
+     AND time_dim.t_minute >= 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s3.s_store_name = 'ese')), "h10_to_10_30": COLL_COUNT((select 1
+     FROM store_sales ss4, household_demographics , time_dim, store s4
+     WHERE ss4.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss4.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss4.ss_store_sk = s4.s_store_sk
+     AND time_dim.t_hour = 10
+     AND time_dim.t_minute < 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s4.s_store_name = 'ese')), "h10_30_to_11": COLL_COUNT( (select 1
+     FROM store_sales ss5, household_demographics , time_dim, store s5
+     WHERE ss5.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss5.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss5.ss_store_sk = s5.s_store_sk
+     AND time_dim.t_hour = 10
+     AND time_dim.t_minute >= 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s5.s_store_name = 'ese')), "h11_to_11_30": COLL_COUNT((select 1
+     FROM store_sales ss6, household_demographics , time_dim, store s6
+     WHERE ss6.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss6.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss6.ss_store_sk = s6.s_store_sk
+     AND time_dim.t_hour = 11
+     AND time_dim.t_minute < 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s6.s_store_name = 'ese')), "h11_30_to_12": COLL_COUNT((select 1
+     FROM store_sales ss7, household_demographics , time_dim, store s7
+     WHERE ss7.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss7.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss7.ss_store_sk = s7.s_store_sk
+     AND time_dim.t_hour = 11
+     AND time_dim.t_minute >= 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s7.s_store_name = 'ese')), "h12_to_12_30":  COLL_COUNT((select 1
+     FROM store_sales ss8, household_demographics , time_dim, store s8
+     WHERE ss8.ss_sold_time_sk = time_dim.t_time_sk
+     AND ss8.ss_hdemo_sk = household_demographics.hd_demo_sk
+     AND ss8.ss_store_sk = s8.s_store_sk
+     AND time_dim.t_hour = 12
+     AND time_dim.t_minute < 30
+     AND ((household_demographics.hd_dep_count = 3 AND household_demographics.hd_vehicle_count<=3+2) or
+          (household_demographics.hd_dep_count = 0 AND household_demographics.hd_vehicle_count<=0+2) or
+          (household_demographics.hd_dep_count = 1 AND household_demographics.hd_vehicle_count<=1+2))
+     AND s8.s_store_name = 'ese'))};
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.1.ddl.sqlpp
new file mode 100644
index 0000000..5497be1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.1.ddl.sqlpp
@@ -0,0 +1,195 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as  closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.call_center_type as
+ closed {
+  cc_call_center_sk : bigint,
+  cc_call_center_id : string,
+  cc_rec_start_date : string?,
+  cc_rec_end_date : string?,
+  cc_closed_date_sk : bigint?,
+  cc_open_date_sk : bigint?,
+  cc_name : string?,
+  cc_class : string?,
+  cc_employees : bigint?,
+  cc_sq_ft : bigint?,
+  cc_hours : string?,
+  cc_manager : string?,
+  cc_mkt_id : bigint?,
+  cc_mkt_class : string?,
+  cc_mkt_desc : string?,
+  cc_market_manager : string?,
+  cc_division : bigint?,
+  cc_division_name : string?,
+  cc_company : bigint?,
+  cc_company_name : string?,
+  cc_street_number : bigint?,
+  cc_street_name : string?,
+  cc_street_type : string?,
+  cc_suite_number : string?,
+  cc_city : string?,
+  cc_county : string?,
+  cc_state : string?,
+  cc_zip : string?,
+  cc_country : string?,
+  cc_gmt_offset : double?,
+  cc_tax_percentage : double?
+}
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+create type tpcds.customer_demographics_type as
+ closed {
+  cd_demo_sk : bigint,
+  cd_gender : string?,
+  cd_marital_status : string?,
+  cd_education_status : string?,
+  cd_purchase_estimate : bigint?,
+  cd_credit_rating : string?,
+  cd_dep_count : bigint?,
+  cd_dep_employed_count : bigint?,
+  cd_dep_college_count : bigint?
+}
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.catalog_returns_type as
+ closed {
+  cr_returned_date_sk : bigint?,
+  cr_returned_time_sk : bigint?,
+  cr_item_sk : bigint,
+  cr_refunded_customer_sk : bigint?,
+  cr_refunded_cdemo_sk : bigint?,
+  cr_refunded_hdemo_sk : bigint?,
+  cr_refunded_addr_sk : bigint?,
+  cr_returning_customer_sk : bigint?,
+  cr_returning_cdemo_sk : bigint?,
+  cr_returning_hdemo_sk : bigint?,
+  cr_returning_addr_sk : bigint?,
+  cr_call_center_sk : bigint?,
+  cr_catalog_page_sk : bigint?,
+  cr_ship_mode_sk : bigint?,
+  cr_warehouse_sk : bigint?,
+  cr_reason_sk : bigint?,
+  cr_order_number : bigint,
+  cr_return_quantity : bigint?,
+  cr_return_amount : double?,
+  cr_return_tax : double?,
+  cr_return_amt_inc_tax : double?,
+  cr_fee : double?,
+  cr_return_ship_cost : double?,
+  cr_refunded_cash : double?,
+  cr_reversed_charge : double?,
+  cr_store_credit : double?,
+  cr_net_loss : double?
+}
+
+create type tpcds.customer_type as
+ closed {
+  c_customer_sk : bigint,
+  c_customer_id : string,
+  c_current_cdemo_sk : bigint?,
+  c_current_hdemo_sk : bigint?,
+  c_current_addr_sk : bigint?,
+  c_first_shipto_date_sk : bigint?,
+  c_first_sales_date_sk : bigint?,
+  c_salutation : string?,
+  c_first_name : string?,
+  c_last_name : string?,
+  c_preferred_cust_flag : string?,
+  c_birth_day : bigint?,
+  c_birth_month : bigint?,
+  c_birth_year : bigint?,
+  c_birth_country : string?,
+  c_login : string?,
+  c_email_address : string?,
+  c_last_review_date : string?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset call_center (call_center_type) primary key cc_call_center_sk;
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset customer_demographics(customer_demographics_type) primary key cd_demo_sk;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset catalog_returns (catalog_returns_type) primary key cr_item_sk, cr_order_number;
+
+create dataset customer (customer_type) primary key c_customer_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.2.update.sqlpp
new file mode 100644
index 0000000..4599485
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.2.update.sqlpp
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset call_center using localfs ((`path`=`asterix_nc1://data/tpcds/call_center.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/customer_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset catalog_returns using localfs ((`path`=`asterix_nc1://data/tpcds/catalog_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset customer using localfs ((`path`=`asterix_nc1://data/tpcds/customer.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.3.query.sqlpp
new file mode 100644
index 0000000..9ce8ffe
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q91/q91.3.query.sqlpp
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
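+-- Total catalog-return net loss per call center for November 1999, restricted
+-- to customers at GMT-7 addresses with one of the two listed marital/education
+-- profiles and hd_buy_potential LIKE '0-500%', ordered by loss descending.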
+SELECT
+        cc_call_center_id Call_Center,
+        cc_name Call_Center_Name,
+        cc_manager Manager,
+        SUM(cr_net_loss) Returns_Loss
+FROM
+        call_center,
+        catalog_returns,
+        date_dim,
+        customer,
+        customer_address,
+        customer_demographics,
+        household_demographics
+WHERE
+        cr_call_center_sk       = cc_call_center_sk
+AND     cr_returned_date_sk     = d_date_sk
+AND     cr_returning_customer_sk= c_customer_sk
+AND     cd_demo_sk              = c_current_cdemo_sk
+AND     hd_demo_sk              = c_current_hdemo_sk
+AND     ca_address_sk           = c_current_addr_sk
+AND     d_year                  = 1999
+AND     d_moy                   = 11
+AND     ( (cd_marital_status       = 'M' AND cd_education_status     = 'Unknown')
+        OR (cd_marital_status       = 'W' AND cd_education_status     = 'Advanced Degree'))
+AND     hd_buy_potential like '0-500%'
+AND     ca_gmt_offset           = -7
+GROUP BY cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status
+ORDER BY SUM(cr_net_loss) desc;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.1.ddl.sqlpp
new file mode 100644
index 0000000..027a556
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.1.ddl.sqlpp
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as  closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.web_sales_type as
+ closed {
+  ws_sold_date_sk : bigint?,
+  ws_sold_time_sk : bigint?,
+  ws_ship_date_sk : bigint?,
+  ws_item_sk : bigint,
+  ws_bill_customer_sk : bigint?,
+  ws_bill_cdemo_sk : bigint?,
+  ws_bill_hdemo_sk : bigint?,
+  ws_bill_addr_sk : bigint?,
+  ws_ship_customer_sk : bigint?,
+  ws_ship_cdemo_sk : bigint?,
+  ws_ship_hdemo_sk : bigint?,
+  ws_ship_addr_sk : bigint?,
+  ws_web_page_sk : bigint?,
+  ws_web_site_sk : bigint?,
+  ws_ship_mode_sk : bigint?,
+  ws_warehouse_sk : bigint?,
+  ws_promo_sk : bigint?,
+  ws_order_number : bigint,
+  ws_quantity : bigint?,
+  ws_wholesale_cost : double?,
+  ws_list_price : double?,
+  ws_sales_price : double?,
+  ws_ext_discount_amt : double?,
+  ws_ext_sales_price : double?,
+  ws_ext_wholesale_cost : double?,
+  ws_ext_list_price : double?,
+  ws_ext_tax : double?,
+  ws_coupon_amt : double?,
+  ws_ext_ship_cost : double?,
+  ws_net_paid : double?,
+  ws_net_paid_inc_tax : double?,
+  ws_net_paid_inc_ship : double?,
+  ws_net_paid_inc_ship_tax : double?,
+  ws_net_profit : double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.web_returns_type as
+ closed {
+  wr_returned_date_sk : bigint?,
+  wr_returned_time_sk : bigint?,
+  wr_item_sk : bigint,
+  wr_refunded_customer_sk : bigint?,
+  wr_refunded_cdemo_sk : bigint?,
+  wr_refunded_hdemo_sk : bigint?,
+  wr_refunded_addr_sk : bigint?,
+  wr_returning_customer_sk : bigint?,
+  wr_returning_cdemo_sk : bigint?,
+  wr_returning_hdemo_sk : bigint?,
+  wr_returning_addr_sk : bigint?,
+  wr_web_page_sk : bigint?,
+  wr_reason_sk : bigint?,
+  wr_order_number : bigint,
+  wr_return_quantity : bigint?,
+  wr_return_amt : double?,
+  wr_return_tax : double?,
+  wr_return_amt_inc_tax : double?,
+  wr_fee : double?,
+  wr_return_ship_cost: double?,
+  wr_refunded_cash: double?,
+  wr_reversed_charge: double?,
+  wr_account_credit: double?,
+  wr_net_loss: double?
+}
+
+create type tpcds.web_site_type as
+ closed {
+  web_site_sk:               bigint,
+  web_site_id:               string,
+  web_rec_start_date:        string?,
+  web_rec_end_date:          string?,
+  web_name:                  string?,
+  web_open_date_sk:          bigint?,
+  web_close_date_sk:         bigint?,
+  web_class:                 string?,
+  web_manager:               string?,
+  web_mkt_id:                bigint?,
+  web_mkt_class:             string?,
+  web_mkt_desc:              string?,
+  web_market_manager:        string?,
+  web_company_id:            bigint?,
+  web_company_name:          string?,
+  web_street_number:         string?,
+  web_street_name:           string?,
+  web_street_type:           string?,
+  web_suite_number:          string?,
+  web_city:                  string?,
+  web_county:                string?,
+  web_state:                 string?,
+  web_zip:                   string?,
+  web_country:               string?,
+  web_gmt_offset:            double?,
+  web_tax_percentage:        double?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset web_sales (web_sales_type) primary key ws_item_sk, ws_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset web_returns (web_returns_type) primary key wr_item_sk, wr_order_number;
+
+create dataset web_site (web_site_type) primary key web_site_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.2.update.sqlpp
new file mode 100644
index 0000000..08a8f79
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_sales using localfs ((`path`=`asterix_nc1://data/tpcds/web_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_returns using localfs ((`path`=`asterix_nc1://data/tpcds/web_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_site using localfs ((`path`=`asterix_nc1://data/tpcds/web_site.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.3.query.sqlpp
new file mode 100644
index 0000000..9de99d7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q94/q94.3.query.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
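+-- wsnondup deduplicates web_sales order numbers; the outer query counts orders
+-- shipped to Texas from site 'pri' between 1999-05-01 and 1999-06-29 that were
+-- shipped from more than one warehouse (EXISTS) and never returned (NOT EXISTS).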
+SELECT
+   COUNT(wsnondup.ws_order_number) order_count,
+   SUM(ws_ext_ship_cost) total_shipping_cost,
+   SUM(ws_net_profit) total_net_profit
+FROM
+   (SELECT ws_order_number
+    FROM web_sales
+    GROUP BY ws_order_number) AS wsnondup,
+   web_sales as ws1,
+   date_dim,
+   customer_address,
+   web_site
+WHERE
+    date(d_date) >= date('1999-05-01')
+AND date(d_date) <= date('1999-06-29')
+AND ws1.ws_ship_date_sk = d_date_sk
+AND ws1.ws_ship_addr_sk = ca_address_sk
+AND ca_state = 'TX'
+AND ws1.ws_web_site_sk = web_site_sk
+AND web_company_name = 'pri'
+AND EXISTS (SELECT *
+            FROM web_sales ws2
+            WHERE ws1.ws_order_number = ws2.ws_order_number
+            AND ws1.ws_warehouse_sk != ws2.ws_warehouse_sk)
+AND NOT EXISTS (select *
+                from web_returns wr1
+                where ws1.ws_order_number = wr1.wr_order_number)
+ORDER BY COUNT(wsnondup.ws_order_number)
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.1.ddl.sqlpp
new file mode 100644
index 0000000..027a556
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.1.ddl.sqlpp
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+
+create type tpcds.customer_address_type as  closed {
+  ca_address_sk : bigint,
+  ca_address_id : string,
+  ca_street_number : string?,
+  ca_street_name : string?,
+  ca_street_type : string?,
+  ca_suite_number : string?,
+  ca_city : string?,
+  ca_county : string?,
+  ca_state : string?,
+  ca_zip : string?,
+  ca_country : string?,
+  ca_gmt_offset : double?,
+  ca_location_type : string?
+ }
+
+create type tpcds.web_sales_type as
+ closed {
+  ws_sold_date_sk : bigint?,
+  ws_sold_time_sk : bigint?,
+  ws_ship_date_sk : bigint?,
+  ws_item_sk : bigint,
+  ws_bill_customer_sk : bigint?,
+  ws_bill_cdemo_sk : bigint?,
+  ws_bill_hdemo_sk : bigint?,
+  ws_bill_addr_sk : bigint?,
+  ws_ship_customer_sk : bigint?,
+  ws_ship_cdemo_sk : bigint?,
+  ws_ship_hdemo_sk : bigint?,
+  ws_ship_addr_sk : bigint?,
+  ws_web_page_sk : bigint?,
+  ws_web_site_sk : bigint?,
+  ws_ship_mode_sk : bigint?,
+  ws_warehouse_sk : bigint?,
+  ws_promo_sk : bigint?,
+  ws_order_number : bigint,
+  ws_quantity : bigint?,
+  ws_wholesale_cost : double?,
+  ws_list_price : double?,
+  ws_sales_price : double?,
+  ws_ext_discount_amt : double?,
+  ws_ext_sales_price : double?,
+  ws_ext_wholesale_cost : double?,
+  ws_ext_list_price : double?,
+  ws_ext_tax : double?,
+  ws_coupon_amt : double?,
+  ws_ext_ship_cost : double?,
+  ws_net_paid : double?,
+  ws_net_paid_inc_tax : double?,
+  ws_net_paid_inc_ship : double?,
+  ws_net_paid_inc_ship_tax : double?,
+  ws_net_profit : double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create type tpcds.web_returns_type as
+ closed {
+  wr_returned_date_sk : bigint?,
+  wr_returned_time_sk : bigint?,
+  wr_item_sk : bigint,
+  wr_refunded_customer_sk : bigint?,
+  wr_refunded_cdemo_sk : bigint?,
+  wr_refunded_hdemo_sk : bigint?,
+  wr_refunded_addr_sk : bigint?,
+  wr_returning_customer_sk : bigint?,
+  wr_returning_cdemo_sk : bigint?,
+  wr_returning_hdemo_sk : bigint?,
+  wr_returning_addr_sk : bigint?,
+  wr_web_page_sk : bigint?,
+  wr_reason_sk : bigint?,
+  wr_order_number : bigint,
+  wr_return_quantity : bigint?,
+  wr_return_amt : double?,
+  wr_return_tax : double?,
+  wr_return_amt_inc_tax : double?,
+  wr_fee : double?,
+  wr_return_ship_cost: double?,
+  wr_refunded_cash: double?,
+  wr_reversed_charge: double?,
+  wr_account_credit: double?,
+  wr_net_loss: double?
+}
+
+create type tpcds.web_site_type as
+ closed {
+  web_site_sk:               bigint,
+  web_site_id:               string,
+  web_rec_start_date:        string?,
+  web_rec_end_date:          string?,
+  web_name:                  string?,
+  web_open_date_sk:          bigint?,
+  web_close_date_sk:         bigint?,
+  web_class:                 string?,
+  web_manager:               string?,
+  web_mkt_id:                bigint?,
+  web_mkt_class:             string?,
+  web_mkt_desc:              string?,
+  web_market_manager:        string?,
+  web_company_id:            bigint?,
+  web_company_name:          string?,
+  web_street_number:         string?,
+  web_street_name:           string?,
+  web_street_type:           string?,
+  web_suite_number:          string?,
+  web_city:                  string?,
+  web_county:                string?,
+  web_state:                 string?,
+  web_zip:                   string?,
+  web_country:               string?,
+  web_gmt_offset:            double?,
+  web_tax_percentage:        double?
+}
+
+create dataset customer_address(customer_address_type) primary key ca_address_sk;
+
+create dataset web_sales (web_sales_type) primary key ws_item_sk, ws_order_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
+
+create dataset web_returns (web_returns_type) primary key wr_item_sk, wr_order_number;
+
+create dataset web_site (web_site_type) primary key web_site_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.2.update.sqlpp
new file mode 100644
index 0000000..08a8f79
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.2.update.sqlpp
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset customer_address using localfs ((`path`=`asterix_nc1://data/tpcds/customer_address.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_sales using localfs ((`path`=`asterix_nc1://data/tpcds/web_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_returns using localfs ((`path`=`asterix_nc1://data/tpcds/web_returns.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset web_site using localfs ((`path`=`asterix_nc1://data/tpcds/web_site.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.3.query.sqlpp
new file mode 100644
index 0000000..e32122f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q95/q95.3.query.sqlpp
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
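+-- ws_wh: self-join of web_sales listing order numbers shipped from at least two
+-- different warehouses. The outer query counts distinct Texas / 'pri' orders
+-- shipped between 1999-05-01 and 1999-06-30 that appear in ws_wh and also have
+-- a matching return in web_returns.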
+WITH ws_wh AS
+(SELECT ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2
+ FROM web_sales ws1,web_sales ws2
+ WHERE ws1.ws_order_number = ws2.ws_order_number
+   AND ws1.ws_warehouse_sk != ws2.ws_warehouse_sk)
+ SELECT
+    count(ws_order_number) AS order_count
+  , sum(ws_ext_ship_cost) AS total_shipping_cost
+  , sum(ws_net_profit) AS total_net_profit
+ FROM
+ (
+ SELECT
+   distinct ws_order_number
+  ,ws_ext_ship_cost
+  ,ws_net_profit
+FROM
+   web_sales ws1
+  ,date_dim
+  ,customer_address
+  ,web_site
+WHERE
+    date(d_date) >= date('1999-05-01')
+AND date(d_date) <= date('1999-06-30')
+AND ws1.ws_ship_date_sk = d_date_sk
+AND ws1.ws_ship_addr_sk = ca_address_sk
+AND ca_state = 'TX'
+AND ws1.ws_web_site_sk = web_site_sk
+AND web_company_name = 'pri'
+AND ws1.ws_order_number IN (SELECT VALUE ws_wh.ws_order_number
+                            FROM ws_wh)
+AND ws1.ws_order_number IN (SELECT VALUE wr_order_number
+                            FROM web_returns,ws_wh
+                            WHERE wr_order_number = ws_wh.ws_order_number)
+) t1
+ORDER BY COUNT(ws_order_number)
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.1.ddl.sqlpp
new file mode 100644
index 0000000..342fecd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.1.ddl.sqlpp
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.household_demographics_type as
+ closed {
+  hd_demo_sk : bigint,
+  hd_income_band_sk : bigint?,
+  hd_buy_potential : string?,
+  hd_dep_count : bigint?,
+  hd_vehicle_count : bigint?
+}
+create type tpcds.time_dim_type as
+ closed {
+  t_time_sk : bigint,
+  t_time_id : string,
+  t_time : bigint?,
+  t_hour : bigint?,
+  t_minute : bigint?,
+  t_second : bigint?,
+  t_am_pm : string?,
+  t_shift : string?,
+  t_sub_shift : string?,
+  t_meal_time : string?
+}
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.store_type as
+ closed {
+  s_store_sk : bigint,
+  s_store_id : string,
+  s_rec_start_date : string?,
+  s_rec_end_date : string?,
+  s_closed_date_sk : bigint?,
+  s_store_name : string?,
+  s_number_employees : bigint?,
+  s_floor_space : bigint?,
+  s_hours : string?,
+  s_manager : string?,
+  s_market_id : bigint?,
+  s_geography_class : string?,
+  s_market_desc : string?,
+  s_market_manager : string?,
+  s_division_id : bigint?,
+  s_division_name : string?,
+  s_company_id : bigint?,
+  s_company_name : string?,
+  s_street_number : string?,
+  s_street_name : string?,
+  s_street_type : string?,
+  s_suite_number : string?,
+  s_city : string?,
+  s_county : string?,
+  s_state : string?,
+  s_zip : string?,
+  s_country : string?,
+  s_gmt_offset : double?,
+  s_tax_precentage : double?
+}
+
+create dataset household_demographics (household_demographics_type) primary key hd_demo_sk;
+
+create dataset tpcds.time_dim(time_dim_type) primary key t_time_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset store (store_type) primary key s_store_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.2.update.sqlpp
new file mode 100644
index 0000000..a673f5b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.2.update.sqlpp
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpcds;
+
+
+load dataset household_demographics using localfs ((`path`=`asterix_nc1://data/tpcds/household_demographics.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset time_dim using localfs ((`path`=`asterix_nc1://data/tpcds/time_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store using localfs ((`path`=`asterix_nc1://data/tpcds/store.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.3.query.sqlpp
similarity index 62%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.3.query.sqlpp
index af2f691..1fa8c67 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q96/q96.3.query.sqlpp
@@ -16,4 +16,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+
+USE tpcds;
+
+SELECT COUNT(soldaftertable)
+FROM
+(SELECT  *
+FROM store_sales,
+           household_demographics,
+           time_dim, store
+WHERE ss_sold_time_sk = time_dim.t_time_sk
+    AND ss_hdemo_sk = household_demographics.hd_demo_sk
+    AND ss_store_sk = s_store_sk
+    AND time_dim.t_hour = 8
+    AND time_dim.t_minute >= 30
+    AND household_demographics.hd_dep_count = 5
+    AND store.s_store_name = 'ese'
+) AS soldaftertable
+ORDER BY COUNT(soldaftertable)
+LIMIT 100;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.1.ddl.sqlpp
new file mode 100644
index 0000000..4769c9c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.1.ddl.sqlpp
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop dataverse tpcds if exists;
+create dataverse tpcds;
+
+use tpcds;
+
+
+create type tpcds.item_type as
+ closed {
+  i_item_sk : bigint,
+  i_item_id : string,
+  i_rec_start_date : string?,
+  i_rec_end_date : string?,
+  i_item_desc : string?,
+  i_current_price : double?,
+  i_wholesale_cost : double?,
+  i_brand_id : bigint?,
+  i_brand : string?,
+  i_class_id : bigint?,
+  i_class : string?,
+  i_category_id : bigint?,
+  i_category : string?,
+  i_manufact_id : bigint?,
+  i_manufact : string?,
+  i_size : string?,
+  i_formulation : string?,
+  i_color : string?,
+  i_units : string?,
+  i_container : string?,
+  i_manager_id : bigint?,
+  i_product_name : string?
+}
+
+create type tpcds.store_sales_type as
+ closed {
+  ss_sold_date_sk:           bigint?,
+  ss_sold_time_sk:           bigint?,
+  ss_item_sk:                bigint,
+  ss_customer_sk:            bigint?,
+  ss_cdemo_sk:               bigint?,
+  ss_hdemo_sk:               bigint?,
+  ss_addr_sk:                bigint?,
+  ss_store_sk:               bigint?,
+  ss_promo_sk:               bigint?,
+  ss_ticket_number:          bigint,
+  ss_quantity:               bigint?,
+  ss_wholesale_cost:         double?,
+  ss_list_price:             double?,
+  ss_sales_price:            double?,
+  ss_ext_discount_amt:       double?,
+  ss_ext_sales_price:        double?,
+  ss_ext_wholesale_cost:     double?,
+  ss_ext_list_price:         double?,
+  ss_ext_tax:                double?,
+  ss_coupon_amt:             double?,
+  ss_net_paid:               double?,
+  ss_net_paid_inc_tax:       double?,
+  ss_net_profit:             double?
+}
+
+create type tpcds.date_dim_type as
+ closed {
+  d_date_sk : bigint,
+  d_date_id : string,
+  d_date : string?,
+  d_month_seq : bigint?,
+  d_week_seq : bigint?,
+  d_quarter_seq : bigint?,
+  d_year : bigint? ,
+  d_dow : bigint? ,
+  d_moy : bigint?,
+  d_dom : bigint?,
+  d_qoy : bigint?,
+  d_fy_year : bigint?,
+  d_fy_quarter_seq : bigint?,
+  d_fy_week_seq : bigint?,
+  d_day_name : string?,
+  d_quarter_name : string?,
+  d_holiday : string?,
+  d_weekend : string?,
+  d_following_holiday : string?,
+  d_first_dom : bigint?,
+  d_last_dom : bigint?,
+  d_same_day_ly : bigint?,
+  d_same_day_lq : bigint?,
+  d_current_day : string?,
+  d_current_week : string?,
+  d_current_month : string?,
+  d_current_quarter : string?,
+  d_current_year : string?
+}
+
+create dataset item (item_type) primary key i_item_sk;
+
+create dataset store_sales (store_sales_type) primary key ss_item_sk, ss_ticket_number;
+
+create dataset date_dim(date_dim_type) primary key d_date_sk;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.2.update.sqlpp
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.2.update.sqlpp
index af2f691..71629d8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.2.update.sqlpp
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+use tpcds;
+
+
+load dataset item using localfs ((`path`=`asterix_nc1://data/tpcds/item.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset store_sales using localfs ((`path`=`asterix_nc1://data/tpcds/store_sales.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
+
+load dataset date_dim using localfs ((`path`=`asterix_nc1://data/tpcds/date_dim.csv`),(`format`=`delimited-text`),(`delimiter`=`|`));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.3.query.sqlpp
new file mode 100644
index 0000000..99d81e0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpcds/q98/q98.3.query.sqlpp
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+USE tpcds;
+
+SELECT
+       currpricetable.i_item_desc,
+       currpricetable.i_category,
+       currpricetable.i_class,
+       currpricetable.i_current_price,
+       currpricetable.itemrevenue,
+       (currpricetable.itemrevenue * 100 / revrattable.revrat) revenueratio
+FROM
+(SELECT i_item_desc,
+        i_category,
+        i_class,
+        i_current_price,
+        i_item_id,
+        SUM(ss_ext_sales_price) itemrevenue
+FROM
+  store_sales,
+  item,
+  date_dim
+WHERE
+  ss_item_sk = i_item_sk
+  AND i_category IN ["Jewelry", "Sports", "Books"]
+  AND ss_sold_date_sk = d_date_sk
+  AND date(d_date) >= date('2001-01-12')
+  AND date(d_date) <= date('2001-02-11')
+GROUP BY
+  i_item_id,
+  i_item_desc,
+  i_category,
+  i_class,
+  i_current_price
+) AS currpricetable
+JOIN
+(SELECT i_item_id, i_item_desc, i_category, i_class, SUM(ss_ext_sales_price) revrat
+  FROM
+  store_sales,
+  item
+  WHERE ss_item_sk = i_item_sk
+  GROUP BY
+     i_item_id,
+     i_item_desc,
+     i_category,
+     i_class
+) AS revrattable
+ON  currpricetable.i_item_id = revrattable.i_item_id
+AND currpricetable.i_class = revrattable.i_class
+AND currpricetable.i_item_desc = revrattable.i_item_desc
+AND currpricetable.i_category = revrattable.i_category
+ORDER BY
+  currpricetable.i_category,
+  currpricetable.i_class,
+  currpricetable.i_item_id,
+  currpricetable.i_item_desc,
+  revenueratio;
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isarray/isarray.1.query.sqlpp
similarity index 78%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isarray/isarray.1.query.sqlpp
index af2f691..ab87cb1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isarray/isarray.1.query.sqlpp
@@ -16,4 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+{
+  "a": isarray(true),
+  "b": isarray(false),
+  "c": isarray(null),
+  "d": isarray(missing),
+  "e": is_array("d"),
+  "f": is_array(4.0),
+  "g": isarray(5),
+  "h": isarray(["1", 2]),
+  "i": isarray({"a":1})
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isboolean/isboolean.1.query.sqlpp
similarity index 78%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isboolean/isboolean.1.query.sqlpp
index af2f691..de3c64e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isboolean/isboolean.1.query.sqlpp
@@ -16,4 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+{
+  "a": isboolean(true),
+  "b": isboolean(false),
+  "c": is_boolean(null),
+  "d": is_boolean(missing),
+  "e": isbool("d"),
+  "f": isbool(4.0),
+  "g": isbool(5),
+  "h": isbool(["1", 2]),
+  "i": isbool({"a":1})
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isnumber/isnumber.1.query.sqlpp
similarity index 78%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isnumber/isnumber.1.query.sqlpp
index af2f691..1e153d3 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isnumber/isnumber.1.query.sqlpp
@@ -16,4 +16,16 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+{
+  "a": isnumber(true),
+  "b": isnumber(false),
+  "c": isnumber(null),
+  "d": is_number(missing),
+  "e": is_number("d"),
+  "f": isnumber(4.0),
+  "g": isnum(5),
+  "h": isnum(["1", 2]),
+  "i": isnum({"a":1})
+};
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isobject/isobject.1.query.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isobject/isobject.1.query.sqlpp
index af2f691..8305977 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isobject/isobject.1.query.sqlpp
@@ -16,4 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+{
+  "a": isobject(true),
+  "b": isobject(false),
+  "c": is_object(null),
+  "d": is_object(missing),
+  "e": isobj("d"),
+  "f": isobj(4.0),
+  "g": isobj(5),
+  "h": isobj(["1", 2]),
+  "i": isobj({"a":1})
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isstring/isstring.1.query.sqlpp
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isstring/isstring.1.query.sqlpp
index af2f691..a5caf3c 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/types/isstring/isstring.1.query.sqlpp
@@ -16,4 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+{
+  "a": is_string(true),
+  "b": isstring(false),
+  "c": isstring(null),
+  "d": isstr(missing),
+  "e": isstr("d"),
+  "f": isstr(4.0),
+  "g": isstr(5),
+  "h": isstr(["1", 2]),
+  "i": isstr({"a":1})
+};
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
index ea002ab..d076f74 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
@@ -7,20 +7,53 @@
     "config": {
         "api.port": 19002,
         "cc.java.opts": "-Xmx1024m",
+        "cluster.partitions": {
+            "0": "ID:0, Original Node: asterix_nc1, IODevice: 0, Active Node: asterix_nc1",
+            "1": "ID:1, Original Node: asterix_nc1, IODevice: 1, Active Node: asterix_nc1",
+            "2": "ID:2, Original Node: asterix_nc2, IODevice: 0, Active Node: asterix_nc2",
+            "3": "ID:3, Original Node: asterix_nc2, IODevice: 1, Active Node: asterix_nc2"
+        },
         "compiler.framesize": 32768,
         "compiler.groupmemory": 163840,
         "compiler.joinmemory": 163840,
         "compiler.pregelix.home": "~/pregelix",
         "compiler.sortmemory": 327680,
+        "core.dump.paths": {},
         "feed.central.manager.port": 4500,
         "feed.max.threshold.period": 5,
         "feed.memory.available.wait.timeout": 10,
         "feed.memory.global.budget": 67108864,
         "feed.pending.work.threshold": 50,
         "feed.port": 19003,
+        "instance.name": null,
         "log.level": "INFO",
         "max.wait.active.cluster": 60,
+        "metadata.callback.port": 0,
+        "metadata.node": "asterix_nc1",
+        "metadata.partition": "ID:0, Original Node: asterix_nc1, IODevice: 0, Active Node: asterix_nc1",
+        "metadata.port": 0,
+        "metadata.registration.timeout.secs": 60,
         "nc.java.opts": "-Xmx1024m",
+        "node.partitions": {
+            "asterix_nc1": [
+                "ID:0, Original Node: asterix_nc1, IODevice: 0, Active Node: asterix_nc1",
+                "ID:1, Original Node: asterix_nc1, IODevice: 1, Active Node: asterix_nc1"
+            ],
+            "asterix_nc2": [
+                "ID:2, Original Node: asterix_nc2, IODevice: 0, Active Node: asterix_nc2",
+                "ID:3, Original Node: asterix_nc2, IODevice: 1, Active Node: asterix_nc2"
+            ]
+        },
+        "node.stores": {
+            "asterix_nc1": [
+                "iodevice0",
+                "iodevice1"
+            ],
+            "asterix_nc2": [
+                "iodevice0",
+                "iodevice1"
+            ]
+        },
         "plot.activate": false,
         "replication.enabled": false,
         "replication.factor": 2,
@@ -38,6 +71,10 @@
         "storage.memorycomponent.numpages": 8,
         "storage.memorycomponent.pagesize": 131072,
         "storage.metadata.memorycomponent.numpages": 256,
+        "transaction.log.dirs": {
+            "asterix_nc1": "target/txnLogDir/asterix_nc1",
+            "asterix_nc2": "target/txnLogDir/asterix_nc2"
+        },
         "txn.commitprofiler.reportinterval": 5,
         "txn.job.recovery.memorysize": 67108864,
         "txn.lock.escalationthreshold": 1000,
@@ -95,4 +132,4 @@
     "shutdownUri": "http://127.0.0.1:19002/admin/shutdown",
     "state": "ACTIVE",
     "versionUri": "http://127.0.0.1:19002/admin/version"
-}
\ No newline at end of file
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.1.adm
new file mode 100644
index 0000000..d1d3a36
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/constructor/polygon-from-open-list_issue1627/polygon-from-open-list_issue1627.1.adm
@@ -0,0 +1 @@
+polygon("-87.359296,35.00118 -85.606675,34.984749 -85.431413,34.124869 -85.184951,32.859696 -85.069935,32.580372 -84.960397,32.421541 -85.004212,32.322956 -84.889196,32.262709 -85.058981,32.13674 -85.053504,32.01077 -85.141136,31.840985 -85.042551,31.539753 -85.113751,31.27686 -85.004212,31.003013 -85.497137,30.997536 -87.600282,30.997536 -87.633143,30.86609 -87.408589,30.674397 -87.446927,30.510088 -87.37025,30.427934 -87.518128,30.280057 -87.655051,30.247195 -87.90699,30.411504 -87.934375,30.657966 -88.011052,30.685351 -88.10416,30.499135 -88.137022,30.318396 -88.394438,30.367688 -88.471115,31.895754 -88.241084,33.796253 -88.098683,34.891641 -88.202745,34.995703 -87.359296,35.00118 -94.473842,36.501861 -90.152536,36.496384 -90.064905,36.304691 -90.218259,36.184199 -90.377091,35.997983 -89.730812,35.997983 -89.763673,35.811767 -89.911551,35.756997 -89.944412,35.603643 -90.130628,35.439335 -90.114197,35.198349 -90.212782,35.023087 -90.311367,34.995703 -90.251121,34.908072 -90.409952,34.831394 -90.481152,34.661609 -90.585214,34.617794 -90.568783,34.420624 -90.749522,34.365854 -90.744046,34.300131 -90.952169,34.135823 -90.891923,34.026284 -91.072662,33.867453 -91.231493,33.560744 -91.056231,33.429298 -91.143862,33.347144 -91.089093,33.13902 -91.16577,33.002096 -93.608485,33.018527 -94.041164,33.018527 -94.041164,33.54979 -94.183564,33.593606 -94.380734,33.544313 -94.484796,33.637421 -94.430026,35.395519 -94.616242,36.501861 -94.473842,36.501861 -109.042503,37.000263 -109.04798,31.331629 -111.074448,31.331629 -112.246513,31.704061 -114.815198,32.492741 -114.72209,32.717295 -114.524921,32.755634 -114.470151,32.843265 -114.524921,33.029481 -114.661844,33.034958 -114.727567,33.40739 -114.524921,33.54979 -114.497536,33.697668 -114.535874,33.933176 -114.415382,34.108438 -114.256551,34.174162 -114.136058,34.305608 -114.333228,34.448009 -114.470151,34.710902 -114.634459,34.87521 -114.634459,35.00118 -114.574213,35.138103 -114.596121,35.324319 -114.678275,35.516012 -114.738521,36.102045 -114.371566,36.140383 -114.251074,36.01989 -114.152489,36.025367 -114.048427,36.195153 -114.048427,37.000263 -110.499369,37.00574 -109.042503,37.000263 -123.233256,42.006186 -122.378853,42.011663 -121.037003,41.995232 -120.001861,41.995232 -119.996384,40.264519 -120.001861,38.999346 -118.71478,38.101128 -117.498899,37.21934 -116.540435,36.501861 -115.85034,35.970598 -114.634459,35.00118 -114.634459,34.87521 -114.470151,34.710902 -114.333228,34.448009 -114.136058,34.305608 -114.256551,34.174162 -114.415382,34.108438 -114.535874,33.933176 -114.497536,33.697668 -114.524921,33.54979 -114.727567,33.40739 -114.661844,33.034958 -114.524921,33.029481 -114.470151,32.843265 -114.524921,32.755634 -114.72209,32.717295 -116.04751,32.624187 -117.126467,32.536556 -117.24696,32.668003 -117.252437,32.876127 -117.329114,33.122589 -117.471515,33.297851 -117.7837,33.538836 -118.183517,33.763391 -118.260194,33.703145 -118.413548,33.741483 -118.391641,33.840068 -118.566903,34.042715 -118.802411,33.998899 -119.218659,34.146777 -119.278905,34.26727 -119.558229,34.415147 -119.875891,34.40967 -120.138784,34.475393 -120.472878,34.448009 -120.64814,34.579455 -120.609801,34.858779 -120.670048,34.902595 -120.631709,35.099764 -120.894602,35.247642 -120.905556,35.450289 -121.004141,35.461243 -121.168449,35.636505 -121.283465,35.674843 -121.332757,35.784382 -121.716143,36.195153 -121.896882,36.315645 -121.935221,36.638785 -121.858544,36.6114 -121.787344,36.803093 -121.929744,36.978355 -122.105006,36.956447 -122.335038,37.115279 -122.417192,37.241248 
-122.400761,37.361741 -122.515777,37.520572 -122.515777,37.783465 -122.329561,37.783465 -122.406238,38.15042 -122.488392,38.112082 -122.504823,37.931343 -122.701993,37.893004 -122.937501,38.029928 -122.97584,38.265436 -123.129194,38.451652 -123.331841,38.566668 -123.44138,38.698114 -123.737134,38.95553 -123.687842,39.032208 -123.824765,39.366301 -123.764519,39.552517 -123.85215,39.831841 -124.109566,40.105688 -124.361506,40.259042 -124.410798,40.439781 -124.158859,40.877937 -124.109566,41.025814 -124.158859,41.14083 -124.065751,41.442061 -124.147905,41.715908 -124.257444,41.781632 -124.213628,42.000709 -123.233256,42.006186 -107.919731,41.003906 -105.728954,40.998429 -104.053011,41.003906 -102.053927,41.003906 -102.053927,40.001626 -102.042974,36.994786 -103.001438,37.000263 -104.337812,36.994786 -106.868158,36.994786 -107.421329,37.000263 -109.042503,37.000263 -109.042503,38.166851 -109.058934,38.27639 -109.053457,39.125316 -109.04798,40.998429 -107.919731,41.003906 -73.053528,42.039048 -71.799309,42.022617 -71.799309,42.006186 -71.799309,41.414677 -71.859555,41.321569 -71.947186,41.338 -72.385341,41.261322 -72.905651,41.28323 -73.130205,41.146307 -73.371191,41.102491 -73.655992,40.987475 -73.727192,41.102491 -73.48073,41.21203 -73.55193,41.294184 -73.486206,42.050002 -73.053528,42.039048 -77.035264,38.993869 -76.909294,38.895284 -77.040741,38.791222 -77.117418,38.933623 -77.035264,38.993869 -75.414089,39.804456 -75.507197,39.683964 -75.611259,39.61824 -75.589352,39.459409 -75.441474,39.311532 -75.403136,39.065069 -75.189535,38.807653 -75.09095,38.796699 -75.047134,38.451652 -75.693413,38.462606 -75.786521,39.722302 -75.616736,39.831841 -75.414089,39.804456 -85.497137,30.997536 -85.004212,31.003013 -84.867289,30.712735 -83.498053,30.647012 -82.216449,30.570335 -82.167157,30.356734 -82.046664,30.362211 -82.002849,30.564858 -82.041187,30.751074 -81.948079,30.827751 -81.718048,30.745597 -81.444201,30.707258 -81.383954,30.27458 -81.257985,29.787132 -80.967707,29.14633 -80.524075,28.461713 -80.589798,28.41242 -80.56789,28.094758 -80.381674,27.738757 -80.091397,27.021277 -80.03115,26.796723 -80.036627,26.566691 -80.146166,25.739673 -80.239274,25.723243 -80.337859,25.465826 -80.304997,25.383672 -80.49669,25.197456 -80.573367,25.241272 -80.759583,25.164595 -81.077246,25.120779 -81.170354,25.224841 -81.126538,25.378195 -81.351093,25.821827 -81.526355,25.903982 -81.679709,25.843735 -81.800202,26.090198 -81.833064,26.292844 -82.041187,26.517399 -82.09048,26.665276 -82.057618,26.878877 -82.172634,26.917216 -82.145249,26.791246 -82.249311,26.758384 -82.566974,27.300601 -82.692943,27.437525 -82.391711,27.837342 -82.588881,27.815434 -82.720328,27.689464 -82.851774,27.886634 -82.676512,28.434328 -82.643651,28.888914 -82.764143,28.998453 -82.802482,29.14633 -82.994175,29.179192 -83.218729,29.420177 -83.399469,29.518762 -83.410422,29.66664 -83.536392,29.721409 -83.640454,29.885717 -84.02384,30.104795 -84.357933,30.055502 -84.341502,29.902148 -84.451041,29.929533 -84.867289,29.743317 -85.310921,29.699501 -85.299967,29.80904 -85.404029,29.940487 -85.924338,30.236241 -86.29677,30.362211 -86.630863,30.395073 -86.910187,30.373165 -87.518128,30.280057 -87.37025,30.427934 -87.446927,30.510088 -87.408589,30.674397 -87.633143,30.86609 -87.600282,30.997536 -85.497137,30.997536 -83.109191,35.00118 -83.322791,34.787579 -83.339222,34.683517 -83.005129,34.469916 -82.901067,34.486347 -82.747713,34.26727 -82.714851,34.152254 -82.55602,33.94413 -82.325988,33.81816 -82.194542,33.631944 -81.926172,33.462159 
-81.937125,33.347144 -81.761863,33.160928 -81.493493,33.007573 -81.42777,32.843265 -81.416816,32.629664 -81.279893,32.558464 -81.121061,32.290094 -81.115584,32.120309 -80.885553,32.032678 -81.132015,31.693108 -81.175831,31.517845 -81.279893,31.364491 -81.290846,31.20566 -81.400385,31.13446 -81.444201,30.707258 -81.718048,30.745597 -81.948079,30.827751 -82.041187,30.751074 -82.002849,30.564858 -82.046664,30.362211 -82.167157,30.356734 -82.216449,30.570335 -83.498053,30.647012 -84.867289,30.712735 -85.004212,31.003013 -85.113751,31.27686 -85.042551,31.539753 -85.141136,31.840985 -85.053504,32.01077 -85.058981,32.13674 -84.889196,32.262709 -85.004212,32.322956 -84.960397,32.421541 -85.069935,32.580372 -85.184951,32.859696 -85.431413,34.124869 -85.606675,34.984749 -84.319594,34.990226 -83.618546,34.984749 -83.109191,35.00118")
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.1.adm
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/misc/query-ASTERIXDB-1671/query-ASTERIXDB-1671.1.adm
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/null-missing/query-ASTERIXDB-1689/query-ASTERIXDB-1689.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/null-missing/query-ASTERIXDB-1689/query-ASTERIXDB-1689.1.adm
new file mode 100644
index 0000000..6578fd5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/null-missing/query-ASTERIXDB-1689/query-ASTERIXDB-1689.1.adm
@@ -0,0 +1 @@
+{  }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.adm
new file mode 100644
index 0000000..7c7ddd0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/quantifiers/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.adm
@@ -0,0 +1 @@
+{ "msgs": [ { "name": "EmoryUnk", "msg": " love verizon its wireless is good" }, { "name": "EmoryUnk", "msg": " love sprint its shortcut-menu is awesome:)" } ], "name": "EmoryUnk" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.adm
new file mode 100644
index 0000000..b8fbf3c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/subquery/query-ASTERIXDB-1674/query-ASTERIXDB-1674.1.adm
@@ -0,0 +1,2 @@
+{ "id": 4, "msgs": [  ], "name": "NicholasStroh" }
+{ "id": 8, "msgs": [  ], "name": "NilaMilliron" }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q03/q03.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q03/q03.1.adm
new file mode 100644
index 0000000..31182dc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q03/q03.1.adm
@@ -0,0 +1 @@
+{ "d_year": 2001, "brand_id": 10008011, "brand": "corpnameless #3", "sum_agg": 4332.24 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q07/q07.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q07/q07.1.adm
new file mode 100644
index 0000000..cfedddb
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q07/q07.1.adm
@@ -0,0 +1 @@
+{ "i_item_id": "AAAAAAAACAAAAAAA", "agg1": 630.0, "agg2": 3.83, "agg3": 0.0, "agg4": 1.11 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q15/q15.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q15/q15.1.adm
new file mode 100644
index 0000000..cdb4592
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q15/q15.1.adm
@@ -0,0 +1 @@
+{ "ca_zip": "35708", "$1": 108.92 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q19/q19.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q19/q19.1.adm
new file mode 100644
index 0000000..5820ef4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q19/q19.1.adm
@@ -0,0 +1 @@
+{ "brand_id": 2002002, "brand": "exportiimporto #1", "i_manufact_id": 212, "i_manufact": "barableable", "ext_price": 8484.84 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q21/q21.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q21/q21.1.adm
new file mode 100644
index 0000000..053af7e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q21/q21.1.adm
@@ -0,0 +1 @@
+{ "x": { "w_warehouse_name": "Important issues liv", "i_item_id": "AAAAAAAAEAAAAAAA", "inv_before": 200, "inv_after": 300 } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q24a/q24a.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q24a/q24a.1.adm
new file mode 100644
index 0000000..1d7ade5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q24a/q24a.1.adm
@@ -0,0 +1 @@
+{ "c_last_name": "Sharp", "c_first_name": "Brunilda", "s_store_name": "anti", "paid": 83.98 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q24b/q24b.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q24b/q24b.1.adm
new file mode 100644
index 0000000..420127b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q24b/q24b.1.adm
@@ -0,0 +1 @@
+{ "c_last_name": "Sharp", "c_first_name": "Brunilda", "s_store_name": "ought", "paid": 6.68 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q25/q25.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q25/q25.1.adm
new file mode 100644
index 0000000..c73f5cf
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q25/q25.1.adm
@@ -0,0 +1 @@
+{ "i_item_id": "AAAAAAAAOAAAAAAA", "i_item_desc": "Teachers carry by the children; old democrats enco", "s_store_id": "AAAAAAAACAAAAAAA", "s_store_name": "ese", "store_sales_profit": -3833.05, "store_returns_loss": 1005.17, "catalog_sales_profit": 5261.55 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q26/q26.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q26/q26.1.adm
new file mode 100644
index 0000000..7fc4a51
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q26/q26.1.adm
@@ -0,0 +1 @@
+{ "i_item_id": "AAAAAAAAKAAAAAAA", "agg1": 7.0, "agg2": 40.88, "agg3": 11.56, "agg4": 51.91 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q29/q29.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q29/q29.1.adm
new file mode 100644
index 0000000..af7a17b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q29/q29.1.adm
@@ -0,0 +1 @@
+{ "i_item_id": "AAAAAAAAOAAAAAAA", "i_item_desc": "Teachers carry by the children; old democrats enco", "s_store_id": "AAAAAAAACAAAAAAA", "s_store_name": "ese", "store_sales_quantity": 565, "store_returns_quantity": 46, "catalog_sales_quantity": 29 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q30/q30.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q30/q30.1.adm
new file mode 100644
index 0000000..9833a6b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q30/q30.1.adm
@@ -0,0 +1 @@
+{ "c_customer_id": "AAAAAAAAJAAAAAAA", "c_salutation": "Sir", "c_first_name": "Karl", "c_last_name": "Gilbert", "c_preferred_cust_flag": "N", "c_birth_day": 26, "c_birth_month": 10, "c_birth_year": 1966, "c_birth_country": "MONTSERRAT", "c_login": null, "c_email_address": "Karl.Gilbert@Crg5KyP2IxX9C4d6.edu", "c_last_review_date": "2452454", "ctr_total_return": 100.87 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q31/q31.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q31/q31.1.adm
new file mode 100644
index 0000000..e8d70a1
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q31/q31.1.adm
@@ -0,0 +1 @@
+{ "ca_county": "Taos County", "d_year": 2000, "web_q1_q2_increase": 1.0, "store_q1_q2_increase": 0.0986730622674497, "web_q2_q3_increase": 0.21626622172784574, "store_q2_q3_increase": 0.0291899504793058 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q34/q34.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q34/q34.1.adm
new file mode 100644
index 0000000..b59d026
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q34/q34.1.adm
@@ -0,0 +1 @@
+{ "c_last_name": "Sharp", "c_first_name": "Brunilda", "c_salutation": "Ms.", "c_preferred_cust_flag": "N", "ss_ticket_number": 1, "cnt": 15 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q37/q37.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q37/q37.1.adm
new file mode 100644
index 0000000..fc3ec07
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q37/q37.1.adm
@@ -0,0 +1 @@
+{ "i_item_id": "AAAAAAAABAAAAAAA", "i_item_desc": "Powers will not get influences. Electoral ports should show low, annual chains. Now young visitors may pose now however final pages. Bitterly right children suit increasing, leading el", "i_current_price": 27.02 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q41/q41.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q41/q41.1.adm
new file mode 100644
index 0000000..ae29f2e
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q41/q41.1.adm
@@ -0,0 +1 @@
+{ "i_product_name": "ese" }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q42/q42.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q42/q42.1.adm
new file mode 100644
index 0000000..a252a02
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q42/q42.1.adm
@@ -0,0 +1,2 @@
+{ "d_year": 1998, "i_category_id": 3, "i_category": "Women", "$1": 85.4 }
+{ "d_year": 1998, "i_category_id": 7, "i_category": "Home", "$1": 33.3 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q43/q43.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q43/q43.1.adm
new file mode 100644
index 0000000..cd98ef2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q43/q43.1.adm
@@ -0,0 +1,2 @@
+{ "s_store_name": "able", "s_store_id": "AAAAAAAACAAAAAAA", "sun_sales": null, "mon_sales": null, "tue_sales": 1.11, "wed_sales": null, "thu_sales": null, "fri_sales": null, "sat_sales": null }
+{ "s_store_name": "anti", "s_store_id": "AAAAAAAAEAAAAAAA", "sun_sales": null, "mon_sales": null, "tue_sales": 90.08, "wed_sales": null, "thu_sales": null, "fri_sales": null, "sat_sales": null }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q45/q45.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q45/q45.1.adm
new file mode 100644
index 0000000..e76b10f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q45/q45.1.adm
@@ -0,0 +1 @@
+{ "ca_zip": "35709", "ca_county": "Taos County", "$1": 27.43 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q46/q46.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q46/q46.1.adm
new file mode 100644
index 0000000..356c7dc
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q46/q46.1.adm
@@ -0,0 +1 @@
+{ "c_last_name": "Lewis", "c_first_name": "Javier", "ca_city": "Fairfield", "bought_city": "Pleasant Valley", "ss_ticket_number": 1, "amt": 0.0, "profit": -302.72 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q50/q50.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q50/q50.1.adm
new file mode 100644
index 0000000..7b67f68
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q50/q50.1.adm
@@ -0,0 +1,3 @@
+{ "s_store_name": "ese", "s_company_id": 1, "s_street_number": "877", "s_street_name": "Park Laurel", "s_street_type": "Road", "s_suite_number": "Suite T", "s_city": "Midway", "s_county": "Williamson County", "s_state": "TN", "s_zip": "31904", "c30_days": 1, "c31_60_days": 0, "c61_90_days": 0, "c91_120_days": 0, "gt120_days": 0 }
+{ "s_store_name": "ought", "s_company_id": 1, "s_street_number": "767", "s_street_name": "Spring ", "s_street_type": "Wy", "s_suite_number": "Suite 250", "s_city": "Midway", "s_county": "Williamson County", "s_state": "TN", "s_zip": "35709", "c30_days": 1, "c31_60_days": 0, "c61_90_days": 0, "c91_120_days": 0, "gt120_days": 0 }
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q52/q52.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q52/q52.1.adm
new file mode 100644
index 0000000..056a7a8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q52/q52.1.adm
@@ -0,0 +1,3 @@
+{ "d_year": 1998, "brand_id": 3002001, "brand": "importoexporti #1", "ext_price": 85.4 }
+{ "d_year": 1998, "brand_id": 1001001, "brand": "brandbrand #4", "ext_price": 33.3 }
+
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q55/q55.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q55/q55.1.adm
new file mode 100644
index 0000000..ba1c91c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q55/q55.1.adm
@@ -0,0 +1 @@
+{ "brand_id": 10008011, "brand": "corpnameless #3", "ext_price": 4332.24 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q59/q59.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q59/q59.1.adm
new file mode 100644
index 0000000..c987136
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q59/q59.1.adm
@@ -0,0 +1,2 @@
+{ "s_store_name1": "able", "s_store_id1": "AAAAAAAACAAAAAAA", "d_week_seq1": 1, "$1": null, "$2": null, "$3": null, "$4": null, "$5": null, "$6": null, "$7": null }
+{ "s_store_name1": "ese", "s_store_id1": "AAAAAAAACAAAAAAA", "d_week_seq1": 1, "$1": null, "$2": null, "$3": null, "$4": null, "$5": null, "$6": null, "$7": 2.0351384869780897 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q62/q62.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q62/q62.1.adm
new file mode 100644
index 0000000..99ff95d
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q62/q62.1.adm
@@ -0,0 +1,2 @@
+{ "$1": null, "sm_type": "LIBRARY", "web_name": "site_0", "c30_days": 1, "c31_60_days": 0, "c61_90_days": 0, "c91_120_days": 0, "gt120_days": 0 }
+{ "$1": "Conventional childr", "sm_type": "NEXT DAY", "web_name": "site_0", "c30_days": 1, "c31_60_days": 0, "c61_90_days": 0, "c91_120_days": 0, "gt120_days": 0 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q68/q68.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q68/q68.1.adm
new file mode 100644
index 0000000..beec128
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q68/q68.1.adm
@@ -0,0 +1 @@
+{ "c_last_name": "Sharp", "c_first_name": "Brunilda", "ca_city": "Fairview", "bought_city": "Pleasant Valley", "ss_ticket_number": 1, "extended_price": 4428.0, "extended_tax": 177.12, "list_price": 8051.76 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q73/q73.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q73/q73.1.adm
new file mode 100644
index 0000000..234ece7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q73/q73.1.adm
@@ -0,0 +1 @@
+{ "c_last_name": "Sharp", "c_first_name": "Brunilda", "c_salutation": "Ms.", "c_preferred_cust_flag": "N", "ss_ticket_number": 1, "cnt": 4 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q79/q79.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q79/q79.1.adm
new file mode 100644
index 0000000..74e67ad
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q79/q79.1.adm
@@ -0,0 +1,2 @@
+{ "c_last_name": "Lewis", "c_first_name": "Javier", "$1": "Midway", "ss_ticket_number": 26, "amt": 0.0, "profit": -3833.05 }
+{ "c_last_name": "Wiles", "c_first_name": "Fonda", "$1": "Midway", "ss_ticket_number": 2, "amt": 0.0, "profit": -3833.05 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q81/q81.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q81/q81.1.adm
new file mode 100644
index 0000000..26b0eb2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q81/q81.1.adm
@@ -0,0 +1 @@
+{ "c_customer_id": "AAAAAAAAIAAAAAAA", "c_salutation": "Sir", "c_first_name": "Ollie", "c_last_name": "Shipman", "ca_street_number": "362", "ca_street_name": "Washington 6th", "ca_street_type": "RD", "ca_suite_number": "Suite 80", "ca_city": "Fairview", "ca_county": "Taos County", "ca_state": "IL", "ca_zip": "35709", "ca_country": "Suriname", "ca_gmt_offset": -7.0, "ca_location_type": "condo", "ctr_total_return": 900.87 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q82/q82.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q82/q82.1.adm
new file mode 100644
index 0000000..0339d14
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q82/q82.1.adm
@@ -0,0 +1 @@
+{ "i_item_id": "AAAAAAAAHAAAAAAA", "i_item_desc": "Anxious accounts must catch also years. Revolutionary, large directors used to embrace then mo", "i_current_price": 39.94 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q88/q88.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q88/q88.1.adm
new file mode 100644
index 0000000..8cfbcc0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q88/q88.1.adm
@@ -0,0 +1 @@
+{ "h8_30_to_9": 0, "h9_to_9_30": 0, "h9_30_to_10": 0, "h10_to_10_30": 1, "h10_30_to_11": 0, "h11_to_11_30": 0, "h11_30_to_12": 0, "h12_to_12_30": 1 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q91/q91.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q91/q91.1.adm
new file mode 100644
index 0000000..dd582c4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q91/q91.1.adm
@@ -0,0 +1 @@
+{ "Call_Center": "AAAAAAAACAAAAAAA", "Call_Center_Name": "Mid Atlantic", "Manager": "Felipe Perkins", "Returns_Loss": 106.07 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q94/q94.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q94/q94.1.adm
new file mode 100644
index 0000000..ed2027f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q94/q94.1.adm
@@ -0,0 +1 @@
+{ "order_count": 13, "total_shipping_cost": 4813.119999999999, "total_net_profit": 4754.880000000001 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q95/q95.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q95/q95.1.adm
new file mode 100644
index 0000000..887359c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q95/q95.1.adm
@@ -0,0 +1 @@
+{ "order_count": 1, "total_shipping_cost": 910.86, "total_net_profit": -129.96 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q96/q96.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q96/q96.1.adm
new file mode 100644
index 0000000..73a998b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q96/q96.1.adm
@@ -0,0 +1 @@
+{ "$1": 4 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q98/q98.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q98/q98.1.adm
new file mode 100644
index 0000000..24d4840
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/q98/q98.1.adm
@@ -0,0 +1 @@
+{ "i_item_desc": "Correct, fo", "i_category": "Jewelry", "i_class": "estate", "i_current_price": 54.87, "itemrevenue": 1064.36, "revenueratio": 100.0 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1580/query-ASTERIXDB-1580.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1580/query-ASTERIXDB-1580.1.adm
index 4c6b9f7..aac5eb9 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1580/query-ASTERIXDB-1580.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1580/query-ASTERIXDB-1580.1.adm
@@ -1 +1 @@
-{ "state": "MN", "cnt": 10 }
+{ "state": "IL", "cnt": 28 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-2/query-ASTERIXDB-1581-2.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-2/query-ASTERIXDB-1581-2.1.adm
index c7a4707..2f7bdfc 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-2/query-ASTERIXDB-1581-2.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-2/query-ASTERIXDB-1581-2.1.adm
@@ -1 +1 @@
-{ "bucket1": [ { "$1": 2.16 } ] }
+{ "bucket1": [ { "$1": 24.261666666666667 } ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-4/query-ASTERIXDB-1581-4.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-4/query-ASTERIXDB-1581-4.1.adm
index 92de03a..ecbf2ce 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-4/query-ASTERIXDB-1581-4.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-4/query-ASTERIXDB-1581-4.1.adm
@@ -1 +1 @@
-{ "bucket1": 2.16 }
+{ "bucket1": 24.261666666666667 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-6/query-ASTERIXDB-1581-6.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-6/query-ASTERIXDB-1581-6.1.adm
index 0db3ede..7e6eb68 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-6/query-ASTERIXDB-1581-6.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-6/query-ASTERIXDB-1581-6.1.adm
@@ -1 +1 @@
-{ "bucket1": [ { "$2": -377.0216666666667 } ] }
+{ "bucket1": [ { "$2": -746.2933333333334 } ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-correlated/query-ASTERIXDB-1581-correlated.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-correlated/query-ASTERIXDB-1581-correlated.1.adm
index aad62bb..d9395d8 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-correlated/query-ASTERIXDB-1581-correlated.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1581-correlated/query-ASTERIXDB-1581-correlated.1.adm
@@ -1 +1 @@
-{ "bucket1": [ { "$1": 0.0 } ] }
+{ "bucket1": [ { "$1": 46.03 } ] }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1591/query-ASTERIXDB-1591.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1591/query-ASTERIXDB-1591.1.adm
index d2aaa79..2605cb6 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1591/query-ASTERIXDB-1591.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1591/query-ASTERIXDB-1591.1.adm
@@ -1 +1 @@
-{ "c": { "c_customer_sk": 1, "c_customer_id": "AAAAAAAABAAAAAAA", "c_current_cdemo_sk": 980124, "c_current_hdemo_sk": 7135, "c_current_addr_sk": 13513, "c_first_shipto_date_sk": 2452238, "c_first_sales_date_sk": 2452208, "c_salutation": "Mr.", "c_first_name": "Javier", "c_last_name": "Lewis", "c_preferred_cust_flag": "Y", "c_birth_day": 9, "c_birth_month": 12, "c_birth_year": 1936, "c_birth_country": "CHILE", "c_login": null, "c_email_address": "Javier.Lewis@VFAxlnZEvOx.org", "c_last_review_date": "2452508" }, "ca": { "ca_address_sk": 13513, "ca_address_id": "AAAAAAAAJMEDAAAA", "ca_street_number": "545", "ca_street_name": "Meadow ", "ca_street_type": "RD", "ca_suite_number": "Suite X", "ca_city": "Crystal", "ca_county": "DeKalb County", "ca_state": "MO", "ca_zip": "65258", "ca_country": "United States", "ca_gmt_offset": -6.0, "ca_location_type": "condo" } }
+{ "c": { "c_customer_sk": 1, "c_customer_id": "AAAAAAAABAAAAAAA", "c_current_cdemo_sk": 4, "c_current_hdemo_sk": 5, "c_current_addr_sk": 1, "c_first_shipto_date_sk": 2415022, "c_first_sales_date_sk": 2415023, "c_salutation": "Mr.", "c_first_name": "Javier", "c_last_name": "Lewis", "c_preferred_cust_flag": "Y", "c_birth_day": 9, "c_birth_month": 12, "c_birth_year": 1936, "c_birth_country": "CHILE", "c_login": null, "c_email_address": "Javier.Lewis@VFAxlnZEvOx.org", "c_last_review_date": "2452508" }, "ca": { "ca_address_sk": 1, "ca_address_id": "AAAAAAAABAAAAAAA", "ca_street_number": "18", "ca_street_name": "Jackson ", "ca_street_type": "Parkway", "ca_suite_number": "Suite 280", "ca_city": "Fairfield", "ca_county": "Maricopa County", "ca_state": "IL", "ca_zip": "86192", "ca_country": "United States", "ca_gmt_offset": -7.0, "ca_location_type": "condo" } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1596/query-ASTERIXDB-1596.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1596/query-ASTERIXDB-1596.1.adm
index 6c8c02c..cc33c12 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1596/query-ASTERIXDB-1596.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1596/query-ASTERIXDB-1596.1.adm
@@ -1,19 +1,19 @@
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 38212, "cs_ship_date_sk": 2450886, "cs_bill_customer_sk": 1, "cs_bill_cdemo_sk": 1822764, "cs_bill_hdemo_sk": 5775, "cs_bill_addr_sk": 19986, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 1822764, "cs_ship_hdemo_sk": 5775, "cs_ship_addr_sk": 19986, "cs_call_center_sk": 4, "cs_catalog_page_sk": 62, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 4, "cs_item_sk": 1, "cs_promo_sk": 196, "cs_order_number": 1, "cs_quantity": 47, "cs_wholesale_cost": 27.7, "cs_list_price": 44.32, "cs_sales_price": 42.99, "cs_ext_discount_amt": 62.51, "cs_ext_sales_price": 2020.53, "cs_ext_wholesale_cost": 1301.9, "cs_ext_list_price": 2083.04, "cs_ext_tax": 101.02, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 1041.52, "cs_net_paid": 2020.53, "cs_net_paid_inc_tax": 2121.55, "cs_net_paid_inc_ship": 3062.05, "cs_net_paid_inc_ship_tax": 3163.07, "cs_net_profit": 718.63 }, "i1": { "i_item_sk": 1, "i_item_id": "AAAAAAABAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": null, "i_item_desc": "Powers will not get influences. Electoral ports should show low, annual chains. Now young visitors may pose now however final pages. Bitterly right children suit increasing, leading el", "i_current_price": 27.02, "i_wholesale_cost": 23.23, "i_brand_id": 5003002, "i_brand": "exportischolar #2", "i_class_id": 3, "i_class": "pop", "i_category_id": 5, "i_category": "Music", "i_manufact_id": 52, "i_manufact": "ableanti", "i_size": "N/A", "i_formulation": "3663peru009490160959", "i_color": "spring", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 6, "i_product_name": "ought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450862, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 23, "cs_ship_mode_sk": 16, "cs_warehouse_sk": 5, "cs_item_sk": 1, "cs_promo_sk": 272, "cs_order_number": 3, "cs_quantity": 45, "cs_wholesale_cost": 9.54, "cs_list_price": 23.37, "cs_sales_price": 21.5, "cs_ext_discount_amt": 84.15, "cs_ext_sales_price": 967.5, "cs_ext_wholesale_cost": 429.3, "cs_ext_list_price": 1051.65, "cs_ext_tax": 21.76, "cs_coupon_amt": 725.62, "cs_ext_ship_cost": 294.3, "cs_net_paid": 241.88, "cs_net_paid_inc_tax": 263.64, "cs_net_paid_inc_ship": 536.18, "cs_net_paid_inc_ship_tax": 557.94, "cs_net_profit": -187.42 }, "i1": { "i_item_sk": 1, "i_item_id": "AAAAAAABAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": null, "i_item_desc": "Powers will not get influences. Electoral ports should show low, annual chains. Now young visitors may pose now however final pages. Bitterly right children suit increasing, leading el", "i_current_price": 27.02, "i_wholesale_cost": 23.23, "i_brand_id": 5003002, "i_brand": "exportischolar #2", "i_class_id": 3, "i_class": "pop", "i_category_id": 5, "i_category": "Music", "i_manufact_id": 52, "i_manufact": "ableanti", "i_size": "N/A", "i_formulation": "3663peru009490160959", "i_color": "spring", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 6, "i_product_name": "ought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 38212, "cs_ship_date_sk": 2450846, "cs_bill_customer_sk": 1, "cs_bill_cdemo_sk": 1822764, "cs_bill_hdemo_sk": 5775, "cs_bill_addr_sk": 19986, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 1822764, "cs_ship_hdemo_sk": 5775, "cs_ship_addr_sk": 19986, "cs_call_center_sk": 4, "cs_catalog_page_sk": 31, "cs_ship_mode_sk": 8, "cs_warehouse_sk": 2, "cs_item_sk": 2, "cs_promo_sk": 270, "cs_order_number": 1, "cs_quantity": 20, "cs_wholesale_cost": 87.55, "cs_list_price": 260.89, "cs_sales_price": 153.92, "cs_ext_discount_amt": 2139.4, "cs_ext_sales_price": 3078.4, "cs_ext_wholesale_cost": 1751.0, "cs_ext_list_price": 5217.8, "cs_ext_tax": 71.41, "cs_coupon_amt": 1292.92, "cs_ext_ship_cost": 1356.6, "cs_net_paid": 1785.48, "cs_net_paid_inc_tax": 1856.89, "cs_net_paid_inc_ship": 3142.08, "cs_net_paid_inc_ship_tax": 3213.49, "cs_net_profit": 34.48 }, "i1": { "i_item_sk": 2, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 1.12, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "amalgamalg #1", "i_class_id": 1, "i_class": "dresses", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "petite", "i_formulation": "516steel060826230906", "i_color": "rosy", "i_units": "Bunch", "i_container": "Unknown", "i_manager_id": 98, "i_product_name": "able" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 38212, "cs_ship_date_sk": 2450851, "cs_bill_customer_sk": 1, "cs_bill_cdemo_sk": 1822764, "cs_bill_hdemo_sk": 5775, "cs_bill_addr_sk": 19986, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 1822764, "cs_ship_hdemo_sk": 5775, "cs_ship_addr_sk": 19986, "cs_call_center_sk": 4, "cs_catalog_page_sk": 89, "cs_ship_mode_sk": 15, "cs_warehouse_sk": 2, "cs_item_sk": 2, "cs_promo_sk": 284, "cs_order_number": 2, "cs_quantity": 50, "cs_wholesale_cost": 70.0, "cs_list_price": 205.1, "cs_sales_price": 188.69, "cs_ext_discount_amt": 820.5, "cs_ext_sales_price": 9434.5, "cs_ext_wholesale_cost": 3500.0, "cs_ext_list_price": 10255.0, "cs_ext_tax": 377.38, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 4307.0, "cs_net_paid": 9434.5, "cs_net_paid_inc_tax": 9811.88, "cs_net_paid_inc_ship": 13741.5, "cs_net_paid_inc_ship_tax": 14118.88, "cs_net_profit": 5934.5 }, "cr1": { "cr_returned_date_sk": 2450926, "cr_returned_time_sk": 45816, "cr_item_sk": 2, "cr_refunded_customer_sk": 14601, "cr_refunded_cdemo_sk": 797995, "cr_refunded_hdemo_sk": 6189, "cr_refunded_addr_sk": 9583, "cr_returning_customer_sk": 14601, "cr_returning_cdemo_sk": 797995, "cr_returning_hdemo_sk": 4703, "cr_returning_addr_sk": 9583, "cr_call_center_sk": 1, "cr_catalog_page_sk": 106, "cr_ship_mode_sk": 2, "cr_warehouse_sk": 2, "cr_reason_sk": 30, "cr_order_number": 2, "cr_return_quantity": 47, "cr_return_amount": 3888.31, "cr_return_tax": 233.29, "cr_return_amt_inc_tax": 4121.6, "cr_fee": 91.23, "cr_return_ship_cost": 1348.9, "cr_refunded_cash": 3577.24, "cr_reversed_charge": 186.64, "cr_store_credit": 124.43, "cr_net_loss": 1673.42 }, "i1": { "i_item_sk": 2, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 1.12, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "amalgamalg #1", "i_class_id": 1, "i_class": "dresses", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "petite", "i_formulation": "516steel060826230906", "i_color": "rosy", "i_units": "Bunch", "i_container": "Unknown", "i_manager_id": 98, "i_product_name": "able" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 38212, "cs_ship_date_sk": 2450868, "cs_bill_customer_sk": 1, "cs_bill_cdemo_sk": 1822764, "cs_bill_hdemo_sk": 5775, "cs_bill_addr_sk": 19986, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 1822764, "cs_ship_hdemo_sk": 5775, "cs_ship_addr_sk": 19986, "cs_call_center_sk": 4, "cs_catalog_page_sk": 76, "cs_ship_mode_sk": 2, "cs_warehouse_sk": 2, "cs_item_sk": 3, "cs_promo_sk": 97, "cs_order_number": 1, "cs_quantity": 19, "cs_wholesale_cost": 69.86, "cs_list_price": 88.72, "cs_sales_price": 29.27, "cs_ext_discount_amt": 1129.55, "cs_ext_sales_price": 556.13, "cs_ext_wholesale_cost": 1327.34, "cs_ext_list_price": 1685.68, "cs_ext_tax": 33.36, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 168.53, "cs_net_paid": 556.13, "cs_net_paid_inc_tax": 589.49, "cs_net_paid_inc_ship": 724.66, "cs_net_paid_inc_ship_tax": 758.02, "cs_net_profit": -771.21 }, "i1": { "i_item_sk": 3, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "2000-10-27", "i_rec_end_date": null, "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 7.11, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "brandbrand #4", "i_class_id": 7, "i_class": "decor", "i_category_id": 7, "i_category": "Home", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "N/A", "i_formulation": "516steel060826230906", "i_color": "sienna", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 18, "i_product_name": "pri" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450904, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 64, "cs_ship_mode_sk": 18, "cs_warehouse_sk": 3, "cs_item_sk": 4, "cs_promo_sk": 176, "cs_order_number": 2, "cs_quantity": 56, "cs_wholesale_cost": 67.54, "cs_list_price": 166.82, "cs_sales_price": 18.35, "cs_ext_discount_amt": 8314.32, "cs_ext_sales_price": 1027.6, "cs_ext_wholesale_cost": 3782.24, "cs_ext_list_price": 9341.92, "cs_ext_tax": 0.0, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 3736.32, "cs_net_paid": 1027.6, "cs_net_paid_inc_tax": 1027.6, "cs_net_paid_inc_ship": 4763.92, "cs_net_paid_inc_ship_tax": 4763.92, "cs_net_profit": -2754.64 }, "cr1": { "cr_returned_date_sk": 2450946, "cr_returned_time_sk": 74710, "cr_item_sk": 4, "cr_refunded_customer_sk": 14601, "cr_refunded_cdemo_sk": 797995, "cr_refunded_hdemo_sk": 6189, "cr_refunded_addr_sk": 9583, "cr_returning_customer_sk": 82809, "cr_returning_cdemo_sk": 665550, "cr_returning_hdemo_sk": 991, "cr_returning_addr_sk": 14832, "cr_call_center_sk": 1, "cr_catalog_page_sk": 17, "cr_ship_mode_sk": 2, "cr_warehouse_sk": 5, "cr_reason_sk": 6, "cr_order_number": 2, "cr_return_quantity": 49, "cr_return_amount": 2490.18, "cr_return_tax": 99.6, "cr_return_amt_inc_tax": 2589.78, "cr_fee": 52.54, "cr_return_ship_cost": 1867.39, "cr_refunded_cash": 323.72, "cr_reversed_charge": 931.57, "cr_store_credit": 1234.89, "cr_net_loss": 2019.53 }, "i1": { "i_item_sk": 4, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "1999-10-27", "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 1.35, "i_wholesale_cost": 0.85, "i_brand_id": 3002001, "i_brand": "importoexporti #1", "i_class_id": 2, "i_class": "infants", "i_category_id": 3, "i_category": "Children", "i_manufact_id": 479, "i_manufact": "n stationese", "i_size": "extra large", "i_formulation": "610157moccasin018327", "i_color": "red", "i_units": "Tbl", "i_container": "Unknown", "i_manager_id": 26, "i_product_name": "ese" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450890, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 75, "cs_ship_mode_sk": 8, "cs_warehouse_sk": 1, "cs_item_sk": 5, "cs_promo_sk": 278, "cs_order_number": 2, "cs_quantity": 88, "cs_wholesale_cost": 20.08, "cs_list_price": 60.03, "cs_sales_price": 20.41, "cs_ext_discount_amt": 3486.56, "cs_ext_sales_price": 1796.08, "cs_ext_wholesale_cost": 1767.04, "cs_ext_list_price": 5282.64, "cs_ext_tax": 13.82, "cs_coupon_amt": 1598.51, "cs_ext_ship_cost": 1056.0, "cs_net_paid": 197.57, "cs_net_paid_inc_tax": 211.39, "cs_net_paid_inc_ship": 1253.57, "cs_net_paid_inc_ship_tax": 1267.39, "cs_net_profit": -1569.47 }, "i1": { "i_item_sk": 5, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "1999-10-28", "i_rec_end_date": "2001-10-26", "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 4.0, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "importoimporto #2", "i_class_id": 2, "i_class": "shirts", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 220, "i_manufact": "barableable", "i_size": "petite", "i_formulation": "42214rosy28066558020", "i_color": "pink", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 27, "i_product_name": "anti" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450849, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 39, "cs_ship_mode_sk": 4, "cs_warehouse_sk": 3, "cs_item_sk": 6, "cs_promo_sk": 207, "cs_order_number": 2, "cs_quantity": 31, "cs_wholesale_cost": 40.88, "cs_list_price": 51.91, "cs_sales_price": 6.22, "cs_ext_discount_amt": 1416.39, "cs_ext_sales_price": 192.82, "cs_ext_wholesale_cost": 1267.28, "cs_ext_list_price": 1609.21, "cs_ext_tax": 11.56, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 321.78, "cs_net_paid": 192.82, "cs_net_paid_inc_tax": 204.38, "cs_net_paid_inc_ship": 514.6, "cs_net_paid_inc_ship_tax": 526.16, "cs_net_profit": -1074.46 }, "i1": { "i_item_sk": 6, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "2001-10-27", "i_rec_end_date": null, "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 0.85, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "exportiimporto #1", "i_class_id": 3, "i_class": "pants", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 212, "i_manufact": "barableable", "i_size": "large", "i_formulation": "42214rosy28066558020", "i_color": "moccasin", "i_units": "Bundle", "i_container": "Unknown", "i_manager_id": 6, "i_product_name": "cally" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450889, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 49, "cs_ship_mode_sk": 8, "cs_warehouse_sk": 2, "cs_item_sk": 7, "cs_promo_sk": 162, "cs_order_number": 2, "cs_quantity": 100, "cs_wholesale_cost": 49.56, "cs_list_price": 137.77, "cs_sales_price": 5.51, "cs_ext_discount_amt": 13226.0, "cs_ext_sales_price": 551.0, "cs_ext_wholesale_cost": 4956.0, "cs_ext_list_price": 13777.0, "cs_ext_tax": 0.0, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 3306.0, "cs_net_paid": 551.0, "cs_net_paid_inc_tax": 551.0, "cs_net_paid_inc_ship": 3857.0, "cs_net_paid_inc_ship_tax": 3857.0, "cs_net_profit": -4405.0 }, "i1": { "i_item_sk": 7, "i_item_id": "AAAAAAAAHAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": null, "i_item_desc": "Anxious accounts must catch also years. Revolutionary, large directors used to embrace then mo", "i_current_price": 9.94, "i_wholesale_cost": 6.75, "i_brand_id": 3001002, "i_brand": "amalgexporti #2", "i_class_id": 1, "i_class": "newborn", "i_category_id": 3, "i_category": "Children", "i_manufact_id": 214, "i_manufact": "eseoughtable", "i_size": "petite", "i_formulation": "6moccasin24027188872", "i_color": "spring", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 64, "i_product_name": "ation" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450868, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 69, "cs_ship_mode_sk": 17, "cs_warehouse_sk": 5, "cs_item_sk": 8, "cs_promo_sk": 72, "cs_order_number": 2, "cs_quantity": 30, "cs_wholesale_cost": 72.82, "cs_list_price": 88.84, "cs_sales_price": 71.07, "cs_ext_discount_amt": 533.1, "cs_ext_sales_price": 2132.1, "cs_ext_wholesale_cost": 2184.6, "cs_ext_list_price": 2665.2, "cs_ext_tax": 21.32, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 133.2, "cs_net_paid": 2132.1, "cs_net_paid_inc_tax": 2153.42, "cs_net_paid_inc_ship": 2265.3, "cs_net_paid_inc_ship_tax": 2286.62, "cs_net_profit": -52.5 }, "i1": { "i_item_sk": 8, "i_item_id": "AAAAAAAAIAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "F", "i_current_price": 2.76, "i_wholesale_cost": 0.85, "i_brand_id": 3003001, "i_brand": "exportiexporti #1", "i_class_id": 3, "i_class": "toddlers", "i_category_id": 3, "i_category": "Children", "i_manufact_id": 630, "i_manufact": "barprically", "i_size": "extra large", "i_formulation": "35123wheat3256343398", "i_color": "turquoise", "i_units": "Carton", "i_container": "Unknown", "i_manager_id": 25, "i_product_name": "eing" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450831, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 64, "cs_ship_mode_sk": 1, "cs_warehouse_sk": 3, "cs_item_sk": 9, "cs_promo_sk": 52, "cs_order_number": 2, "cs_quantity": 40, "cs_wholesale_cost": 94.56, "cs_list_price": 277.06, "cs_sales_price": 2.77, "cs_ext_discount_amt": 10971.6, "cs_ext_sales_price": 110.8, "cs_ext_wholesale_cost": 3782.4, "cs_ext_list_price": 11082.4, "cs_ext_tax": 2.21, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 664.8, "cs_net_paid": 110.8, "cs_net_paid_inc_tax": 113.01, "cs_net_paid_inc_ship": 775.6, "cs_net_paid_inc_ship_tax": 777.81, "cs_net_profit": -3671.6 }, "i1": { "i_item_sk": 9, "i_item_id": "AAAAAAAAIAAAAAAA", "i_rec_start_date": "2000-10-27", "i_rec_end_date": null, "i_item_desc": "F", "i_current_price": 4.46, "i_wholesale_cost": 0.85, "i_brand_id": 1004002, "i_brand": "edu packamalg #2", "i_class_id": 3, "i_class": "swimwear", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 630, "i_manufact": "barprically", "i_size": "medium", "i_formulation": "35123wheat3256343398", "i_color": "wheat", "i_units": "Tbl", "i_container": "Unknown", "i_manager_id": 3, "i_product_name": "n st" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450839, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 108, "cs_ship_mode_sk": 8, "cs_warehouse_sk": 4, "cs_item_sk": 10, "cs_promo_sk": 131, "cs_order_number": 3, "cs_quantity": 40, "cs_wholesale_cost": 25.96, "cs_list_price": 54.51, "cs_sales_price": 15.8, "cs_ext_discount_amt": 1548.4, "cs_ext_sales_price": 632.0, "cs_ext_wholesale_cost": 1038.4, "cs_ext_list_price": 2180.4, "cs_ext_tax": 22.75, "cs_coupon_amt": 176.96, "cs_ext_ship_cost": 1090.0, "cs_net_paid": 455.04, "cs_net_paid_inc_tax": 477.79, "cs_net_paid_inc_ship": 1545.04, "cs_net_paid_inc_ship_tax": 1567.79, "cs_net_profit": -583.36 }, "i1": { "i_item_sk": 10, "i_item_id": "AAAAAAAAKAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "1999-10-27", "i_item_desc": "Classical services go trousers. However great galleries might say needs. Assumptions change very in favour of the notes. Teeth woul", "i_current_price": 8.94, "i_wholesale_cost": 4.11, "i_brand_id": 10008011, "i_brand": "namelessunivamalg #11", "i_class_id": 8, "i_class": "scanners", "i_category_id": 10, "i_category": "Electronics", "i_manufact_id": 350, "i_manufact": "barantipri", "i_size": "N/A", "i_formulation": "8159007505thistle447", "i_color": "pale", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 34, "i_product_name": "barought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450825, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 14, "cs_ship_mode_sk": 20, "cs_warehouse_sk": 3, "cs_item_sk": 11, "cs_promo_sk": 296, "cs_order_number": 3, "cs_quantity": 45, "cs_wholesale_cost": 5.95, "cs_list_price": 12.97, "cs_sales_price": 8.81, "cs_ext_discount_amt": 187.2, "cs_ext_sales_price": 396.45, "cs_ext_wholesale_cost": 267.75, "cs_ext_list_price": 583.65, "cs_ext_tax": 3.96, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 17.1, "cs_net_paid": 396.45, "cs_net_paid_inc_tax": 400.41, "cs_net_paid_inc_ship": 413.55, "cs_net_paid_inc_ship_tax": 417.51, "cs_net_profit": 128.7 }, "i1": { "i_item_sk": 11, "i_item_id": "AAAAAAAAKAAAAAAA", "i_rec_start_date": "1999-10-28", "i_rec_end_date": "2001-10-26", "i_item_desc": "Correct, fo", "i_current_price": 54.87, "i_wholesale_cost": 4.11, "i_brand_id": 10008011, "i_brand": "edu packbrand #4", "i_class_id": 14, "i_class": "estate", "i_category_id": 6, "i_category": "Jewelry", "i_manufact_id": 625, "i_manufact": "antiablecally", "i_size": "N/A", "i_formulation": "snow1543775706017405", "i_color": "yellow", "i_units": "Bunch", "i_container": "Unknown", "i_manager_id": 26, "i_product_name": "oughtought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450837, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 13, "cs_ship_mode_sk": 7, "cs_warehouse_sk": 1, "cs_item_sk": 12, "cs_promo_sk": 271, "cs_order_number": 3, "cs_quantity": 34, "cs_wholesale_cost": 43.69, "cs_list_price": 105.29, "cs_sales_price": 82.12, "cs_ext_discount_amt": 787.78, "cs_ext_sales_price": 2792.08, "cs_ext_wholesale_cost": 1485.46, "cs_ext_list_price": 3579.86, "cs_ext_tax": 55.84, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 930.58, "cs_net_paid": 2792.08, "cs_net_paid_inc_tax": 2847.92, "cs_net_paid_inc_ship": 3722.66, "cs_net_paid_inc_ship_tax": 3778.5, "cs_net_profit": 1306.62 }, "i1": { "i_item_sk": 12, "i_item_id": "AAAAAAAAKAAAAAAA", "i_rec_start_date": "2001-10-27", "i_rec_end_date": null, "i_item_desc": "Corporate, important facilities claim trying, external sides. Elements used to expect home pr", "i_current_price": 6.54, "i_wholesale_cost": 4.11, "i_brand_id": 10008011, "i_brand": "corpnameless #3", "i_class_id": 14, "i_class": "furniture", "i_category_id": 7, "i_category": "Home", "i_manufact_id": 264, "i_manufact": "esecallyable", "i_size": "N/A", "i_formulation": "968467777sky92069287", "i_color": "royal", "i_units": "Tbl", "i_container": "Unknown", "i_manager_id": 19, "i_product_name": "ableought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450822, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 106, "cs_ship_mode_sk": 16, "cs_warehouse_sk": 5, "cs_item_sk": 13, "cs_promo_sk": 123, "cs_order_number": 3, "cs_quantity": 59, "cs_wholesale_cost": 78.9, "cs_list_price": 84.42, "cs_sales_price": 82.73, "cs_ext_discount_amt": 99.71, "cs_ext_sales_price": 4881.07, "cs_ext_wholesale_cost": 4655.1, "cs_ext_list_price": 4980.78, "cs_ext_tax": 292.86, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 1444.32, "cs_net_paid": 4881.07, "cs_net_paid_inc_tax": 5173.93, "cs_net_paid_inc_ship": 6325.39, "cs_net_paid_inc_ship_tax": 6618.25, "cs_net_profit": 225.97 }, "i1": { "i_item_sk": 13, "i_item_id": "AAAAAAAANAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": null, "i_item_desc": "Hard, private departments spoil more quickly possible members; clear troops fail only needs. ", "i_current_price": 8.76, "i_wholesale_cost": 7.62, "i_brand_id": 6012006, "i_brand": "importobrand #6", "i_class_id": 12, "i_class": "costume", "i_category_id": 6, "i_category": "Jewelry", "i_manufact_id": 167, "i_manufact": "ationcallyought", "i_size": "N/A", "i_formulation": "883208731996blue7862", "i_color": "olive", "i_units": "Bundle", "i_container": "Unknown", "i_manager_id": 51, "i_product_name": "priought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450895, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 17, "cs_ship_mode_sk": 19, "cs_warehouse_sk": 4, "cs_item_sk": 14, "cs_promo_sk": 131, "cs_order_number": 3, "cs_quantity": 94, "cs_wholesale_cost": 41.36, "cs_list_price": 105.88, "cs_sales_price": 50.82, "cs_ext_discount_amt": 5175.64, "cs_ext_sales_price": 4777.08, "cs_ext_wholesale_cost": 3887.84, "cs_ext_list_price": 9952.72, "cs_ext_tax": 191.08, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 497.26, "cs_net_paid": 4777.08, "cs_net_paid_inc_tax": 4968.16, "cs_net_paid_inc_ship": 5274.34, "cs_net_paid_inc_ship_tax": 5465.42, "cs_net_profit": 889.24 }, "i1": { "i_item_sk": 14, "i_item_id": "AAAAAAAAOAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "Teachers carry by the children; old democrats enco", "i_current_price": 1.85, "i_wholesale_cost": 0.59, "i_brand_id": 8007005, "i_brand": "brandnameless #5", "i_class_id": 7, "i_class": "hockey", "i_category_id": 8, "i_category": "Sports", "i_manufact_id": 460, "i_manufact": "barcallyese", "i_size": "N/A", "i_formulation": "1144670162goldenrod2", "i_color": "red", "i_units": "Dram", "i_container": "Unknown", "i_manager_id": 6, "i_product_name": "eseought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450892, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 74, "cs_ship_mode_sk": 18, "cs_warehouse_sk": 2, "cs_item_sk": 15, "cs_promo_sk": 290, "cs_order_number": 3, "cs_quantity": 38, "cs_wholesale_cost": 26.96, "cs_list_price": 51.22, "cs_sales_price": 35.85, "cs_ext_discount_amt": 584.06, "cs_ext_sales_price": 1362.3, "cs_ext_wholesale_cost": 1024.48, "cs_ext_list_price": 1946.36, "cs_ext_tax": 108.98, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 642.2, "cs_net_paid": 1362.3, "cs_net_paid_inc_tax": 1471.28, "cs_net_paid_inc_ship": 2004.5, "cs_net_paid_inc_ship_tax": 2113.48, "cs_net_profit": 337.82 }, "i1": { "i_item_sk": 15, "i_item_id": "AAAAAAAAOAAAAAAA", "i_rec_start_date": "2000-10-27", "i_rec_end_date": null, "i_item_desc": "Teachers carry by the children; old democrats enco", "i_current_price": 2.57, "i_wholesale_cost": 0.59, "i_brand_id": 5002002, "i_brand": "importoscholar #2", "i_class_id": 2, "i_class": "country", "i_category_id": 5, "i_category": "Music", "i_manufact_id": 86, "i_manufact": "barcallyese", "i_size": "N/A", "i_formulation": "1144670162goldenrod2", "i_color": "royal", "i_units": "Pound", "i_container": "Unknown", "i_manager_id": 11, "i_product_name": "antiought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 29485, "cs_ship_date_sk": 2450834, "cs_bill_customer_sk": 14601, "cs_bill_cdemo_sk": 797995, "cs_bill_hdemo_sk": 6189, "cs_bill_addr_sk": 9583, "cs_ship_customer_sk": 14601, "cs_ship_cdemo_sk": 797995, "cs_ship_hdemo_sk": 6189, "cs_ship_addr_sk": 9583, "cs_call_center_sk": 1, "cs_catalog_page_sk": 63, "cs_ship_mode_sk": 5, "cs_warehouse_sk": 3, "cs_item_sk": 16, "cs_promo_sk": 127, "cs_order_number": 3, "cs_quantity": 41, "cs_wholesale_cost": 39.04, "cs_list_price": 108.92, "cs_sales_price": 75.15, "cs_ext_discount_amt": 1384.57, "cs_ext_sales_price": 3081.15, "cs_ext_wholesale_cost": 1600.64, "cs_ext_list_price": 4465.72, "cs_ext_tax": 215.68, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 1964.72, "cs_net_paid": 3081.15, "cs_net_paid_inc_tax": 3296.83, "cs_net_paid_inc_ship": 5045.87, "cs_net_paid_inc_ship_tax": 5261.55, "cs_net_profit": 1480.51 }, "i1": { "i_item_sk": 16, "i_item_id": "AAAAAAAAABAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "1999-10-27", "i_item_desc": "Dominant, christian pp. may not raise", "i_current_price": 0.31, "i_wholesale_cost": 0.14, "i_brand_id": 1002001, "i_brand": "importoamalg #1", "i_class_id": 2, "i_class": "fragrances", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 117, "i_manufact": "ationoughtought", "i_size": "large", "i_formulation": "36933056603steel7373", "i_color": "bisque", "i_units": "Lb", "i_container": "Unknown", "i_manager_id": 23, "i_product_name": "callyought" } }
-{ "cs1": { "cs_sold_date_sk": 2450815, "cs_sold_time_sk": 10687, "cs_ship_date_sk": 2450864, "cs_bill_customer_sk": 67572, "cs_bill_cdemo_sk": 437897, "cs_bill_hdemo_sk": 6622, "cs_bill_addr_sk": 46147, "cs_ship_customer_sk": 67572, "cs_ship_cdemo_sk": 437897, "cs_ship_hdemo_sk": 6622, "cs_ship_addr_sk": 46147, "cs_call_center_sk": 1, "cs_catalog_page_sk": 28, "cs_ship_mode_sk": 5, "cs_warehouse_sk": 5, "cs_item_sk": 17, "cs_promo_sk": 170, "cs_order_number": 3, "cs_quantity": 99, "cs_wholesale_cost": 75.88, "cs_list_price": 178.31, "cs_sales_price": 156.91, "cs_ext_discount_amt": 2118.6, "cs_ext_sales_price": 15534.09, "cs_ext_wholesale_cost": 7512.12, "cs_ext_list_price": 17652.69, "cs_ext_tax": 1398.06, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 6884.46, "cs_net_paid": 15534.09, "cs_net_paid_inc_tax": 16932.15, "cs_net_paid_inc_ship": 22418.55, "cs_net_paid_inc_ship_tax": 23816.61, "cs_net_profit": 8021.97 }, "i1": { "i_item_sk": 17, "i_item_id": "AAAAAAAAABAAAAAA", "i_rec_start_date": "1999-10-28", "i_rec_end_date": "2001-10-26", "i_item_desc": "Dominant, christian pp. may not raise", "i_current_price": 6.49, "i_wholesale_cost": 0.14, "i_brand_id": 1002001, "i_brand": "amalgimporto #2", "i_class_id": 1, "i_class": "accessories", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 117, "i_manufact": "ationoughtought", "i_size": "extra large", "i_formulation": "452645olive281530722", "i_color": "snow", "i_units": "Dram", "i_container": "Unknown", "i_manager_id": 41, "i_product_name": "ationought" } }
+{ "cs1": { "cs_sold_date_sk": 2415027, "cs_sold_time_sk": 2, "cs_ship_date_sk": 2415027, "cs_bill_customer_sk": 3, "cs_bill_cdemo_sk": 6, "cs_bill_hdemo_sk": 4, "cs_bill_addr_sk": 10, "cs_ship_customer_sk": 8, "cs_ship_cdemo_sk": 5, "cs_ship_hdemo_sk": 2, "cs_ship_addr_sk": 2, "cs_call_center_sk": 1, "cs_catalog_page_sk": 2, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 3, "cs_item_sk": 1, "cs_promo_sk": 10, "cs_order_number": 9, "cs_quantity": 30, "cs_wholesale_cost": 72.82, "cs_list_price": 88.84, "cs_sales_price": 71.07, "cs_ext_discount_amt": 533.1, "cs_ext_sales_price": 2132.1, "cs_ext_wholesale_cost": 2184.6, "cs_ext_list_price": 2665.2, "cs_ext_tax": 21.32, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 133.2, "cs_net_paid": 2132.1, "cs_net_paid_inc_tax": 2153.42, "cs_net_paid_inc_ship": 2265.3, "cs_net_paid_inc_ship_tax": 2286.62, "cs_net_profit": -52.5 }, "i1": { "i_item_sk": 1, "i_item_id": "AAAAAAAABAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": null, "i_item_desc": "Powers will not get influences. Electoral ports should show low, annual chains. Now young visitors may pose now however final pages. Bitterly right children suit increasing, leading el", "i_current_price": 27.02, "i_wholesale_cost": 23.23, "i_brand_id": 5003002, "i_brand": "exportischolar #2", "i_class_id": 3, "i_class": "pop", "i_category_id": 5, "i_category": "Music", "i_manufact_id": 964, "i_manufact": "ableanti", "i_size": "N/A", "i_formulation": "3663peru009490160959", "i_color": "spring", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 6, "i_product_name": "ought" } }
+{ "cs1": { "cs_sold_date_sk": 2415027, "cs_sold_time_sk": 4, "cs_ship_date_sk": 2415029, "cs_bill_customer_sk": 3, "cs_bill_cdemo_sk": 2, "cs_bill_hdemo_sk": 5, "cs_bill_addr_sk": 5, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 5, "cs_ship_hdemo_sk": 2, "cs_ship_addr_sk": 3, "cs_call_center_sk": 1, "cs_catalog_page_sk": 4, "cs_ship_mode_sk": 4, "cs_warehouse_sk": 3, "cs_item_sk": 1, "cs_promo_sk": 8, "cs_order_number": 16, "cs_quantity": 38, "cs_wholesale_cost": 26.96, "cs_list_price": 51.22, "cs_sales_price": 35.85, "cs_ext_discount_amt": 584.06, "cs_ext_sales_price": 1362.3, "cs_ext_wholesale_cost": 1024.48, "cs_ext_list_price": 1946.36, "cs_ext_tax": 108.98, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 642.2, "cs_net_paid": 1362.3, "cs_net_paid_inc_tax": 1471.28, "cs_net_paid_inc_ship": 2004.5, "cs_net_paid_inc_ship_tax": 2113.48, "cs_net_profit": 337.82 }, "i1": { "i_item_sk": 1, "i_item_id": "AAAAAAAABAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": null, "i_item_desc": "Powers will not get influences. Electoral ports should show low, annual chains. Now young visitors may pose now however final pages. Bitterly right children suit increasing, leading el", "i_current_price": 27.02, "i_wholesale_cost": 23.23, "i_brand_id": 5003002, "i_brand": "exportischolar #2", "i_class_id": 3, "i_class": "pop", "i_category_id": 5, "i_category": "Music", "i_manufact_id": 964, "i_manufact": "ableanti", "i_size": "N/A", "i_formulation": "3663peru009490160959", "i_color": "spring", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 6, "i_product_name": "ought" } }
+{ "cs1": { "cs_sold_date_sk": 2415028, "cs_sold_time_sk": 8, "cs_ship_date_sk": 2415023, "cs_bill_customer_sk": 10, "cs_bill_cdemo_sk": 1, "cs_bill_hdemo_sk": 5, "cs_bill_addr_sk": 6, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 4, "cs_ship_hdemo_sk": 6, "cs_ship_addr_sk": 3, "cs_call_center_sk": 4, "cs_catalog_page_sk": 1, "cs_ship_mode_sk": 1, "cs_warehouse_sk": 2, "cs_item_sk": 2, "cs_promo_sk": 1, "cs_order_number": 4, "cs_quantity": 50, "cs_wholesale_cost": 70.0, "cs_list_price": 205.1, "cs_sales_price": 188.69, "cs_ext_discount_amt": 820.5, "cs_ext_sales_price": 9434.5, "cs_ext_wholesale_cost": 3500.0, "cs_ext_list_price": 10255.0, "cs_ext_tax": 377.38, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 4307.0, "cs_net_paid": 9434.5, "cs_net_paid_inc_tax": 9811.88, "cs_net_paid_inc_ship": 13741.5, "cs_net_paid_inc_ship_tax": 14118.88, "cs_net_profit": 5934.5 }, "i1": { "i_item_sk": 2, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 1.12, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "amalgamalg #1", "i_class_id": 1, "i_class": "dresses", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "petite", "i_formulation": "516steel060826230906", "i_color": "rosy", "i_units": "Bunch", "i_container": "Unknown", "i_manager_id": 98, "i_product_name": "able" } }
+{ "cs1": { "cs_sold_date_sk": 2415034, "cs_sold_time_sk": 5, "cs_ship_date_sk": 2415030, "cs_bill_customer_sk": 9, "cs_bill_cdemo_sk": 2, "cs_bill_hdemo_sk": 3, "cs_bill_addr_sk": 2, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 1, "cs_ship_hdemo_sk": 3, "cs_ship_addr_sk": 10, "cs_call_center_sk": 1, "cs_catalog_page_sk": 2, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 3, "cs_item_sk": 2, "cs_promo_sk": 4, "cs_order_number": 8, "cs_quantity": 100, "cs_wholesale_cost": 49.56, "cs_list_price": 137.77, "cs_sales_price": 5.51, "cs_ext_discount_amt": 13226.0, "cs_ext_sales_price": 551.0, "cs_ext_wholesale_cost": 4956.0, "cs_ext_list_price": 13777.0, "cs_ext_tax": 0.0, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 3306.0, "cs_net_paid": 551.0, "cs_net_paid_inc_tax": 551.0, "cs_net_paid_inc_ship": 3857.0, "cs_net_paid_inc_ship_tax": 3857.0, "cs_net_profit": -4405.0 }, "cr1": { "cr_returned_date_sk": 2415031, "cr_returned_time_sk": 1, "cr_item_sk": 2, "cr_refunded_customer_sk": 10, "cr_refunded_cdemo_sk": 2, "cr_refunded_hdemo_sk": 3, "cr_refunded_addr_sk": 1, "cr_returning_customer_sk": 2, "cr_returning_cdemo_sk": 2, "cr_returning_hdemo_sk": 3, "cr_returning_addr_sk": 9, "cr_call_center_sk": 3, "cr_catalog_page_sk": 42, "cr_ship_mode_sk": 3, "cr_warehouse_sk": 4, "cr_reason_sk": 3, "cr_order_number": 8, "cr_return_quantity": 9, "cr_return_amount": 29.79, "cr_return_tax": 1.19, "cr_return_amt_inc_tax": 30.98, "cr_fee": 91.16, "cr_return_ship_cost": 59.67, "cr_refunded_cash": 0.0, "cr_reversed_charge": 20.85, "cr_store_credit": 8.94, "cr_net_loss": 152.02 }, "i1": { "i_item_sk": 2, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 1.12, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "amalgamalg #1", "i_class_id": 1, "i_class": "dresses", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "petite", "i_formulation": "516steel060826230906", "i_color": "rosy", "i_units": "Bunch", "i_container": "Unknown", "i_manager_id": 98, "i_product_name": "able" } }
+{ "cs1": { "cs_sold_date_sk": 2415028, "cs_sold_time_sk": 6, "cs_ship_date_sk": 2415024, "cs_bill_customer_sk": 2, "cs_bill_cdemo_sk": 5, "cs_bill_hdemo_sk": 5, "cs_bill_addr_sk": 9, "cs_ship_customer_sk": 3, "cs_ship_cdemo_sk": 1, "cs_ship_hdemo_sk": 5, "cs_ship_addr_sk": 6, "cs_call_center_sk": 1, "cs_catalog_page_sk": 2, "cs_ship_mode_sk": 2, "cs_warehouse_sk": 10, "cs_item_sk": 2, "cs_promo_sk": 10, "cs_order_number": 10, "cs_quantity": 40, "cs_wholesale_cost": 94.56, "cs_list_price": 277.06, "cs_sales_price": 2.77, "cs_ext_discount_amt": 10971.6, "cs_ext_sales_price": 110.8, "cs_ext_wholesale_cost": 3782.4, "cs_ext_list_price": 11082.4, "cs_ext_tax": 2.21, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 664.8, "cs_net_paid": 110.8, "cs_net_paid_inc_tax": 113.01, "cs_net_paid_inc_ship": 775.6, "cs_net_paid_inc_ship_tax": 777.81, "cs_net_profit": -3671.6 }, "i1": { "i_item_sk": 2, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 1.12, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "amalgamalg #1", "i_class_id": 1, "i_class": "dresses", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "petite", "i_formulation": "516steel060826230906", "i_color": "rosy", "i_units": "Bunch", "i_container": "Unknown", "i_manager_id": 98, "i_product_name": "able" } }
+{ "cs1": { "cs_sold_date_sk": 2415027, "cs_sold_time_sk": 8, "cs_ship_date_sk": 2415022, "cs_bill_customer_sk": 2, "cs_bill_cdemo_sk": 2, "cs_bill_hdemo_sk": 3, "cs_bill_addr_sk": 8, "cs_ship_customer_sk": 9, "cs_ship_cdemo_sk": 3, "cs_ship_hdemo_sk": 5, "cs_ship_addr_sk": 10, "cs_call_center_sk": 1, "cs_catalog_page_sk": 3, "cs_ship_mode_sk": 1, "cs_warehouse_sk": 3, "cs_item_sk": 2, "cs_promo_sk": 3, "cs_order_number": 12, "cs_quantity": 45, "cs_wholesale_cost": 5.95, "cs_list_price": 12.97, "cs_sales_price": 8.81, "cs_ext_discount_amt": 187.2, "cs_ext_sales_price": 396.45, "cs_ext_wholesale_cost": 267.75, "cs_ext_list_price": 583.65, "cs_ext_tax": 3.96, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 17.1, "cs_net_paid": 396.45, "cs_net_paid_inc_tax": 400.41, "cs_net_paid_inc_ship": 413.55, "cs_net_paid_inc_ship_tax": 417.51, "cs_net_profit": 128.7 }, "i1": { "i_item_sk": 2, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "2000-10-26", "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 1.12, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "amalgamalg #1", "i_class_id": 1, "i_class": "dresses", "i_category_id": 1, "i_category": "Women", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "petite", "i_formulation": "516steel060826230906", "i_color": "rosy", "i_units": "Bunch", "i_container": "Unknown", "i_manager_id": 98, "i_product_name": "able" } }
+{ "cs1": { "cs_sold_date_sk": 2415031, "cs_sold_time_sk": 8, "cs_ship_date_sk": 2415026, "cs_bill_customer_sk": 10, "cs_bill_cdemo_sk": 2, "cs_bill_hdemo_sk": 2, "cs_bill_addr_sk": 6, "cs_ship_customer_sk": 2, "cs_ship_cdemo_sk": 6, "cs_ship_hdemo_sk": 6, "cs_ship_addr_sk": 3, "cs_call_center_sk": 4, "cs_catalog_page_sk": 1, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 4, "cs_item_sk": 3, "cs_promo_sk": 6, "cs_order_number": 1, "cs_quantity": 47, "cs_wholesale_cost": 27.7, "cs_list_price": 44.32, "cs_sales_price": 42.99, "cs_ext_discount_amt": 62.51, "cs_ext_sales_price": 2020.53, "cs_ext_wholesale_cost": 1301.9, "cs_ext_list_price": 2083.04, "cs_ext_tax": 101.02, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 1041.52, "cs_net_paid": 2020.53, "cs_net_paid_inc_tax": 2121.55, "cs_net_paid_inc_ship": 3062.05, "cs_net_paid_inc_ship_tax": 3163.07, "cs_net_profit": 718.63 }, "i1": { "i_item_sk": 3, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "2000-10-27", "i_rec_end_date": null, "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 7.11, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "brandbrand #4", "i_class_id": 7, "i_class": "decor", "i_category_id": 7, "i_category": "Home", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "N/A", "i_formulation": "516steel060826230906", "i_color": "chiffon", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 1, "i_product_name": "pri" } }
+{ "cs1": { "cs_sold_date_sk": 2415026, "cs_sold_time_sk": 7, "cs_ship_date_sk": 2415026, "cs_bill_customer_sk": 8, "cs_bill_cdemo_sk": 2, "cs_bill_hdemo_sk": 2, "cs_bill_addr_sk": 9, "cs_ship_customer_sk": 8, "cs_ship_cdemo_sk": 4, "cs_ship_hdemo_sk": 6, "cs_ship_addr_sk": 7, "cs_call_center_sk": 4, "cs_catalog_page_sk": 1, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 2, "cs_item_sk": 3, "cs_promo_sk": 10, "cs_order_number": 2, "cs_quantity": 20, "cs_wholesale_cost": 87.55, "cs_list_price": 260.89, "cs_sales_price": 153.92, "cs_ext_discount_amt": 2139.4, "cs_ext_sales_price": 3078.4, "cs_ext_wholesale_cost": 1751.0, "cs_ext_list_price": 5217.8, "cs_ext_tax": 71.41, "cs_coupon_amt": 1292.92, "cs_ext_ship_cost": 1356.6, "cs_net_paid": 1785.48, "cs_net_paid_inc_tax": 1856.89, "cs_net_paid_inc_ship": 3142.08, "cs_net_paid_inc_ship_tax": 3213.49, "cs_net_profit": 34.48 }, "i1": { "i_item_sk": 3, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "2000-10-27", "i_rec_end_date": null, "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 7.11, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "brandbrand #4", "i_class_id": 7, "i_class": "decor", "i_category_id": 7, "i_category": "Home", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "N/A", "i_formulation": "516steel060826230906", "i_color": "chiffon", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 1, "i_product_name": "pri" } }
+{ "cs1": { "cs_sold_date_sk": 2415031, "cs_sold_time_sk": 2, "cs_ship_date_sk": 2415025, "cs_bill_customer_sk": 5, "cs_bill_cdemo_sk": 1, "cs_bill_hdemo_sk": 3, "cs_bill_addr_sk": 7, "cs_ship_customer_sk": 10, "cs_ship_cdemo_sk": 4, "cs_ship_hdemo_sk": 1, "cs_ship_addr_sk": 4, "cs_call_center_sk": 1, "cs_catalog_page_sk": 5, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 1, "cs_item_sk": 3, "cs_promo_sk": 6, "cs_order_number": 6, "cs_quantity": 88, "cs_wholesale_cost": 20.08, "cs_list_price": 60.03, "cs_sales_price": 20.41, "cs_ext_discount_amt": 3486.56, "cs_ext_sales_price": 1796.08, "cs_ext_wholesale_cost": 1767.04, "cs_ext_list_price": 5282.64, "cs_ext_tax": 13.82, "cs_coupon_amt": 1598.51, "cs_ext_ship_cost": 1056.0, "cs_net_paid": 197.57, "cs_net_paid_inc_tax": 211.39, "cs_net_paid_inc_ship": 1253.57, "cs_net_paid_inc_ship_tax": 1267.39, "cs_net_profit": -1569.47 }, "i1": { "i_item_sk": 3, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "2000-10-27", "i_rec_end_date": null, "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 7.11, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "brandbrand #4", "i_class_id": 7, "i_class": "decor", "i_category_id": 7, "i_category": "Home", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "N/A", "i_formulation": "516steel060826230906", "i_color": "chiffon", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 1, "i_product_name": "pri" } }
+{ "cs1": { "cs_sold_date_sk": 2415029, "cs_sold_time_sk": 3, "cs_ship_date_sk": 2415028, "cs_bill_customer_sk": 8, "cs_bill_cdemo_sk": 6, "cs_bill_hdemo_sk": 4, "cs_bill_addr_sk": 5, "cs_ship_customer_sk": 4, "cs_ship_cdemo_sk": 4, "cs_ship_hdemo_sk": 6, "cs_ship_addr_sk": 9, "cs_call_center_sk": 1, "cs_catalog_page_sk": 3, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 17, "cs_item_sk": 3, "cs_promo_sk": 2, "cs_order_number": 13, "cs_quantity": 34, "cs_wholesale_cost": 43.69, "cs_list_price": 105.29, "cs_sales_price": 82.12, "cs_ext_discount_amt": 787.78, "cs_ext_sales_price": 2792.08, "cs_ext_wholesale_cost": 1485.46, "cs_ext_list_price": 3579.86, "cs_ext_tax": 55.84, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 930.58, "cs_net_paid": 2792.08, "cs_net_paid_inc_tax": 2847.92, "cs_net_paid_inc_ship": 3722.66, "cs_net_paid_inc_ship_tax": 3778.5, "cs_net_profit": 1306.62 }, "i1": { "i_item_sk": 3, "i_item_id": "AAAAAAAACAAAAAAA", "i_rec_start_date": "2000-10-27", "i_rec_end_date": null, "i_item_desc": "False opportunities would run alone with a views. Early approaches would show inc, european intentions; important, main passages shall know urban, ", "i_current_price": 7.11, "i_wholesale_cost": 0.38, "i_brand_id": 1001001, "i_brand": "brandbrand #4", "i_class_id": 7, "i_class": "decor", "i_category_id": 7, "i_category": "Home", "i_manufact_id": 294, "i_manufact": "esen stable", "i_size": "N/A", "i_formulation": "516steel060826230906", "i_color": "chiffon", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 1, "i_product_name": "pri" } }
+{ "cs1": { "cs_sold_date_sk": 2415027, "cs_sold_time_sk": 3, "cs_ship_date_sk": 2415029, "cs_bill_customer_sk": 3, "cs_bill_cdemo_sk": 5, "cs_bill_hdemo_sk": 4, "cs_bill_addr_sk": 4, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 2, "cs_ship_hdemo_sk": 4, "cs_ship_addr_sk": 8, "cs_call_center_sk": 4, "cs_catalog_page_sk": 2, "cs_ship_mode_sk": 4, "cs_warehouse_sk": 2, "cs_item_sk": 5, "cs_promo_sk": 3, "cs_order_number": 3, "cs_quantity": 19, "cs_wholesale_cost": 69.86, "cs_list_price": 88.72, "cs_sales_price": 29.27, "cs_ext_discount_amt": 1129.55, "cs_ext_sales_price": 556.13, "cs_ext_wholesale_cost": 1327.34, "cs_ext_list_price": 1685.68, "cs_ext_tax": 33.36, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 168.53, "cs_net_paid": 556.13, "cs_net_paid_inc_tax": 589.49, "cs_net_paid_inc_ship": 724.66, "cs_net_paid_inc_ship_tax": 758.02, "cs_net_profit": -771.21 }, "cr1": { "cr_returned_date_sk": 2415029, "cr_returned_time_sk": 1, "cr_item_sk": 5, "cr_refunded_customer_sk": 4, "cr_refunded_cdemo_sk": 5, "cr_refunded_hdemo_sk": 1, "cr_refunded_addr_sk": 3, "cr_returning_customer_sk": 3, "cr_returning_cdemo_sk": 3, "cr_returning_hdemo_sk": 1, "cr_returning_addr_sk": 9, "cr_call_center_sk": 2, "cr_catalog_page_sk": 75, "cr_ship_mode_sk": 3, "cr_warehouse_sk": 1, "cr_reason_sk": 5, "cr_order_number": 3, "cr_return_quantity": 44, "cr_return_amount": 2157.76, "cr_return_tax": 43.15, "cr_return_amt_inc_tax": 2200.91, "cr_fee": 46.29, "cr_return_ship_cost": 1280.84, "cr_refunded_cash": 1834.09, "cr_reversed_charge": 51.78, "cr_store_credit": 271.89, "cr_net_loss": 1370.28 }, "i1": { "i_item_sk": 5, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "1999-10-28", "i_rec_end_date": "2001-10-26", "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 1.3, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "importoimporto #2", "i_class_id": 2, "i_class": "shirts", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 220, "i_manufact": "barableable", "i_size": "petite", "i_formulation": "42214rosy28066558020", "i_color": "chiffon", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 27, "i_product_name": "anti" } }
+{ "cs1": { "cs_sold_date_sk": 2415025, "cs_sold_time_sk": 8, "cs_ship_date_sk": 2415022, "cs_bill_customer_sk": 2, "cs_bill_cdemo_sk": 4, "cs_bill_hdemo_sk": 3, "cs_bill_addr_sk": 10, "cs_ship_customer_sk": 1, "cs_ship_cdemo_sk": 1, "cs_ship_hdemo_sk": 6, "cs_ship_addr_sk": 1, "cs_call_center_sk": 1, "cs_catalog_page_sk": 5, "cs_ship_mode_sk": 2, "cs_warehouse_sk": 18, "cs_item_sk": 5, "cs_promo_sk": 3, "cs_order_number": 15, "cs_quantity": 94, "cs_wholesale_cost": 41.36, "cs_list_price": 105.88, "cs_sales_price": 530.82, "cs_ext_discount_amt": 5175.64, "cs_ext_sales_price": 4777.08, "cs_ext_wholesale_cost": 3887.84, "cs_ext_list_price": 9952.72, "cs_ext_tax": 191.08, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 497.26, "cs_net_paid": 4777.08, "cs_net_paid_inc_tax": 4968.16, "cs_net_paid_inc_ship": 5274.34, "cs_net_paid_inc_ship_tax": 5465.42, "cs_net_profit": 889.24 }, "i1": { "i_item_sk": 5, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "1999-10-28", "i_rec_end_date": "2001-10-26", "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 1.3, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "importoimporto #2", "i_class_id": 2, "i_class": "shirts", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 220, "i_manufact": "barableable", "i_size": "petite", "i_formulation": "42214rosy28066558020", "i_color": "chiffon", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 27, "i_product_name": "anti" } }
+{ "cs1": { "cs_sold_date_sk": 2415028, "cs_sold_time_sk": 3, "cs_ship_date_sk": 2415031, "cs_bill_customer_sk": 1, "cs_bill_cdemo_sk": 3, "cs_bill_hdemo_sk": 1, "cs_bill_addr_sk": 6, "cs_ship_customer_sk": 10, "cs_ship_cdemo_sk": 4, "cs_ship_hdemo_sk": 1, "cs_ship_addr_sk": 7, "cs_call_center_sk": 1, "cs_catalog_page_sk": 1, "cs_ship_mode_sk": 2, "cs_warehouse_sk": 15, "cs_item_sk": 5, "cs_promo_sk": 10, "cs_order_number": 17, "cs_quantity": 45, "cs_wholesale_cost": 9.54, "cs_list_price": 23.37, "cs_sales_price": 21.5, "cs_ext_discount_amt": 84.15, "cs_ext_sales_price": 967.5, "cs_ext_wholesale_cost": 429.3, "cs_ext_list_price": 1051.65, "cs_ext_tax": 21.76, "cs_coupon_amt": 725.62, "cs_ext_ship_cost": 294.3, "cs_net_paid": 241.88, "cs_net_paid_inc_tax": 263.64, "cs_net_paid_inc_ship": 536.18, "cs_net_paid_inc_ship_tax": 557.94, "cs_net_profit": -187.42 }, "i1": { "i_item_sk": 5, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "1999-10-28", "i_rec_end_date": "2001-10-26", "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 1.3, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "importoimporto #2", "i_class_id": 2, "i_class": "shirts", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 220, "i_manufact": "barableable", "i_size": "petite", "i_formulation": "42214rosy28066558020", "i_color": "chiffon", "i_units": "Cup", "i_container": "Unknown", "i_manager_id": 27, "i_product_name": "anti" } }
+{ "cs1": { "cs_sold_date_sk": 2415023, "cs_sold_time_sk": 4, "cs_ship_date_sk": 2415022, "cs_bill_customer_sk": 2, "cs_bill_cdemo_sk": 2, "cs_bill_hdemo_sk": 1, "cs_bill_addr_sk": 2, "cs_ship_customer_sk": 2, "cs_ship_cdemo_sk": 5, "cs_ship_hdemo_sk": 1, "cs_ship_addr_sk": 9, "cs_call_center_sk": 1, "cs_catalog_page_sk": 3, "cs_ship_mode_sk": 5, "cs_warehouse_sk": 10, "cs_item_sk": 6, "cs_promo_sk": 9, "cs_order_number": 7, "cs_quantity": 31, "cs_wholesale_cost": 40.88, "cs_list_price": 51.91, "cs_sales_price": 6.22, "cs_ext_discount_amt": 1416.39, "cs_ext_sales_price": 192.82, "cs_ext_wholesale_cost": 1267.28, "cs_ext_list_price": 1609.21, "cs_ext_tax": 11.56, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 321.78, "cs_net_paid": 192.82, "cs_net_paid_inc_tax": 204.38, "cs_net_paid_inc_ship": 514.6, "cs_net_paid_inc_ship_tax": 526.16, "cs_net_profit": -1074.46 }, "i1": { "i_item_sk": 6, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "2001-10-27", "i_rec_end_date": null, "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 0.85, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "exportiimporto #1", "i_class_id": 3, "i_class": "pants", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 212, "i_manufact": "barableable", "i_size": "large", "i_formulation": "42214rosy28066558020", "i_color": "chiffon", "i_units": "Bundle", "i_container": "Unknown", "i_manager_id": 7, "i_product_name": "cally" } }
+{ "cs1": { "cs_sold_date_sk": 2415030, "cs_sold_time_sk": 6, "cs_ship_date_sk": 2415025, "cs_bill_customer_sk": 2, "cs_bill_cdemo_sk": 1, "cs_bill_hdemo_sk": 3, "cs_bill_addr_sk": 5, "cs_ship_customer_sk": 3, "cs_ship_cdemo_sk": 1, "cs_ship_hdemo_sk": 4, "cs_ship_addr_sk": 7, "cs_call_center_sk": 1, "cs_catalog_page_sk": 3, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 3, "cs_item_sk": 6, "cs_promo_sk": 9, "cs_order_number": 18, "cs_quantity": 41, "cs_wholesale_cost": 39.04, "cs_list_price": 108.92, "cs_sales_price": 75.15, "cs_ext_discount_amt": 1384.57, "cs_ext_sales_price": 3081.15, "cs_ext_wholesale_cost": 1600.64, "cs_ext_list_price": 4465.72, "cs_ext_tax": 215.68, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 1964.72, "cs_net_paid": 3081.15, "cs_net_paid_inc_tax": 3296.83, "cs_net_paid_inc_ship": 5045.87, "cs_net_paid_inc_ship_tax": 5261.55, "cs_net_profit": 1480.51 }, "i1": { "i_item_sk": 6, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "2001-10-27", "i_rec_end_date": null, "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 0.85, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "exportiimporto #1", "i_class_id": 3, "i_class": "pants", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 212, "i_manufact": "barableable", "i_size": "large", "i_formulation": "42214rosy28066558020", "i_color": "chiffon", "i_units": "Bundle", "i_container": "Unknown", "i_manager_id": 7, "i_product_name": "cally" } }
+{ "cs1": { "cs_sold_date_sk": 2415030, "cs_sold_time_sk": 6, "cs_ship_date_sk": 2415025, "cs_bill_customer_sk": 1, "cs_bill_cdemo_sk": 1, "cs_bill_hdemo_sk": 3, "cs_bill_addr_sk": 5, "cs_ship_customer_sk": 3, "cs_ship_cdemo_sk": 1, "cs_ship_hdemo_sk": 4, "cs_ship_addr_sk": 7, "cs_call_center_sk": 1, "cs_catalog_page_sk": 3, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 15, "cs_item_sk": 6, "cs_promo_sk": 9, "cs_order_number": 29, "cs_quantity": 41, "cs_wholesale_cost": 39.04, "cs_list_price": 108.92, "cs_sales_price": 75.15, "cs_ext_discount_amt": 1384.57, "cs_ext_sales_price": 3081.15, "cs_ext_wholesale_cost": 1600.64, "cs_ext_list_price": 4465.72, "cs_ext_tax": 215.68, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 1964.72, "cs_net_paid": 3081.15, "cs_net_paid_inc_tax": 3296.83, "cs_net_paid_inc_ship": 5045.87, "cs_net_paid_inc_ship_tax": 5261.55, "cs_net_profit": 1480.51 }, "i1": { "i_item_sk": 6, "i_item_id": "AAAAAAAAEAAAAAAA", "i_rec_start_date": "2001-10-27", "i_rec_end_date": null, "i_item_desc": "Normal systems would join simply different theories. Full, new clothes may eat instead achievements. D", "i_current_price": 0.85, "i_wholesale_cost": 1.76, "i_brand_id": 2002002, "i_brand": "exportiimporto #1", "i_class_id": 3, "i_class": "pants", "i_category_id": 2, "i_category": "Men", "i_manufact_id": 212, "i_manufact": "barableable", "i_size": "large", "i_formulation": "42214rosy28066558020", "i_color": "chiffon", "i_units": "Bundle", "i_container": "Unknown", "i_manager_id": 7, "i_product_name": "cally" } }
+{ "cs1": { "cs_sold_date_sk": 2415022, "cs_sold_time_sk": 4, "cs_ship_date_sk": 2415022, "cs_bill_customer_sk": 2, "cs_bill_cdemo_sk": 5, "cs_bill_hdemo_sk": 5, "cs_bill_addr_sk": 8, "cs_ship_customer_sk": 2, "cs_ship_cdemo_sk": 4, "cs_ship_hdemo_sk": 3, "cs_ship_addr_sk": 2, "cs_call_center_sk": 1, "cs_catalog_page_sk": 1, "cs_ship_mode_sk": 5, "cs_warehouse_sk": 3, "cs_item_sk": 7, "cs_promo_sk": 5, "cs_order_number": 14, "cs_quantity": 59, "cs_wholesale_cost": 78.9, "cs_list_price": 84.42, "cs_sales_price": 82.73, "cs_ext_discount_amt": 99.71, "cs_ext_sales_price": 4881.07, "cs_ext_wholesale_cost": 4655.1, "cs_ext_list_price": 4980.78, "cs_ext_tax": 292.86, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 1444.32, "cs_net_paid": 4881.07, "cs_net_paid_inc_tax": 5173.93, "cs_net_paid_inc_ship": 6325.39, "cs_net_paid_inc_ship_tax": 6618.25, "cs_net_profit": 225.97 }, "i1": { "i_item_sk": 7, "i_item_id": "AAAAAAAAHAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": null, "i_item_desc": "Anxious accounts must catch also years. Revolutionary, large directors used to embrace then mo", "i_current_price": 39.94, "i_wholesale_cost": 1.3, "i_brand_id": 3001002, "i_brand": "amalgexporti #2", "i_class_id": 1, "i_class": "newborn", "i_category_id": 3, "i_category": "Children", "i_manufact_id": 129, "i_manufact": "eseoughtable", "i_size": "petite", "i_formulation": "6moccasin24027188872", "i_color": "chiffon", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 64, "i_product_name": "ation" } }
+{ "cs1": { "cs_sold_date_sk": 2415023, "cs_sold_time_sk": 6, "cs_ship_date_sk": 2415027, "cs_bill_customer_sk": 5, "cs_bill_cdemo_sk": 1, "cs_bill_hdemo_sk": 4, "cs_bill_addr_sk": 8, "cs_ship_customer_sk": 9, "cs_ship_cdemo_sk": 4, "cs_ship_hdemo_sk": 2, "cs_ship_addr_sk": 8, "cs_call_center_sk": 1, "cs_catalog_page_sk": 2, "cs_ship_mode_sk": 3, "cs_warehouse_sk": 3, "cs_item_sk": 10, "cs_promo_sk": 7, "cs_order_number": 5, "cs_quantity": 56, "cs_wholesale_cost": 67.54, "cs_list_price": 166.82, "cs_sales_price": 18.35, "cs_ext_discount_amt": 8314.32, "cs_ext_sales_price": 1027.6, "cs_ext_wholesale_cost": 3782.24, "cs_ext_list_price": 9341.92, "cs_ext_tax": 0.0, "cs_coupon_amt": 0.0, "cs_ext_ship_cost": 3736.32, "cs_net_paid": 1027.6, "cs_net_paid_inc_tax": 1027.6, "cs_net_paid_inc_ship": 4763.92, "cs_net_paid_inc_ship_tax": 4763.92, "cs_net_profit": -2754.64 }, "i1": { "i_item_sk": 10, "i_item_id": "AAAAAAAAKAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "1999-10-27", "i_item_desc": "Classical services go trousers. However great galleries might say needs. Assumptions change very in favour of the notes. Teeth woul", "i_current_price": 8.94, "i_wholesale_cost": 4.11, "i_brand_id": 10008011, "i_brand": "namelessunivamalg #11", "i_class_id": 8, "i_class": "scanners", "i_category_id": 10, "i_category": "Electronics", "i_manufact_id": 350, "i_manufact": "barantipri", "i_size": "N/A", "i_formulation": "8159007505thistle447", "i_color": "pale", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 34, "i_product_name": "barought" } }
+{ "cs1": { "cs_sold_date_sk": 2415030, "cs_sold_time_sk": 2, "cs_ship_date_sk": 2415022, "cs_bill_customer_sk": 8, "cs_bill_cdemo_sk": 5, "cs_bill_hdemo_sk": 5, "cs_bill_addr_sk": 3, "cs_ship_customer_sk": 3, "cs_ship_cdemo_sk": 2, "cs_ship_hdemo_sk": 3, "cs_ship_addr_sk": 4, "cs_call_center_sk": 1, "cs_catalog_page_sk": 2, "cs_ship_mode_sk": 1, "cs_warehouse_sk": 16, "cs_item_sk": 10, "cs_promo_sk": 7, "cs_order_number": 11, "cs_quantity": 40, "cs_wholesale_cost": 25.96, "cs_list_price": 54.51, "cs_sales_price": 15.8, "cs_ext_discount_amt": 1548.4, "cs_ext_sales_price": 632.0, "cs_ext_wholesale_cost": 1038.4, "cs_ext_list_price": 2180.4, "cs_ext_tax": 22.75, "cs_coupon_amt": 176.96, "cs_ext_ship_cost": 1090.0, "cs_net_paid": 455.04, "cs_net_paid_inc_tax": 477.79, "cs_net_paid_inc_ship": 1545.04, "cs_net_paid_inc_ship_tax": 1567.79, "cs_net_profit": -583.36 }, "i1": { "i_item_sk": 10, "i_item_id": "AAAAAAAAKAAAAAAA", "i_rec_start_date": "1997-10-27", "i_rec_end_date": "1999-10-27", "i_item_desc": "Classical services go trousers. However great galleries might say needs. Assumptions change very in favour of the notes. Teeth woul", "i_current_price": 8.94, "i_wholesale_cost": 4.11, "i_brand_id": 10008011, "i_brand": "namelessunivamalg #11", "i_class_id": 8, "i_class": "scanners", "i_category_id": 10, "i_category": "Electronics", "i_manufact_id": 350, "i_manufact": "barantipri", "i_size": "N/A", "i_formulation": "8159007505thistle447", "i_color": "pale", "i_units": "Tsp", "i_container": "Unknown", "i_manager_id": 34, "i_product_name": "barought" } }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1602/query-ASTERIXDB-1602.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1602/query-ASTERIXDB-1602.1.adm
index 2dc6b04..65db206 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1602/query-ASTERIXDB-1602.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpcds/query-ASTERIXDB-1602/query-ASTERIXDB-1602.1.adm
@@ -1 +1 @@
-{ "$1": 121 }
+{ "$1": 144 }
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isarray/isarray.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isarray/isarray.1.adm
new file mode 100644
index 0000000..b97271c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isarray/isarray.1.adm
@@ -0,0 +1 @@
+{ "a": false, "b": false, "c": null, "e": false, "f": false, "g": false, "h": true, "i": false }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isboolean/isboolean.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isboolean/isboolean.1.adm
new file mode 100644
index 0000000..66ed941
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isboolean/isboolean.1.adm
@@ -0,0 +1 @@
+{ "a": true, "b": true, "c": null, "e": false, "f": false, "g": false, "h": false, "i": false }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isnumber/isnumber.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isnumber/isnumber.1.adm
new file mode 100644
index 0000000..fc8b4a8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isnumber/isnumber.1.adm
@@ -0,0 +1 @@
+{ "a": false, "b": false, "c": null, "e": false, "f": true, "g": true, "h": false, "i": false }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isobject/isobject.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isobject/isobject.1.adm
new file mode 100644
index 0000000..00007d7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isobject/isobject.1.adm
@@ -0,0 +1 @@
+{ "a": false, "b": false, "c": null, "e": false, "f": false, "g": false, "h": false, "i": true }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isstring/isstring.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isstring/isstring.1.adm
new file mode 100644
index 0000000..6a8318f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/types/isstring/isstring.1.adm
@@ -0,0 +1 @@
+{ "a": false, "b": false, "c": null, "e": true, "f": false, "g": false, "h": false, "i": false }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
index f45ebb4..352e0d1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -1345,6 +1345,11 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="constructor">
+      <compilation-unit name="polygon-from-open-list_issue1627">
+        <output-dir compare="Text">polygon-from-open-list_issue1627</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="constructor">
       <compilation-unit name="primitive-01">
         <output-dir compare="Text">primitive-01</output-dir>
       </compilation-unit>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 5dd0bd0..ada8989 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -3145,6 +3145,16 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="misc">
+      <compilation-unit name="query-ASTERIXDB-1671">
+        <output-dir compare="Text">query-ASTERIXDB-1671</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="misc">
+      <compilation-unit name="query-ASTERIXDB-1671-2">
+        <output-dir compare="Text">query-ASTERIXDB-1671</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="misc">
       <compilation-unit name="uuid">
         <output-dir compare="Text">uuid</output-dir>
       </compilation-unit>
@@ -4004,6 +4014,11 @@
         <output-dir compare="Text">is</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="null-missing">
+      <compilation-unit name="query-ASTERIXDB-1689">
+        <output-dir compare="Text">query-ASTERIXDB-1689</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="numeric">
     <test-case FilePath="numeric">
@@ -4702,6 +4717,11 @@
         <output-dir compare="Text">query-ASTERIXDB-1005</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="quantifiers">
+      <compilation-unit name="query-ASTERIXDB-1674">
+        <output-dir compare="Text">query-ASTERIXDB-1674</output-dir>
+      </compilation-unit>
+    </test-case>
     <!--
         <test-case FilePath="quantifiers">
           <compilation-unit name="everysat_02">
@@ -5819,6 +5839,11 @@
         <output-dir compare="Text">query-ASTERIXDB-1597</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="subquery">
+      <compilation-unit name="query-ASTERIXDB-1674">
+        <output-dir compare="Text">query-ASTERIXDB-1674</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="subset-collection">
     <test-case FilePath="subset-collection">
@@ -5965,6 +5990,181 @@
         <output-dir compare="Text">query-ASTERIXDB-1602</output-dir>
       </compilation-unit>
     </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q03">
+        <output-dir compare="Text">q03</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q07">
+        <output-dir compare="Text">q07</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q15">
+        <output-dir compare="Text">q15</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q19">
+        <output-dir compare="Text">q19</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q21">
+        <output-dir compare="Text">q21</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q24a">
+        <output-dir compare="Text">q24a</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q24b">
+        <output-dir compare="Text">q24b</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q25">
+        <output-dir compare="Text">q25</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q26">
+        <output-dir compare="Text">q26</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q29">
+        <output-dir compare="Text">q29</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q30">
+        <output-dir compare="Text">q30</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q31">
+        <output-dir compare="Text">q31</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q34">
+        <output-dir compare="Text">q34</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q37">
+        <output-dir compare="Text">q37</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q41">
+        <output-dir compare="Text">q41</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q42">
+        <output-dir compare="Text">q42</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q43">
+        <output-dir compare="Text">q43</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q45">
+        <output-dir compare="Text">q45</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q46">
+        <output-dir compare="Text">q46</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q50">
+        <output-dir compare="Text">q50</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q52">
+        <output-dir compare="Text">q52</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q55">
+        <output-dir compare="Text">q55</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q59">
+        <output-dir compare="Text">q59</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q62">
+        <output-dir compare="Text">q62</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q68">
+        <output-dir compare="Text">q68</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q73">
+        <output-dir compare="Text">q73</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q79">
+        <output-dir compare="Text">q79</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q81">
+        <output-dir compare="Text">q81</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q82">
+        <output-dir compare="Text">q82</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q88">
+        <output-dir compare="Text">q88</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q91">
+        <output-dir compare="Text">q91</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q94">
+        <output-dir compare="Text">q94</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q95">
+        <output-dir compare="Text">q95</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q96">
+        <output-dir compare="Text">q96</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="tpcds">
+      <compilation-unit name="q98">
+        <output-dir compare="Text">q98</output-dir>
+      </compilation-unit>
+    </test-case>
   </test-group>
   <test-group name="tpch">
     <test-case FilePath="tpch">
@@ -7726,6 +7926,31 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="types">
+      <compilation-unit name="isarray">
+        <output-dir compare="Text">isarray</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="types">
+      <compilation-unit name="isboolean">
+        <output-dir compare="Text">isboolean</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="types">
+      <compilation-unit name="isnumber">
+        <output-dir compare="Text">isnumber</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="types">
+      <compilation-unit name="isobject">
+        <output-dir compare="Text">isobject</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="types">
+      <compilation-unit name="isstring">
+        <output-dir compare="Text">isstring</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="types">
       <compilation-unit name="record01">
         <output-dir compare="Text">record01</output-dir>
       </compilation-unit>
diff --git a/asterixdb/asterix-client-helper/pom.xml b/asterixdb/asterix-client-helper/pom.xml
index e082629..a078d16 100644
--- a/asterixdb/asterix-client-helper/pom.xml
+++ b/asterixdb/asterix-client-helper/pom.xml
@@ -100,12 +100,10 @@
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
-      <version>2.4</version>
     </dependency>
     <dependency>
       <groupId>org.json</groupId>
       <artifactId>json</artifactId>
-      <version>20090211</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java
index e767f28..ceb873d 100644
--- a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java
+++ b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java
@@ -45,7 +45,7 @@
 
 
     @Option(name = "-timeout", metaVar = "<secs>", usage = "Timeout for wait commands in seconds")
-    protected int timeoutSecs = -1;
+    protected int timeoutSecs = 0;
 
     @Option(name = "-quiet", aliases = "-q", usage = "Suppress all normal output")
     protected boolean quiet;
diff --git a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java
index 031a721..e7b6be3 100644
--- a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java
+++ b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java
@@ -23,10 +23,14 @@
 import java.net.HttpURLConnection;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.asterix.clienthelper.Args;
 
 public abstract class RemoteCommand extends ClientCommand {
+
+    public static final int MAX_CONNECTION_TIMEOUT_SECS = 60;
+
     protected enum Method {
         GET,
         POST
@@ -64,6 +68,10 @@
     protected HttpURLConnection openConnection(String path, Method method) throws IOException {
         URL url = new URL("http://" + hostPort + "/" + path);
         HttpURLConnection conn = (HttpURLConnection)url.openConnection();
+        final int timeoutMillis =
+                (int) TimeUnit.SECONDS.toMillis(Math.max(MAX_CONNECTION_TIMEOUT_SECS, args.getTimeoutSecs()));
+        conn.setConnectTimeout(timeoutMillis);
+        conn.setReadTimeout(timeoutMillis);
         conn.setRequestMethod(method.name());
         return conn;
     }
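For context, a minimal sketch of the timeout arithmetic this hunk introduces (the Args plumbing is simplified here): because of Math.max, the connect/read timeout is floored at MAX_CONNECTION_TIMEOUT_SECS, so even the new -timeout default of 0 still gives each HTTP request a 60-second budget, while a larger user-supplied timeout is passed through unchanged.

    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.concurrent.TimeUnit;

    // Illustrative sketch only, not the real RemoteCommand class.
    public class ConnectionTimeoutSketch {
        static final int MAX_CONNECTION_TIMEOUT_SECS = 60;

        static HttpURLConnection open(String hostPort, String path, int timeoutSecs) throws Exception {
            URL url = new URL("http://" + hostPort + "/" + path);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            // Effective timeout: at least 60 seconds, or the user-supplied value if larger.
            int timeoutMillis = (int) TimeUnit.SECONDS.toMillis(Math.max(MAX_CONNECTION_TIMEOUT_SECS, timeoutSecs));
            conn.setConnectTimeout(timeoutMillis);
            conn.setReadTimeout(timeoutMillis);
            conn.setRequestMethod("GET");
            return conn;
        }

        public static void main(String[] args) {
            // The new Args default of 0 still yields a 60,000 ms connection timeout.
            System.out.println(TimeUnit.SECONDS.toMillis(Math.max(MAX_CONNECTION_TIMEOUT_SECS, 0)));
        }
    }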
diff --git a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/WaitForClusterCommand.java b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/WaitForClusterCommand.java
index 390ce7b..b0b4c6f 100644
--- a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/WaitForClusterCommand.java
+++ b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/WaitForClusterCommand.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.nio.charset.StandardCharsets;
+import java.util.concurrent.TimeUnit;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -38,17 +39,18 @@
     @Override
     @SuppressWarnings("squid:S2142") // interrupted exception
     public int execute() throws IOException {
+        final int timeoutSecs = args.getTimeoutSecs();
         log("Waiting "
-                + (args.getTimeoutSecs() > 0 ? "up to " + args.getTimeoutSecs() + " seconds " : "")
+                + (timeoutSecs > 0 ? "up to " + timeoutSecs + " seconds " : "")
                 + "for cluster " + hostPort + " to be available.");
 
         long startTime = System.currentTimeMillis();
+        long timeoutMillis = TimeUnit.SECONDS.toMillis(timeoutSecs);
         boolean first = true;
         String lastState = null;
         while (true) {
             if (!first) {
-                if (args.getTimeoutSecs() >= 0
-                        && (startTime + (args.getTimeoutSecs() * 1000) < System.currentTimeMillis())) {
+                if (timeoutMillis > 0 && (startTime + timeoutMillis < System.currentTimeMillis())) {
                     break;
                 }
                 try {
@@ -75,7 +77,7 @@
                 // ignore exception, try again
             }
         }
-        log("Cluster " + hostPort + " was not available before timeout of " + args.getTimeoutSecs()
+        log("Cluster " + hostPort + " was not available before timeout of " + timeoutSecs
                 + " seconds was exhausted" + (lastState != null ? " (state: " + lastState + ")" : "")
                 + "; check logs for more information");
         return 1;
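A condensed sketch of the revised wait loop, under the assumption (consistent with the new Args default) that timeoutSecs <= 0 now means "wait indefinitely"; the cluster-state probe is replaced by a BooleanSupplier for illustration.

    import java.util.concurrent.TimeUnit;
    import java.util.function.BooleanSupplier;

    // Simplified model of WaitForClusterCommand.execute(): poll roughly once per second
    // until the probe succeeds, giving up only when a positive timeout has elapsed.
    public class WaitLoopSketch {
        static boolean waitFor(BooleanSupplier clusterActive, int timeoutSecs) throws InterruptedException {
            long startTime = System.currentTimeMillis();
            long timeoutMillis = TimeUnit.SECONDS.toMillis(timeoutSecs);
            boolean first = true;
            while (true) {
                if (!first) {
                    if (timeoutMillis > 0 && startTime + timeoutMillis < System.currentTimeMillis()) {
                        return false; // timeout exhausted
                    }
                    TimeUnit.SECONDS.sleep(1); // delay between probes
                }
                first = false;
                if (clusterActive.getAsBoolean()) {
                    return true;
                }
            }
        }

        public static void main(String[] args) throws InterruptedException {
            System.out.println(waitFor(() -> true, 30)); // true
        }
    }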
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java
index adf8e38..323df65 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java
@@ -27,6 +27,7 @@
 
     public enum ClusterState {
         STARTING,
+        PENDING,
         ACTIVE,
         UNUSABLE,
         REBALANCING
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/ClusterPartition.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/ClusterPartition.java
index 6cd44a7..cc27fbb 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/ClusterPartition.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/ClusterPartition.java
@@ -57,17 +57,16 @@
 
     @Override
     public ClusterPartition clone() {
-        ClusterPartition clone = new ClusterPartition(partitionId, nodeId, ioDeviceNum);
-        return clone;
+        return new ClusterPartition(partitionId, nodeId, ioDeviceNum);
     }
 
     @Override
     public String toString() {
         StringBuilder sb = new StringBuilder();
         sb.append("ID:" + partitionId);
-        sb.append(" Original Node: " + nodeId);
-        sb.append(" IODevice: " + ioDeviceNum);
-        sb.append(" Active Node: " + activeNodeId);
+        sb.append(", Original Node: " + nodeId);
+        sb.append(", IODevice: " + ioDeviceNum);
+        sb.append(", Active Node: " + activeNodeId);
         return sb.toString();
     }
 
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixMetadataProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixMetadataProperties.java
index 677fc78..3584f2b 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixMetadataProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixMetadataProperties.java
@@ -26,22 +26,35 @@
 
 public class AsterixMetadataProperties extends AbstractAsterixProperties {
 
+    private static final String METADATA_REGISTRATION_TIMEOUT_KEY = "metadata.registration.timeout.secs";
+    private static final long METADATA_REGISTRATION_TIMEOUT_DEFAULT = 60;
+
+    private static final String METADATA_PORT_KEY = "metadata.port";
+    private static final int METADATA_PORT_DEFAULT = 0;
+
+    private static final String METADATA_CALLBACK_PORT_KEY = "metadata.callback.port";
+    private static final int METADATA_CALLBACK_PORT_DEFAULT = 0;
+
     public AsterixMetadataProperties(AsterixPropertiesAccessor accessor) {
         super(accessor);
     }
 
+    @PropertyKey("instance.name")
     public String getInstanceName() {
         return accessor.getInstanceName();
     }
 
+    @PropertyKey("metadata.node")
     public String getMetadataNodeName() {
         return accessor.getMetadataNodeName();
     }
 
+    @PropertyKey("metadata.partition")
     public ClusterPartition getMetadataPartition() {
         return accessor.getMetadataPartition();
     }
 
+    @PropertyKey("node.stores")
     public Map<String, String[]> getStores() {
         return accessor.getStores();
     }
@@ -54,19 +67,41 @@
         return accessor.getCoredumpPath(nodeId);
     }
 
+    @PropertyKey("core.dump.paths")
     public Map<String, String> getCoredumpPaths() {
         return accessor.getCoredumpConfig();
     }
 
+    @PropertyKey("node.partitions")
     public Map<String, ClusterPartition[]> getNodePartitions() {
         return accessor.getNodePartitions();
     }
 
+    @PropertyKey("cluster.partitions")
     public SortedMap<Integer, ClusterPartition> getClusterPartitions() {
         return accessor.getClusterPartitions();
     }
 
+    @PropertyKey("transaction.log.dirs")
     public Map<String, String> getTransactionLogDirs() {
         return accessor.getTransactionLogDirs();
     }
+
+    @PropertyKey(METADATA_REGISTRATION_TIMEOUT_KEY)
+    public long getRegistrationTimeoutSecs() {
+        return accessor.getProperty(METADATA_REGISTRATION_TIMEOUT_KEY, METADATA_REGISTRATION_TIMEOUT_DEFAULT,
+                PropertyInterpreters.getLongPropertyInterpreter());
+    }
+
+    @PropertyKey(METADATA_PORT_KEY)
+    public int getMetadataPort() {
+        return accessor.getProperty(METADATA_PORT_KEY, METADATA_PORT_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
+
+    @PropertyKey(METADATA_CALLBACK_PORT_KEY)
+    public int getMetadataCallbackPort() {
+        return accessor.getProperty(METADATA_CALLBACK_PORT_KEY, METADATA_CALLBACK_PORT_DEFAULT,
+                PropertyInterpreters.getIntegerPropertyInterpreter());
+    }
 }
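To make the new keyed getters concrete, here is a small stand-alone illustration of the lookup-with-default behaviour that getRegistrationTimeoutSecs() relies on; java.util.Properties stands in for the AsterixPropertiesAccessor / PropertyInterpreters machinery, so treat it as a sketch rather than the actual resolution path.

    import java.util.Properties;

    public class MetadataPropertySketch {
        // Mirrors METADATA_REGISTRATION_TIMEOUT_KEY / _DEFAULT above.
        static long registrationTimeoutSecs(Properties config) {
            String raw = config.getProperty("metadata.registration.timeout.secs");
            return raw == null ? 60L : Long.parseLong(raw);
        }

        public static void main(String[] args) {
            Properties config = new Properties();
            System.out.println(registrationTimeoutSecs(config)); // 60 (default)
            config.setProperty("metadata.registration.timeout.secs", "120");
            System.out.println(registrationTimeoutSecs(config)); // 120
        }
    }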
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
index 1576774..3ae2bd9 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
@@ -34,7 +34,7 @@
     public static final String PROPERTY_CLUSTER_ADDRESS = "cluster.address";
     public static final String PROPERTY_INSTANCE_NAME = "instance";
     public static final String DEFAULT_INSTANCE_NAME = "DEFAULT_INSTANCE";
-    public static final String PROPERTY_METADATA_PORT = "metadata.port";
+    public static final String PROPERTY_METADATA_NODE = "metadata.node";
     public static final String PROPERTY_COREDUMP_DIR = "coredumpdir";
     public static final String DEFAULT_COREDUMP_DIR = String.join(File.separator, ASTERIXDB, "coredump");
     public static final String PROPERTY_TXN_LOG_DIR = "txnlogdir";
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
index ea1ee31..a12d802 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixPropertiesAccessor.java
@@ -31,6 +31,7 @@
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicReference;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
@@ -54,6 +55,7 @@
 public class AsterixPropertiesAccessor {
     private static final Logger LOGGER = Logger.getLogger(AsterixPropertiesAccessor.class.getName());
 
+    private static final AtomicReference<AsterixPropertiesAccessor> instanceHolder = new AtomicReference<>();
     private final String instanceName;
     private final String metadataNodeName;
     private final List<String> nodeNames = new ArrayList<>();;
@@ -76,7 +78,7 @@
      * @throws AsterixException
      * @throws IOException
      */
-    public AsterixPropertiesAccessor() throws AsterixException, IOException {
+    private AsterixPropertiesAccessor() throws AsterixException, IOException {
         String fileName = System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY);
         if (fileName == null) {
             fileName = GlobalConfig.DEFAULT_CONFIG_FILE_NAME;
@@ -164,25 +166,24 @@
     /**
      * Constructor which wraps an IApplicationConfig.
      */
-    public AsterixPropertiesAccessor(IApplicationConfig cfg) throws AsterixException {
+    private AsterixPropertiesAccessor(IApplicationConfig cfg) throws AsterixException {
         this.cfg = cfg;
         instanceName = cfg.getString(AsterixProperties.SECTION_ASTERIX, AsterixProperties.PROPERTY_INSTANCE_NAME,
                 AsterixProperties.DEFAULT_INSTANCE_NAME);
-        String mdNode = null;
         nodePartitionsMap = new HashMap<>();
         MutableInt uniquePartitionId = new MutableInt(0);
         extensions = new ArrayList<>();
         // Iterate through each configured NC.
         for (String section : cfg.getSections()) {
             if (section.startsWith(AsterixProperties.SECTION_PREFIX_NC)) {
-                mdNode = configureNc(section, mdNode, uniquePartitionId);
+                configureNc(section, uniquePartitionId);
             } else if (section.startsWith(AsterixProperties.SECTION_PREFIX_EXTENSION)) {
                 String className = AsterixProperties.getSectionId(AsterixProperties.SECTION_PREFIX_EXTENSION, section);
                 configureExtension(className, section);
             }
         }
-
-        metadataNodeName = mdNode;
+        metadataNodeName = getProperty(AsterixProperties.PROPERTY_METADATA_NODE,
+                nodeNames.isEmpty() ? "" : nodeNames.get(0), PropertyInterpreters.getStringPropertyInterpreter());
         asterixConfigurationParams = null;
         loadAsterixBuildProperties();
     }
@@ -197,16 +198,8 @@
         extensions.add(new AsterixExtension(className, kvs));
     }
 
-    private String configureNc(String section, String mdNode, MutableInt uniquePartitionId) {
+    private void configureNc(String section, MutableInt uniquePartitionId) {
         String ncId = AsterixProperties.getSectionId(AsterixProperties.SECTION_PREFIX_NC, section);
-        String newMetadataNode = mdNode;
-
-        // Here we figure out which is the metadata node. If any NCs
-        // declare "metadata.port", use that one; otherwise just use the first.
-        if (mdNode == null || cfg.getString(section, AsterixProperties.PROPERTY_METADATA_PORT) != null) {
-            // QQQ But we don't actually *honor* metadata.port yet!
-            newMetadataNode = ncId;
-        }
 
         // Now we assign the coredump and txnlog directories for this node.
         // QQQ Default values? Should they be specified here? Or should there
@@ -225,7 +218,7 @@
         String[] nodeStores = new String[iodevices.length];
         ClusterPartition[] nodePartitions = new ClusterPartition[iodevices.length];
         for (int i = 0; i < nodePartitions.length; i++) {
-            // Construct final storage path from iodevice dir + storage subdir.
+            // Construct final storage path from iodevice dir + storage subdir.
             nodeStores[i] = iodevices[i] + File.separator + storageSubdir;
             // Create ClusterPartition instances for this NC.
             ClusterPartition partition = new ClusterPartition(uniquePartitionId.getValue(), ncId, i);
@@ -236,7 +229,6 @@
         stores.put(ncId, nodeStores);
         nodePartitionsMap.put(ncId, nodePartitions);
         nodeNames.add(ncId);
-        return newMetadataNode;
     }
 
     private void loadAsterixBuildProperties() throws AsterixException {
@@ -286,7 +278,14 @@
             p = asterixConfigurationParams.get(property);
             value = (p == null) ? null : p.getValue();
         } else {
-            value = cfg.getString("asterix", property);
+            value = cfg.getString("app", property);
+            if (value == null) {
+                value = cfg.getString("asterix", property);
+                if (value != null) {
+                    LOGGER.warn("[asterix] config section deprecated and will be removed in a future release;" +
+                            " please update to [app] (found: " + property + ')');
+                }
+            }
         }
         if (value == null) {
             return defaultValue;
@@ -327,4 +326,24 @@
     public List<AsterixExtension> getExtensions() {
         return extensions;
     }
+
+    public static AsterixPropertiesAccessor getInstance(IApplicationConfig cfg) throws IOException, AsterixException {
+        // Determine whether to use old-style asterix-configuration.xml or new-style configuration.
+        // QQQ strip this out eventually
+        // QQQ this is NOT a good way to determine whether the config is valid
+        AsterixPropertiesAccessor propertiesAccessor;
+        if (cfg != null && cfg.getString("cc", "cluster.address") != null) {
+            propertiesAccessor = new AsterixPropertiesAccessor(cfg);
+        } else {
+            propertiesAccessor = new AsterixPropertiesAccessor();
+        }
+        if (!instanceHolder.compareAndSet(null, propertiesAccessor)) {
+            propertiesAccessor = instanceHolder.get();
+        }
+        return propertiesAccessor;
+    }
+
+    public static AsterixPropertiesAccessor getInstance() throws IOException, AsterixException {
+        return getInstance(null);
+    }
 }
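The constructors above become private because getInstance() now publishes a single accessor per process via an AtomicReference compare-and-set; a stripped-down sketch of that idiom (independent of the Asterix classes) is:

    import java.util.concurrent.atomic.AtomicReference;

    public class CasSingletonSketch {
        private static final AtomicReference<CasSingletonSketch> INSTANCE = new AtomicReference<>();

        private CasSingletonSketch() {
        }

        public static CasSingletonSketch getInstance() {
            CasSingletonSketch candidate = new CasSingletonSketch();
            // First publisher wins; a losing candidate is discarded in favour of the shared one.
            if (!INSTANCE.compareAndSet(null, candidate)) {
                candidate = INSTANCE.get();
            }
            return candidate;
        }

        public static void main(String[] args) {
            System.out.println(getInstance() == getInstance()); // true
        }
    }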
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
index dbd2139..6e8c4cf 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
@@ -28,5 +28,5 @@
     /**
      * handle the message upon delivery
      */
-    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException;
+    void handle(IControllerService cs) throws HyracksDataException, InterruptedException;
 }
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataBootstrap.java
similarity index 71%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataBootstrap.java
index af2f691..940ec60 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/metadata/IMetadataBootstrap.java
@@ -16,4 +16,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+package org.apache.asterix.common.metadata;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+@FunctionalInterface
+public interface IMetadataBootstrap {
+    /**
+     * Initializes the metadata manager, e.g., finds the remote metadata node.
+     */
+    void init() throws HyracksDataException;
+}
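Since IMetadataBootstrap is a @FunctionalInterface with a single init() method, a caller can supply the bootstrap step as a lambda; the runBootstrap() helper below is purely hypothetical and only shows the shape of such a call site.

    // Local copy of the interface shape, for a self-contained example.
    interface MetadataBootstrap {
        void init() throws Exception;
    }

    public class MetadataBootstrapExample {
        static void runBootstrap(MetadataBootstrap bootstrap) throws Exception {
            bootstrap.init();
        }

        public static void main(String[] args) throws Exception {
            runBootstrap(() -> System.out.println("locating the remote metadata node..."));
        }
    }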
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/JSONUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/JSONUtil.java
index fd89c5e..0cbf1b9 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/JSONUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/JSONUtil.java
@@ -49,7 +49,7 @@
         } else if (o instanceof JSONArray) {
             return append(sb, (JSONArray) o, indent);
         } else if (o instanceof String) {
-            return quote(sb, (String) o);
+            return quoteAndEscape(sb, (String) o);
         } else if (JSONObject.NULL.equals(o) || o instanceof Number || o instanceof Boolean) {
             return sb.append(String.valueOf(o));
         }
@@ -102,10 +102,11 @@
     }
 
     public static String quoteAndEscape(String str) {
-        StringBuilder sb = new StringBuilder();
-        sb.append('"');
-        escape(sb, str);
-        return sb.append('"').toString();
+        return quoteAndEscape(new StringBuilder(), str).toString();
+    }
+
+    private static StringBuilder quoteAndEscape(StringBuilder sb, String str) {
+        return escape(sb.append('"'), str).append('"');
     }
 
     public static String escape(String str) {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/PrintUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/PrintUtil.java
new file mode 100644
index 0000000..8c0e4ff
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/PrintUtil.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.common.utils;
+
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.Map;
+
+public class PrintUtil {
+    private PrintUtil() {
+    }
+
+    public static String toString(Map<String, ? extends Object[]> map) {
+        Iterator<? extends Map.Entry<String, ? extends Object[]>> iter = map.entrySet().iterator();
+        if (!iter.hasNext()) {
+            return "{}";
+        }
+        StringBuilder sb = new StringBuilder();
+        sb.append('{');
+        while (true) {
+            Map.Entry<String, ? extends Object[]> entry = iter.next();
+            sb.append(entry.getKey());
+            sb.append('=');
+            sb.append(Arrays.toString(entry.getValue()));
+            if (!iter.hasNext()) {
+                break;
+            }
+            sb.append(',').append(' ');
+        }
+        return sb.append('}').toString();
+    }
+}
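A short usage sketch for the new helper (assuming asterix-common is on the classpath): keys keep their map iteration order and each array value is rendered with Arrays.toString.

    import java.util.LinkedHashMap;
    import java.util.Map;

    import org.apache.asterix.common.utils.PrintUtil;

    public class PrintUtilExample {
        public static void main(String[] args) {
            Map<String, String[]> stores = new LinkedHashMap<>();
            stores.put("nc1", new String[] { "/data/io0/storage", "/data/io1/storage" });
            stores.put("nc2", new String[] { "/data/io0/storage" });
            // Prints: {nc1=[/data/io0/storage, /data/io1/storage], nc2=[/data/io0/storage]}
            System.out.println(PrintUtil.toString(stores));
        }
    }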
diff --git a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
index 08a0342..29c0afd 100644
--- a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
+++ b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
@@ -37,6 +37,7 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.TimeUnit;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Matcher;
@@ -81,6 +82,10 @@
     private static final Pattern JAVA_BLOCK_COMMENT_PATTERN =
             Pattern.compile("/\\*.*\\*/", Pattern.MULTILINE | Pattern.DOTALL);
     private static final Pattern REGEX_LINES_PATTERN = Pattern.compile("^(-)?/(.*)/([im]*)$");
+    private static final Pattern POLL_TIMEOUT_PATTERN =
+            Pattern.compile("polltimeoutsecs=(\\d+)(\\D|$)", Pattern.MULTILINE);
+    private static final Pattern POLL_DELAY_PATTERN = Pattern.compile("polldelaysecs=(\\d+)(\\D|$)", Pattern.MULTILINE);
+
     private static Method managixExecuteMethod = null;
     private static final HashMap<Integer, ITestServer> runningTestServers = new HashMap<>();
 
@@ -658,6 +663,42 @@
                     ResultExtractor.extract(resultStream);
                 }
                 break;
+            case "pollquery":
+                // polltimeoutsecs=nnn, polldelaysecs=nnn
+                final Matcher timeoutMatcher = POLL_TIMEOUT_PATTERN.matcher(statement);
+                int timeoutSecs;
+                if (timeoutMatcher.find()) {
+                    timeoutSecs = Integer.parseInt(timeoutMatcher.group(1));
+                } else {
+                    throw new IllegalArgumentException("ERROR: polltimeoutsecs=nnn must be present in poll file");
+                }
+                final Matcher retryDelayMatcher = POLL_DELAY_PATTERN.matcher(statement);
+                int retryDelaySecs = retryDelayMatcher.find() ? Integer.parseInt(retryDelayMatcher.group(1)) : 1;
+                long startTime = System.currentTimeMillis();
+                long limitTime = startTime + TimeUnit.SECONDS.toMillis(timeoutSecs);
+                ctx.setType(ctx.getType().substring("poll".length()));
+                Exception finalException;
+                LOGGER.fine("polling for up to " + timeoutSecs + " seconds w/ " + retryDelaySecs  + " second(s) delay");
+                while (true) {
+                    try {
+                        executeTest(testCaseCtx, ctx, statement, isDmlRecoveryTest, pb, cUnit, queryCount,
+                                expectedResultFileCtxs, testFile, actualPath);
+                        finalException = null;
+                        break;
+                    } catch (Exception e) {
+                        if ((System.currentTimeMillis() > limitTime)) {
+                            finalException = e;
+                            break;
+                        }
+                        LOGGER.fine("sleeping " + retryDelaySecs + " second(s) before polling again");
+                        Thread.sleep(TimeUnit.SECONDS.toMillis(retryDelaySecs));
+                    }
+                }
+                if (finalException != null) {
+                    throw new Exception("Poll limit (" + timeoutSecs + "s) exceeded without obtaining expected result",
+                            finalException);
+                }
+                break;
             case "query":
             case "async":
             case "asyncdefer":
diff --git a/asterixdb/asterix-doc/pom.xml b/asterixdb/asterix-doc/pom.xml
index f674f7f..a41bce2 100644
--- a/asterixdb/asterix-doc/pom.xml
+++ b/asterixdb/asterix-doc/pom.xml
@@ -58,11 +58,11 @@
                 </concat>
                 <concat destfile="${project.build.directory}/generated-site/markdown/sqlpp/builtins.md">
                   <filelist dir="${project.basedir}/src/main/markdown/builtins"
-                            files="0_toc.md,1_numeric.md,2_string.md,3_binary.md,4_spatial.md,5_similarity.md,6_tokenizing.md,7_temporal.md,7_allens.md,8_record.md,9_aggregate_sql.md,10_comparison.md,11_others.md"/>
+                            files="0_toc.md,1_numeric.md,2_string.md,3_binary.md,4_spatial.md,5_similarity.md,6_tokenizing.md,7_temporal.md,7_allens.md,8_record.md,9_aggregate_sql.md,10_comparison.md,11_type.md,12_misc.md"/>
                 </concat>
                 <concat destfile="${project.build.directory}/generated-site/markdown/aql/builtins.md">
                   <filelist dir="${project.basedir}/src/main/markdown/builtins"
-                            files="0_toc.md,1_numeric.md,2_string.md,3_binary.md,4_spatial.md,5_similarity.md,6_tokenizing.md,7_temporal.md,7_allens.md,8_record.md,9_aggregate_aql.md,10_comparison.md,11_others.md"/>
+                            files="0_toc.md,1_numeric.md,2_string.md,3_binary.md,4_spatial.md,5_similarity.md,6_tokenizing.md,7_temporal.md,7_allens.md,8_record.md,9_aggregate_aql.md,10_comparison.md,11_type.md,12_misc.md"/>
                 </concat>
               </target>
             </configuration>
diff --git a/asterixdb/asterix-doc/src/main/markdown/builtins/0_toc.md b/asterixdb/asterix-doc/src/main/markdown/builtins/0_toc.md
index 2c8dedf..2cab02c3 100644
--- a/asterixdb/asterix-doc/src/main/markdown/builtins/0_toc.md
+++ b/asterixdb/asterix-doc/src/main/markdown/builtins/0_toc.md
@@ -31,7 +31,8 @@
 * [Record Functions](#RecordFunctions)
 * [Aggregate Functions (Array Functions)](#AggregateFunctions)
 * [Comparison Functions](#ComparisonFunctions)
-* [Other Functions](#OtherFunctions)
+* [Type Functions](#TypeFunctions)
+* [Miscellaneous Functions](#MiscFunctions)
 
 The system provides various classes of functions to support operations on numeric, string, spatial, and temporal data.
 This document explains how to use these functions.
diff --git a/asterixdb/asterix-doc/src/main/markdown/builtins/11_type.md b/asterixdb/asterix-doc/src/main/markdown/builtins/11_type.md
new file mode 100644
index 0000000..7d355b2
--- /dev/null
+++ b/asterixdb/asterix-doc/src/main/markdown/builtins/11_type.md
@@ -0,0 +1,263 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements.  See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership.  The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License.  You may obtain a copy of the License at
+ !
+ !   http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied.  See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+
+## <a id="TypeFunctions">Type Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+
+### is_array (isarray) ###
+ * Syntax:
+
+        is_array(expr)
+
+ * Checks whether the given expression evaluates to an `array` value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the argument is an `array` value or not,
+    * a `missing` if the argument is a `missing` value,
+    * a `null` if the argument is a `null` value.
+
+ * Example:
+
+        {
+          "a": is_array(true),
+          "b": is_array(false),
+          "c": isarray(null),
+          "d": isarray(missing),
+          "e": isarray("d"),
+          "f": isarray(4.0),
+          "g": isarray(5),
+          "h": isarray(["1", 2]),
+          "i": isarray({"a":1})
+        };
+
+
+ * The expected result is:
+
+        { "a": false, "b": false, "c": null, "e": false, "f": false, "g": false, "h": true, "i": false }
+
+ The function has an alias `isarray`.
+
+
+### is_boolean (isboolean, isbool) ###
+ * Syntax:
+
+        is_boolean(expr)
+
+ * Checks whether the given expression evaluates to a `boolean` value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the argument is a `boolean` value or not,
+    * a `missing` if the argument is a `missing` value,
+    * a `null` if the argument is a `null` value.
+
+ * Example:
+
+        {
+          "a": isboolean(true),
+          "b": isboolean(false),
+          "c": is_boolean(null),
+          "d": is_boolean(missing),
+          "e": isbool("d"),
+          "f": isbool(4.0),
+          "g": isbool(5),
+          "h": isbool(["1", 2]),
+          "i": isbool({"a":1})
+        };
+
+
+ * The expected result is:
+
+        { "a": true, "b": true, "c": null, "e": false, "f": false, "g": false, "h": false, "i": false }
+
+ The function has two aliases, `isboolean` and `isbool`.
+
+
+### is_number (isnumber, isnum) ###
+ * Syntax:
+
+        is_number(expr)
+
+ * Checks whether the given expression evaluates to a numeric value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the argument is a `smallint`/`tinyint`/`integer`/`bigint`/`float`/`double`
+      value or not,
+    * a `missing` if the argument is a `missing` value,
+    * a `null` if the argument is a `null` value.
+
+ * Example:
+
+        {
+          "a": is_number(true),
+          "b": is_number(false),
+          "c": isnumber(null),
+          "d": isnumber(missing),
+          "e": isnumber("d"),
+          "f": isnum(4.0),
+          "g": isnum(5),
+          "h": isnum(["1", 2]),
+          "i": isnum({"a":1})
+        };
+
+
+ * The expected result is:
+
+        { "a": false, "b": false, "c": null, "e": false, "f": true, "g": true, "h": false, "i": false }
+
+ The function has two aliases, `isnumber` and `isnum`.
+
+### is_object (isobject, isobj) ###
+ * Syntax:
+
+        is_object(expr)
+
+ * Checks whether the given expression evaluates to a `record` value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the argument is a `record` value or not,
+    * a `missing` if the argument is a `missing` value,
+    * a `null` if the argument is a `null` value.
+
+ * Example:
+
+        {
+          "a": is_object(true),
+          "b": is_object(false),
+          "c": isobject(null),
+          "d": isobject(missing),
+          "e": isobj("d"),
+          "f": isobj(4.0),
+          "g": isobj(5),
+          "h": isobj(["1", 2]),
+          "i": isobj({"a":1})
+        };
+
+
+ * The expected result is:
+
+       { "a": false, "b": false, "c": null, "e": false, "f": false, "g": false, "h": false, "i": true }
+
+ The function has two aliases, `isobject` and `isobj`.
+
+
+### is_string (isstring, isstr) ###
+ * Syntax:
+
+        is_string(expr)
+
+ * Checks whether the given expression evaluates to a `string` value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the argument is a `string` value or not,
+    * a `missing` if the argument is a `missing` value,
+    * a `null` if the argument is a `null` value.
+
+ * Example:
+
+        {
+          "a": is_string(true),
+          "b": isstring(false),
+          "c": isstring(null),
+          "d": isstr(missing),
+          "e": isstr("d"),
+          "f": isstr(4.0),
+          "g": isstr(5),
+          "h": isstr(["1", 2]),
+          "i": isstr({"a":1})
+        };
+
+
+ * The expected result is:
+
+        { "a": false, "b": false, "c": null, "e": true, "f": false, "g": false, "h": false, "i": false }
+
+ The function has two aliases, `isstring` and `isstr`.
+
+
+### is_null ###
+ * Syntax:
+
+        is_null(expr)
+
+ * Checks whether the given expression evaluates to a `null` value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the variable is a `null` or not,
+    * a `missing` if the input is `missing`.
+
+ * Example:
+
+        { "v1": is_null(null), "v2": is_null(1), "v3": is_null(missing) };
+
+
+ * The expected result is:
+
+        { "v1": true, "v2": false }
+
+ The function has an alias `isnull`.
+
+### is_missing ###
+ * Syntax:
+
+        is_missing(expr)
+
+ * Checks whether the given expression evaluates to a `missing` value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the variable is a `missing` or not.
+
+ * Example:
+
+        { "v1": is_missing(null), "v2": is_missing(1), "v3": is_missing(missing) };
+
+
+ * The expected result is:
+
+        { "v1": false, "v2": false, "v3": true }
+
+ The function has an alias `ismissing`.
+
+### is_unknown ###
+ * Syntax:
+
+        is_unknown(expr)
+
+ * Checks whether the given expression is evaluated to be a `null` value or a `missing` value.
+ * Arguments:
+    * `expr` : an expression (any type is allowed).
+ * Return Value:
+    * a `boolean` on whether the argument is a `null` value or a `missing` value (`true`) or not (`false`).
+
+ * Example:
+
+        { "v1": is_unknown(null), "v2": is_unknown(1), "v3": is_unknown(missing) };
+
+
+ * The expected result is:
+
+        { "v1": true, "v2": false, "v3": true }
+
+ The function has an alias `isunknown`.
+
diff --git a/asterixdb/asterix-doc/src/main/markdown/builtins/11_others.md b/asterixdb/asterix-doc/src/main/markdown/builtins/12_misc.md
similarity index 76%
rename from asterixdb/asterix-doc/src/main/markdown/builtins/11_others.md
rename to asterixdb/asterix-doc/src/main/markdown/builtins/12_misc.md
index a20b8b3..ee5ca31 100644
--- a/asterixdb/asterix-doc/src/main/markdown/builtins/11_others.md
+++ b/asterixdb/asterix-doc/src/main/markdown/builtins/12_misc.md
@@ -17,7 +17,7 @@
  ! under the License.
  !-->
 
-## <a id="OtherFunctions">Other Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
+## <a id="MiscFunctions">Miscellaneous Functions</a> <font size="4"><a href="#toc">[Back to TOC]</a></font> ##
 
 ### uuid ###
  * Syntax:
@@ -31,70 +31,6 @@
     * a generated, random `uuid`.
 
 
-### is_null ###
- * Syntax:
-
-        is_null(expr)
-
- * Checks whether the given expression is evaluated to be a `null` value.
- * Arguments:
-    * `expr` : an expression (any type is allowed).
- * Return Value:
-    * a `boolean` on whether the variable is a `null` or not,
-    * a `missing` if the input is `missing`.
-
- * Example:
-
-        { "v1": is_null(null), "v2": is_null(1), "v3": is_null(missing) };
-
-
- * The expected result is:
-
-        { "v1": true, "v2": false }
-
-
-### is_missing ###
- * Syntax:
-
-        is_missing(expr)
-
- * Checks whether the given expression is evaluated to be a `missing` value.
- * Arguments:
-    * `expr` : an expression (any type is allowed).
- * Return Value:
-    * a `boolean` on whether the variable is a `missing` or not.
-
- * Example:
-
-        { "v1": is_missing(null), "v2": is_missing(1), "v3": is_missing(missing) };
-
-
- * The expected result is:
-
-        { "v1": false, "v2": false, "v3": true }
-
-
-### is_unknown ###
- * Syntax:
-
-        is_unknown(expr)
-
- * Checks whether the given variable is a `null` value or a `missing` value.
- * Arguments:
-    * `expr` : an expression (any type is allowed).
- * Return Value:
-    * a `boolean` on whether the variable is a `null`/``missing` value (`true`) or not (`false`).
-
- * Example:
-
-        { "v1": is_unknown(null), "v2": is_unknown(1), "v3": is_unknown(missing) };
-
-
- * The expected result is:
-
-        { "v1": true, "v2": false, "v3": true }
-
-
 ### len ###
  * Syntax:
 
diff --git a/asterixdb/asterix-doc/src/main/markdown/builtins/7_temporal.md b/asterixdb/asterix-doc/src/main/markdown/builtins/7_temporal.md
index df47037..cee47ba 100644
--- a/asterixdb/asterix-doc/src/main/markdown/builtins/7_temporal.md
+++ b/asterixdb/asterix-doc/src/main/markdown/builtins/7_temporal.md
@@ -663,9 +663,6 @@
           "overlap6": null
         }
 
-
-### interval_before/interval_after/interval_meets/interval_met_by/interval_overlaps/interval_overlapped_by/interval_overlapping/interval_starts/interval_started_by/interval_covers/interval_covered_by/interval_ends/interval_ended_by ###
-
 ### interval_bin ###
  * Syntax:
 
diff --git a/asterixdb/asterix-doc/src/main/markdown/sqlpp/1_intro.md b/asterixdb/asterix-doc/src/main/markdown/sqlpp/1_intro.md
index fdc04cb..f7df33f 100644
--- a/asterixdb/asterix-doc/src/main/markdown/sqlpp/1_intro.md
+++ b/asterixdb/asterix-doc/src/main/markdown/sqlpp/1_intro.md
@@ -27,11 +27,11 @@
 relational databases, while SQL++ is much newer and targets the nested, schema-optional
 (or even schema-less) world of modern NoSQL systems.
 
-In the context of Apache AsterixDB, SQL++ is intended for working with the Asterix Data Model (ADM),
-a data model based on a superset of JSON with an enriched and flexible type system.
+In the context of Apache AsterixDB, SQL++ is intended for working with the Asterix Data Model
+([ADM](../datamodel.html)), a data model based on a superset of JSON with an enriched and flexible type system.
 New AsterixDB users are encouraged to read and work through the (much friendlier) guide
-"AsterixDB 101: An ADM and SQL++ Primer" before attempting to make use of this document.
-In addition, readers are advised to read through the Asterix Data Model (ADM) reference guide
+"[AsterixDB 101: An ADM and SQL++ Primer](primer-sqlpp.html)" before attempting to make use of this document.
+In addition, readers are advised to read through the [Asterix Data Model (ADM) reference guide](../datamodel.html)
 first as well, as an understanding of the data model is a prerequisite to understanding SQL++.
 
 In what follows, we detail the features of the SQL++ language in a grammar-guided manner.
diff --git a/asterixdb/asterix-doc/src/main/markdown/sqlpp/2_expr.md b/asterixdb/asterix-doc/src/main/markdown/sqlpp/2_expr.md
index 7641b92..811094e 100644
--- a/asterixdb/asterix-doc/src/main/markdown/sqlpp/2_expr.md
+++ b/asterixdb/asterix-doc/src/main/markdown/sqlpp/2_expr.md
@@ -29,9 +29,11 @@
                   | VariableReference
                   | ParenthesizedExpression
                   | FunctionCallExpression
-		  | Constructor
+                  | Constructor
 
-The most basic building block for any SQL++ expression is PrimaryExpression. This can be a simple literal (constant) value, a reference to a query variable that is in scope, a parenthesized expression, a function call, or a newly constructed instance of the data model (such as a newly constructed record or list of data model instances).
+The most basic building block for any SQL++ expression is PrimaryExpression. This can be a simple literal (constant)
+value, a reference to a query variable that is in scope, a parenthesized expression, a function call, or a newly
+constructed instance of the data model (such as a newly constructed record, array, or multiset of data model instances).
 
 ### <a id="Literals">Literals</a>
 
@@ -70,9 +72,9 @@
 
 ### <a id="Variable_references">Variable References</a>
 
-    VariableReference ::= <IDENTIFIER>|<DelimitedIdentifier>
-    <IDENTIFIER>  ::= <LETTER> (<LETTER> | <DIGIT> | "_" | "$")*
-    <LETTER>    ::= ["A" - "Z", "a" - "z"]
+    VariableReference     ::= <IDENTIFIER>|<DelimitedIdentifier>
+    <IDENTIFIER>          ::= <LETTER> (<LETTER> | <DIGIT> | "_" | "$")*
+    <LETTER>              ::= ["A" - "Z", "a" - "z"]
     DelimitedIdentifier   ::= "\`" (<ESCAPE_APOS> | ~["\'"])* "\`"
 
 A variable in SQL++ can be bound to any legal data model value. A variable reference refers to the value to which an in-scope variable is bound. (E.g., a variable binding may originate from one of the `FROM`, `WITH` or `LET` clauses of a `SELECT` statement or from an input parameter in the context of a function body.) Backticks, e.g., \`id\`, are used for delimited identifiers. Delimiting is needed when a variable's desired name clashes with a SQL++ keyword or includes characters not allowed in regular identifiers.
@@ -110,15 +112,25 @@
 
 ### <a id="Constructors">Constructors</a>
 
-    ListConstructor          ::= OrderedListConstructor | UnorderedListConstructor
-    OrderedListConstructor   ::= "[" ( Expression ( "," Expression )* )? "]"
-    UnorderedListConstructor ::= "{{" ( Expression ( "," Expression )* )? "}}"
+    CollectionConstructor    ::= ArrayConstructor | MultisetConstructor
+    ArrayConstructor         ::= "[" ( Expression ( "," Expression )* )? "]"
+    MultisetConstructor      ::= "{{" ( Expression ( "," Expression )* )? "}}"
     RecordConstructor        ::= "{" ( FieldBinding ( "," FieldBinding )* )? "}"
     FieldBinding             ::= Expression ":" Expression
 
-A major feature of SQL++ is its ability to construct new data model instances. This is accomplished using its constructors for each of the model's complex object structures, namely lists (ordered or unordered) and records. Ordered lists are like JSON arrays, while unordered lists have multiset (bag) semantics. Records are built from attributes that are field-name/field-value pairs, again like JSON. (See the data model document for more details on each.)
+A major feature of SQL++ is its ability to construct new data model instances. This is accomplished using its constructors
+for each of the model's complex object structures, namely arrays, multisets, and records.
+Arrays are like JSON arrays, while multisets have bag semantics.
+Records are built from fields that are field-name/field-value pairs, again like JSON.
+(See the [data model document](../datamodel.html) for more details on each.)
 
-The following examples illustrate how to construct a new ordered list with 3 items, a new record with 2 fields, and a new unordered list with 4 items, respectively. List elements can be homogeneous (as in the first example), which is the common case, or they may be heterogeneous (as in the third example). The data values and field name values used to construct lists and records in constructors are all simply SQL++ expressions. Thus, the list elements, field names, and field values used in constructors can be simple literals or they can come from query variable references or even arbitrarily complex SQL++ expressions (subqueries).
+The following examples illustrate how to construct a new array with 3 items, a new record with 2 fields,
+and a new multiset with 4 items, respectively. Array elements or multiset elements can be homogeneous (as in
+the first example),
+which is the common case, or they may be heterogeneous (as in the third example). The data values and field name values
+used to construct arrays, multisets, and records in constructors are all simply SQL++ expressions. Thus, the collection elements,
+field names, and field values used in constructors can be simple literals or they can come from query variable references
+or even arbitrarily complex SQL++ expressions (subqueries).
 
 ##### Examples
 
@@ -137,17 +149,22 @@
     Field           ::= "." Identifier
     Index           ::= "[" ( Expression | "?" ) "]"
 
-Components of complex types in the data model are accessed via path expressions. Path access can be applied to the result of a SQL++ expression that yields an instance of  a complex type, e.g., a record or list instance. For records, path access is based on field names. For ordered lists, path access is based on (zero-based) array-style indexing. SQL++ also supports an "I'm feeling lucky" style index accessor, [?], for selecting an arbitrary element from an ordered list. Attempts to access non-existent fields or out-of-bound list elements produce the special value `MISSING`.
+Components of complex types in the data model are accessed via path expressions. Path access can be applied to the result
+of a SQL++ expression that yields an instance of a complex type, e.g., a record or array instance. For records,
+path access is based on field names. For arrays, path access is based on (zero-based) array-style indexing.
+SQL++ also supports an "I'm feeling lucky" style index accessor, [?], for selecting an arbitrary element from an array.
+Attempts to access non-existent fields or out-of-bound array elements produce the special value `MISSING`.
 
-The following examples illustrate field access for a record, index-based element access for an ordered list, and also a composition thereof.
+The following examples illustrate field access for a record, index-based element access for an array, and also a
+composition thereof.
 
 ##### Examples
 
-    ({"name": "MyABCs", "list": [ "a", "b", "c"]}).list
+    ({"name": "MyABCs", "array": [ "a", "b", "c"]}).array
 
     (["a", "b", "c"])[2]
 
-    ({"name": "MyABCs", "list": [ "a", "b", "c"]}).list[2]
+    ({"name": "MyABCs", "array": [ "a", "b", "c"]}).array[2]
 
 ### <a id="Operator_expressions">Operator expressions</a>
 
@@ -294,7 +311,7 @@
 ### <a id="Quantified_expressions">Quantified expressions</a>
 
     QuantifiedExpression ::= ( (<ANY>|<SOME>) | <EVERY> ) Variable <IN> Expression ( "," Variable "in" Expression )*
-                             <SATISFIES> Expression
+                             <SATISFIES> Expression (<END>)?
 
 Quantified expressions are used for expressing existential or universal predicates involving the elements of a collection.
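+
+As a minimal illustration of this syntax (including the optional `END` terminator shown in the grammar above), the
+following expressions quantify over simple constant arrays; the first would evaluate to `true` and the second to `false`:
+
+    SOME x IN [1, 2, 3] SATISFIES x = 2 END
+
+    EVERY x IN [1, 2, 3] SATISFIES x < 3 END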
 
diff --git a/asterixdb/asterix-doc/src/main/markdown/sqlpp/3_query.md b/asterixdb/asterix-doc/src/main/markdown/sqlpp/3_query.md
index bfe4f0e..f1ebc47 100644
--- a/asterixdb/asterix-doc/src/main/markdown/sqlpp/3_query.md
+++ b/asterixdb/asterix-doc/src/main/markdown/sqlpp/3_query.md
@@ -789,8 +789,8 @@
 Collection-valued data is perfectly legal in most SQL++ contexts, and its data is schema-less,
 so a query processor rarely knows exactly what to expect where and such automatic conversion is often
 not desirable. Thus, in the queries above, the use of "[0]" extracts the first (i.e., 0th) element of
-a list-valued query expression's result; this is needed above, even though the result is a list of one
-element, to "de-listify" the list and obtain the desired scalar for the comparison.
+an array-valued query expression's result; this is needed above, even though the result is an array of one
+element, to extract the only element in the singleton array and obtain the desired scalar for the comparison.
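+
+As a concrete sketch of this pattern (using a hypothetical `users` collection with `id` and `name` fields), an
+expression such as
+
+    (SELECT VALUE u.name FROM users u WHERE u.id = 1)[0]
+
+first yields an array of the matching names (a singleton array if `id` is unique); the trailing `[0]` then extracts
+that element as a scalar suitable for use in a comparison.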
 
 ## <a id="Let_clauses">LET clauses</a>
 Similar to `WITH` clauses, `LET` clauses can be useful when a (complex) expression is used several times within a query, allowing it to be written once to make the query more concise. The next query shows an example.
@@ -850,7 +850,7 @@
 not return singleton, single-column relations.
 Instead, they may return arbitrary collections.
 For example, the following query is a variant of the prior group-by query examples;
-it retrieves a list of up to two "dislike" messages per user.
+it retrieves an array of up to two "dislike" messages per user.
 
 ##### Example
 
@@ -874,14 +874,15 @@
 within a query the subquery occurs -- and again, its result is never automatically cast into a scalar.
 
 ## <a id="Vs_SQL-92">SQL++ vs. SQL-92</a>
-The following matrix is a quick "key differences cheat sheet" for SQL++ and SQL-92.
+The following matrix is a quick "SQL-92 compatibility cheat sheet" for SQL++.
 
 | Feature |  SQL++ | SQL-92 |
 |----------|--------|--------|
 | SELECT * | Returns nested records | Returns flattened concatenated records |
 | Subquery | Returns a collection  | The returned collection is cast into a scalar value if the subquery appears in a SELECT list or on one side of a comparison or as input to a function |
-| LEFT OUTER JOIN |  Fills in `MISSING` for non-matches  |   Fills in `NULL`(s) for non-matches    |
-| UNION ALL       | Allows heterogenous inputs and output | Input streams must be UNION-compatible and output field names are drawn from the first input stream
+| LEFT OUTER JOIN |  Fills in `MISSING`(s) for non-matches  |   Fills in `NULL`(s) for non-matches    |
+| UNION ALL       | Allows heterogeneous inputs and output | Input streams must be UNION-compatible and output field names are drawn from the first input stream
+| IN constant_expr | The constant expression has to be an array or multiset, i.e., [..,..,...] (see the example following this table) | The constant collection can be represented as comma-separated items in a paren pair |
 | String literal | Double quotes or single quotes | Single quotes only |
 | Delimited identifiers | Backticks | Double quotes |
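+
+For example (using a hypothetical `users` collection), the membership predicate in the following SQL++ query must be
+written with an array (or multiset) constructor, whereas SQL-92 would express the same constant collection as `(1, 2, 3)`:
+
+    SELECT VALUE u.name
+    FROM users u
+    WHERE u.id IN [1, 2, 3];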
 
diff --git a/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_ddl.md b/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_ddl.md
index 217a670..d236003 100644
--- a/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_ddl.md
+++ b/asterixdb/asterix-doc/src/main/markdown/sqlpp/5_ddl.md
@@ -108,19 +108,19 @@
     TypeSpecification    ::= "TYPE" FunctionOrTypeName IfNotExists "AS" RecordTypeDef
     FunctionOrTypeName   ::= QualifiedName
     IfNotExists          ::= ( <IF> <NOT> <EXISTS> )?
-    TypeExpr             ::= RecordTypeDef | TypeReference | OrderedListTypeDef | UnorderedListTypeDef
+    TypeExpr             ::= RecordTypeDef | TypeReference | ArrayTypeDef | MultisetTypeDef
     RecordTypeDef        ::= ( <CLOSED> | <OPEN> )? "{" ( RecordField ( "," RecordField )* )? "}"
     RecordField          ::= Identifier ":" ( TypeExpr ) ( "?" )?
     NestedField          ::= Identifier ( "." Identifier )*
     IndexField           ::= NestedField ( ":" TypeReference )?
     TypeReference        ::= Identifier
-    OrderedListTypeDef   ::= "[" ( TypeExpr ) "]"
-    UnorderedListTypeDef ::= "{{" ( TypeExpr ) "}}"
+    ArrayTypeDef         ::= "[" ( TypeExpr ) "]"
+    MultisetTypeDef      ::= "{{" ( TypeExpr ) "}}"
 
 The CREATE TYPE statement is used to create a new named datatype.
 This type can then be used to create stored collections or utilized when defining one or more other datatypes.
 Much more information about the data model is available in the [data model reference guide](datamodel.html).
-A new type can be a record type, a renaming of another type, an ordered list type, or an unordered list type.
+A new type can be a record type, a renaming of another type, an array type, or a multiset type.
 A record type can be defined as being either open or closed.
 Instances of a closed record type are not permitted to contain fields other than those specified in the create type statement.
 Instances of an open record type may carry additional fields, and open is the default for new types if neither option is specified.
@@ -129,8 +129,8 @@
 Since it is defined as (defaulting to) being an open type,
 instances will be permitted to contain more than what is specified in the type definition.
 The first four fields are essentially traditional typed name/value pairs (much like SQL fields).
-The friendIds field is an unordered list of integers.
-The employment field is an ordered list of instances of another named record type, EmploymentType.
+The friendIds field is a multiset of integers.
+The employment field is an array of instances of another named record type, EmploymentType.
 
 ##### Example
 
@@ -178,7 +178,7 @@
     CompactionPolicy     ::= Identifier
 
 The CREATE DATASET statement is used to create a new dataset.
-Datasets are named, unordered collections of record type instances;
+Datasets are named multisets of record type instances;
 they are where data lives persistently and are the usual targets for SQL++ queries.
 Datasets are typed, and the system ensures that their contents conform to their type definitions.
 An Internal dataset (the default kind) is a dataset whose content lives within and is managed by the system.
@@ -285,7 +285,7 @@
 a nested field residing within a record-valued user field in the ChirpMessages dataset.
 This index can be useful for accelerating exact-match queries, range search queries,
 and joins involving the nested screenName field.
-Such nested fields must be singular, i.e., one cannot index through (or on) a list-valued field.
+Such nested fields must be singular, i.e., one cannot index through (or on) an array-valued field.
 
 #### Example
 
diff --git a/asterixdb/asterix-doc/src/main/markdown/sqlpp/appendix_1_keywords.md b/asterixdb/asterix-doc/src/main/markdown/sqlpp/appendix_1_keywords.md
index b61528c..b0bb259 100644
--- a/asterixdb/asterix-doc/src/main/markdown/sqlpp/appendix_1_keywords.md
+++ b/asterixdb/asterix-doc/src/main/markdown/sqlpp/appendix_1_keywords.md
@@ -10,17 +10,15 @@
 | DELETE | DESC | DISCONNECT | DISTINCT | DROP | ELEMENT |
 | ELEMENT | EXPLAIN | ELSE | ENFORCED | END | EVERY |
 | EXCEPT | EXIST | EXTERNAL | FEED | FILTER | FLATTEN |
-| FOR | EXIST | EXTERNAL | FEED | FILTER | FLATTEN |
-| FROM | FULL | FUNCTION | GROUP | HAVING | HINTS |
-| IF | INTO | IN | INDEX | INGESTION | INNER |
-| INSERT | INTERNAL | INTERSECT | IS | JOIN | KEYWORD |
-| LEFT | LETTING | LET | LIKE | LIMIT | LOAD |
-| NODEGROUP | NGRAM | NOT | OFFSET | ON | OPEN |
-| OR | ORDER | OUTER | OUTPUT | PATH | POLICY |
-| PRE-SORTED | PRIMARY | RAW | REFRESH | RETURN | RTREE |
-| RUN | SATISFIES | SECONDARY | SELECT | SET | SOME |
-| TEMPORARY | THEN | TYPE | UNKNOWN | UNNEST | UPDATE |
-| USE | USING | VALUE | WHEN | WHERE | WITH |
-| WRITE |     |       |      |       |      |
-
+| FOR | FROM | FULL | FUNCTION | GROUP | HAVING |
+| HINTS | IF | INTO | IN | INDEX | INGESTION |
+| INNER | INSERT | INTERNAL | INTERSECT | IS | JOIN |
+| KEYWORD | LEFT | LETTING | LET | LIKE | LIMIT |
+| LOAD | NODEGROUP | NGRAM | NOT | OFFSET | ON |
+| OPEN | OR | ORDER | OUTER | OUTPUT | PATH |
+| POLICY | PRE-SORTED | PRIMARY | RAW | REFRESH | RETURN |
+| RTREE | RUN | SATISFIES | SECONDARY | SELECT | SET |
+| SOME | TEMPORARY | THEN | TYPE | UNKNOWN | UNNEST |
+| UPDATE | USE | USING | VALUE | WHEN | WHERE |
+| WITH | WRITE |     |     |     |     |
 
diff --git a/asterixdb/asterix-doc/src/site/markdown/aql/primer-sql-like.md b/asterixdb/asterix-doc/src/site/markdown/aql/primer-sql-like.md
deleted file mode 100644
index 9f15bb6..0000000
--- a/asterixdb/asterix-doc/src/site/markdown/aql/primer-sql-like.md
+++ /dev/null
@@ -1,907 +0,0 @@
-<!--
- ! Licensed to the Apache Software Foundation (ASF) under one
- ! or more contributor license agreements.  See the NOTICE file
- ! distributed with this work for additional information
- ! regarding copyright ownership.  The ASF licenses this file
- ! to you under the Apache License, Version 2.0 (the
- ! "License"); you may not use this file except in compliance
- ! with the License.  You may obtain a copy of the License at
- !
- !   http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing,
- ! software distributed under the License is distributed on an
- ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ! KIND, either express or implied.  See the License for the
- ! specific language governing permissions and limitations
- ! under the License.
- !-->
-
-# AsterixDB 101: An ADM and AQL Primer (for SQL fans) #
-
-## Welcome to AsterixDB! ##
-This document introduces the main features of AsterixDB's data model (ADM) and query language (AQL) by example.
-The example is a simple scenario involving (synthetic) sample data modeled after data from the social domain.
-This document describes a set of sample ADM datasets, together with a set of illustrative AQL queries (in
-a SQL-like form), to introduce you to the "AsterixDB user experience".
-The complete set of steps required to create and load a handful of sample datasets, along with runnable queries
-and the expected results for each query, are included.
-
-This document assumes that you are at least vaguely familiar with AsterixDB and why you might want to use it.
-Most importantly, it assumes you already have a running instance of AsterixDB and that you know how to query
-it using AsterixDB's basic web interface.
-For more information on these topics, you should go through the steps in
-[Installing Asterix Using Managix](../install.html)
-before reading this document and make sure that you have a running AsterixDB instance ready to go.
-To get your feet wet, you should probably start with a simple local installation of AsterixDB on your favorite
-machine, accepting all of the default settings that Managix offers.
-Later you can graduate to trying AsterixDB on a cluster, its real intended home (since it targets Big Data).
-(Note: With the exception of specifying the correct locations where you put the source data for this example,
-there should no changes needed in your ADM or AQL statements to run the examples locally and/or to run them
-on a cluster when you are ready to take that step.)
-
-As you read through this document, you should try each step for yourself on your own AsterixDB instance.
-Once you have reached the end, you will be fully armed and dangerous, with all the basic AsterixDB knowledge
-that you'll need to start down the path of modeling, storing, and querying your own semistructured data.
-
-----
-## ADM: Modeling Semistructed Data in AsterixDB ##
-In this section you will learn all about modeling Big Data using
-ADM, the data model of the AsterixDB BDMS.
-
-### Dataverses, Datatypes, and Datasets ###
-The top-level organizing concept in the AsterixDB world is the _dataverse_.
-A dataverse---short for "data universe"---is a place (similar to a database in a relational DBMS) in which
-to create and manage the types, datasets, functions, and other artifacts for a given AsterixDB application.
-When you start using an AsterixDB instance for the first time, it starts out "empty"; it contains no data
-other than the AsterixDB system catalogs (which live in a special dataverse called the Metadata dataverse).
-To store your data in AsterixDB, you will first create a dataverse and then you use it for the _datatypes_
-and _datasets_ for managing your own data.
-A datatype tells AsterixDB what you know (or more accurately, what you want it to know) a priori about one
-of the kinds of data instances that you want AsterixDB to hold for you.
-A dataset is a collection of data instances of a datatype,
-and AsterixDB makes sure that the data instances that you put in it conform to its specified type.
-Since AsterixDB targets semistructured data, you can use _open_ datatypes and tell it as little or as
-much as you wish about your data up front; the more you tell it up front, the less information it will
-have to store repeatedly in the individual data instances that you give it.
-Instances of open datatypes are permitted to have additional content, beyond what the datatype says,
-as long as they at least contain the information prescribed by the datatype definition.
-Open typing allows data to vary from one instance to another and it leaves wiggle room for application
-evolution in terms of what might need to be stored in the future.
-If you want to restrict data instances in a dataset to have only what the datatype says, and nothing extra,
-you can define a _closed_ datatype for that dataset and AsterixDB will keep users from storing objects
-that have extra data in them.
-Datatypes are open by default unless you tell AsterixDB otherwise.
-Let's put these concepts to work
-
-Our little sample scenario involves hypothetical information about users of two popular social networks,
-Facebook and Twitter, and their messages.
-We'll start by defining a dataverse called "TinySocial" to hold our datatypes and datasets.
-The AsterixDB data model (ADM) is essentially a superset of JSON---it's what you get by extending
-JSON with more data types and additional data modeling constructs borrowed from object databases.
-The following is how we can create the TinySocial dataverse plus a set of ADM types for modeling
-Twitter users, their Tweets, Facebook users, their users' employment information, and their messages.
-(Note: Keep in mind that this is just a tiny and somewhat silly example intended for illustrating
-some of the key features of AsterixDB. :-))
-
-
-        drop dataverse TinySocial if exists;
-        create dataverse TinySocial;
-        use dataverse TinySocial;
-
-        create type TwitterUserType as open {
-            screen-name: string,
-            lang: string,
-            friends_count: int32,
-            statuses_count: int32,
-            name: string,
-            followers_count: int32
-        }
-        create type TweetMessageType as closed {
-            tweetid: string,
-            user: TwitterUserType,
-            sender-location: point?,
-            send-time: datetime,
-            referred-topics: {{ string }},
-            message-text: string
-        }
-        create type EmploymentType as open {
-            organization-name: string,
-            start-date: date,
-            end-date: date?
-        }
-        create type FacebookUserType as closed {
-            id: int32,
-            alias: string,
-            name: string,
-            user-since: datetime,
-            friend-ids: {{ int32 }},
-            employment: [EmploymentType]
-        }
-        create type FacebookMessageType as closed {
-            message-id: int32,
-            author-id: int32,
-            in-response-to: int32?,
-            sender-location: point?,
-            message: string
-        }
-
-
-The first three lines above tell AsterixDB to drop the old TinySocial dataverse, if one already
-exists, and then to create a brand new one and make it the focus of the statements that follow.
-The first type creation statement creates a datatype for holding information about Twitter users.
-It is a record type with a mix of integer and string data, very much like a (flat) relational tuple.
-The indicated fields are all mandatory, but because the type is open, additional fields are welcome.
-The second statement creates a datatype for Twitter messages; this shows how to specify a closed type.
-Interestingly (based on one of Twitter's APIs), each Twitter message actually embeds an instance of the
-sending user's information (current as of when the message was sent), so this is an example of a nested
-record in ADM.
-Twitter messages can optionally contain the sender's location, which is modeled via the sender-location
-field of spatial type _point_; the question mark following the field type indicates its optionality.
-An optional field is like a nullable field in SQL---it may be present or missing, but when it's present,
-its data type will conform to the datatype's specification.
-The send-time field illustrates the use of a temporal primitive type, _datetime_.
-Lastly, the referred-topics field illustrates another way that ADM is richer than the relational model;
-this field holds a bag (a.k.a. an unordered list) of strings.
-Since the overall datatype definition for Twitter messages says "closed", the fields that it lists are
-the only fields that instances of this type will be allowed to contain.
-The next two create type statements create a record type for holding information about one component of
-the employment history of a Facebook user and then a record type for holding the user information itself.
-The Facebook user type highlights a few additional ADM data model features.
-Its friend-ids field is a bag of integers, presumably the Facebook user ids for this user's friends,
-and its employment field is an ordered list of employment records.
-The final create type statement defines a type for handling the content of a Facebook message in our
-hypothetical social data storage scenario.
-
-Before going on, we need to once again emphasize the idea that AsterixDB is aimed at storing
-and querying not just Big Data, but Big _Semistructured_ Data.
-This means that most of the fields listed in the create type statements above could have been
-omitted without changing anything other than the resulting size of stored data instances on disk.
-AsterixDB stores its information about the fields defined a priori as separate metadata, whereas
-the information about other fields that are "just there" in instances of open datatypes is stored
-with each instance---making for more bits on disk and longer times for operations affected by
-data size (e.g., dataset scans).
-The only fields that _must_ be specified a priori are the primary key and any fields that you
-would like to build indexes on.
-
-
-### Creating Datasets and Indexes ###
-
-Now that we have defined our datatypes, we can move on and create datasets to store the actual data.
-(If we wanted to, we could even have several named datasets based on any one of these datatypes.)
-We can do this as follows, utilizing the DDL capabilities of AsterixDB.
-
-
-        use dataverse TinySocial;
-
-        create dataset FacebookUsers(FacebookUserType)
-        primary key id;
-
-        create dataset FacebookMessages(FacebookMessageType)
-        primary key message-id;
-
-        create dataset TwitterUsers(TwitterUserType)
-        primary key screen-name;
-
-        create dataset TweetMessages(TweetMessageType)
-        primary key tweetid
-        hints(cardinality=100);
-
-        create index fbUserSinceIdx on FacebookUsers(user-since);
-        create index fbAuthorIdx on FacebookMessages(author-id) type btree;
-        create index fbSenderLocIndex on FacebookMessages(sender-location) type rtree;
-        create index fbMessageIdx on FacebookMessages(message) type keyword;
-
-        from $ds in dataset Metadata.Dataset select $ds;
-        from $ix in dataset Metadata.Index select $ix;
-
-
-
-The ADM DDL statements above create four datasets for holding our social data in the TinySocial
-dataverse: FacebookUsers, FacebookMessages, TwitterUsers, and TweetMessages.
-The first statement creates the FacebookUsers data set.
-It specifies that this dataset will store data instances conforming to FacebookUserType and that
-it has a primary key which is the id field of each instance.
-The primary key information is used by AsterixDB to uniquely identify instances for the purpose
-of later lookup and for use in secondary indexes.
-Each AsterixDB dataset is stored (and indexed) in the form of a B+ tree on primary key;
-secondary indexes point to their indexed data by primary key.
-In AsterixDB clusters, the primary key is also used to hash-partition (a.k.a. shard) the
-dataset across the nodes of the cluster.
-The next three create dataset statements are similar.
-The last one illustrates an optional clause for providing useful hints to AsterixDB.
-In this case, the hint tells AsterixDB that the dataset definer is anticipating that the
-TweetMessages dataset will contain roughly 100 objects; knowing this can help AsterixDB
-to more efficiently manage and query this dataset.
-(AsterixDB does not yet gather and maintain data statistics; it will currently, abitrarily,
-assume a cardinality of one million objects per dataset in the absence of such an optional
-definition-time hint.)
-
-The create dataset statements above are followed by four more DDL statements, each of which
-creates a secondary index on a field of one of the datasets.
-The first one indexes the FacebookUsers dataset on its user-since field.
-This index will be a B+ tree index; its type is unspecified and _btree_ is the default type.
-The other three illustrate how you can explicitly specify the desired type of index.
-In addition to btree, _rtree_ and inverted _keyword_ indexes are supported by AsterixDB.
-Indexes can also have composite keys, and more advanced text indexing is available as well
-(ngram(k), where k is the desired gram length).
-
-### Querying the Metadata Dataverse ###
-
-The last two statements above show how you can use queries in AQL to examine the AsterixDB
-system catalogs and tell what artifacts you have created.
-Just as relational DBMSs use their own tables to store their catalogs, AsterixDB uses
-its own datasets to persist descriptions of its datasets, datatypes, indexes, and so on.
-Running the first of the two queries above will list all of your newly created datasets,
-and it will also show you a full list of all the metadata datasets.
-(You can then explore from there on your own if you are curious)
-These last two queries also illustrate one other factoid worth knowing:
-AsterixDB allows queries to span dataverses by allowing the optional use
-of fully-qualified dataset names (i.e., _dataversename.datasetname_)
-to reference datasets that live in a dataverse other than the one that
-was named in the most recently executed _use dataverse_ directive.
-
-----
-## Loading Data Into AsterixDB ##
-Okay, so far so good---AsterixDB is now ready for data, so let's give it some data to store
-Our next task will be to load some sample data into the four datasets that we just defined.
-Here we will load a tiny set of records, defined in ADM format (a superset of JSON), into each dataset.
-In the boxes below you can see the actual data instances contained in each of the provided sample files.
-In order to load this data yourself, you should first store the four corresponding `.adm` files
-(whose URLs are indicated on top of each box below) into a filesystem directory accessible to your
-running AsterixDB instance.
-Take a few minutes to look carefully at each of the sample data sets.
-This will give you a better sense of the nature of the data that we are about to load and query.
-We should note that ADM format is a textual serialization of what AsterixDB will actually store;
-when persisted in AsterixDB, the data format will be binary and the data in the predefined fields
-of the data instances will be stored separately from their associated field name and type metadata.
-
-[Twitter Users](../data/twu.adm)
-
-        {"screen-name":"NathanGiesen@211","lang":"en","friends_count":18,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416}
-        {"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159}
-        {"screen-name":"NilaMilliron_tw","lang":"en","friends_count":445,"statuses_count":164,"name":"Nila Milliron","followers_count":22649}
-        {"screen-name":"ChangEwing_573","lang":"en","friends_count":182,"statuses_count":394,"name":"Chang Ewing","followers_count":32136}
-
-[Tweet Messages](../data/twm.adm)
-
-        {"tweetid":"1","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("47.44,80.65"),"send-time":datetime("2008-04-26T10:10:00"),"referred-topics":{{"t-mobile","customization"}},"message-text":" love t-mobile its customization is good:)"}
-        {"tweetid":"2","user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159},"sender-location":point("32.84,67.14"),"send-time":datetime("2010-05-13T10:10:00"),"referred-topics":{{"verizon","shortcut-menu"}},"message-text":" like verizon its shortcut-menu is awesome:)"}
-        {"tweetid":"3","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("29.72,75.8"),"send-time":datetime("2006-11-04T10:10:00"),"referred-topics":{{"motorola","speed"}},"message-text":" like motorola the speed is good:)"}
-        {"tweetid":"4","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("39.28,70.48"),"send-time":datetime("2011-12-26T10:10:00"),"referred-topics":{{"sprint","voice-command"}},"message-text":" like sprint the voice-command is mind-blowing:)"}
-        {"tweetid":"5","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("40.09,92.69"),"send-time":datetime("2006-08-04T10:10:00"),"referred-topics":{{"motorola","speed"}},"message-text":" can't stand motorola its speed is terrible:("}
-        {"tweetid":"6","user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159},"sender-location":point("47.51,83.99"),"send-time":datetime("2010-05-07T10:10:00"),"referred-topics":{{"iphone","voice-clarity"}},"message-text":" like iphone the voice-clarity is good:)"}
-        {"tweetid":"7","user":{"screen-name":"ChangEwing_573","lang":"en","friends_count":182,"statuses_count":394,"name":"Chang Ewing","followers_count":32136},"sender-location":point("36.21,72.6"),"send-time":datetime("2011-08-25T10:10:00"),"referred-topics":{{"samsung","platform"}},"message-text":" like samsung the platform is good"}
-        {"tweetid":"8","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("46.05,93.34"),"send-time":datetime("2005-10-14T10:10:00"),"referred-topics":{{"t-mobile","shortcut-menu"}},"message-text":" like t-mobile the shortcut-menu is awesome:)"}
-        {"tweetid":"9","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("36.86,74.62"),"send-time":datetime("2012-07-21T10:10:00"),"referred-topics":{{"verizon","voicemail-service"}},"message-text":" love verizon its voicemail-service is awesome"}
-        {"tweetid":"10","user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159},"sender-location":point("29.15,76.53"),"send-time":datetime("2008-01-26T10:10:00"),"referred-topics":{{"verizon","voice-clarity"}},"message-text":" hate verizon its voice-clarity is OMG:("}
-        {"tweetid":"11","user":{"screen-name":"NilaMilliron_tw","lang":"en","friends_count":445,"statuses_count":164,"name":"Nila Milliron","followers_count":22649},"sender-location":point("37.59,68.42"),"send-time":datetime("2008-03-09T10:10:00"),"referred-topics":{{"iphone","platform"}},"message-text":" can't stand iphone its platform is terrible"}
-        {"tweetid":"12","user":{"screen-name":"OliJackson_512","lang":"en","friends_count":445,"statuses_count":164,"name":"Oli Jackson","followers_count":22649},"sender-location":point("24.82,94.63"),"send-time":datetime("2010-02-13T10:10:00"),"referred-topics":{{"samsung","voice-command"}},"message-text":" like samsung the voice-command is amazing:)"}
-
-[Facebook Users](../data/fbu.adm)
-
-        {"id":1,"alias":"Margarita","name":"MargaritaStoddard","user-since":datetime("2012-08-20T10:10:00"),"friend-ids":{{2,3,6,10}},"employment":[{"organization-name":"Codetechno","start-date":date("2006-08-06")}]}
-        {"id":2,"alias":"Isbel","name":"IsbelDull","user-since":datetime("2011-01-22T10:10:00"),"friend-ids":{{1,4}},"employment":[{"organization-name":"Hexviafind","start-date":date("2010-04-27")}]}
-        {"id":3,"alias":"Emory","name":"EmoryUnk","user-since":datetime("2012-07-10T10:10:00"),"friend-ids":{{1,5,8,9}},"employment":[{"organization-name":"geomedia","start-date":date("2010-06-17"),"end-date":date("2010-01-26")}]}
-        {"id":4,"alias":"Nicholas","name":"NicholasStroh","user-since":datetime("2010-12-27T10:10:00"),"friend-ids":{{2}},"employment":[{"organization-name":"Zamcorporation","start-date":date("2010-06-08")}]}
-        {"id":5,"alias":"Von","name":"VonKemble","user-since":datetime("2010-01-05T10:10:00"),"friend-ids":{{3,6,10}},"employment":[{"organization-name":"Kongreen","start-date":date("2010-11-27")}]}
-        {"id":6,"alias":"Willis","name":"WillisWynne","user-since":datetime("2005-01-17T10:10:00"),"friend-ids":{{1,3,7}},"employment":[{"organization-name":"jaydax","start-date":date("2009-05-15")}]}
-        {"id":7,"alias":"Suzanna","name":"SuzannaTillson","user-since":datetime("2012-08-07T10:10:00"),"friend-ids":{{6}},"employment":[{"organization-name":"Labzatron","start-date":date("2011-04-19")}]}
-        {"id":8,"alias":"Nila","name":"NilaMilliron","user-since":datetime("2008-01-01T10:10:00"),"friend-ids":{{3}},"employment":[{"organization-name":"Plexlane","start-date":date("2010-02-28")}]}
-        {"id":9,"alias":"Woodrow","name":"WoodrowNehling","user-since":datetime("2005-09-20T10:10:00"),"friend-ids":{{3,10}},"employment":[{"organization-name":"Zuncan","start-date":date("2003-04-22"),"end-date":date("2009-12-13")}]}
-        {"id":10,"alias":"Bram","name":"BramHatch","user-since":datetime("2010-10-16T10:10:00"),"friend-ids":{{1,5,9}},"employment":[{"organization-name":"physcane","start-date":date("2007-06-05"),"end-date":date("2011-11-05")}]}
-
-[Facebook Messages](../data/fbm.adm)
-
-        {"message-id":1,"author-id":3,"in-response-to":2,"sender-location":point("47.16,77.75"),"message":" love sprint its shortcut-menu is awesome:)"}
-        {"message-id":2,"author-id":1,"in-response-to":4,"sender-location":point("41.66,80.87"),"message":" dislike iphone its touch-screen is horrible"}
-        {"message-id":3,"author-id":2,"in-response-to":4,"sender-location":point("48.09,81.01"),"message":" like samsung the plan is amazing"}
-        {"message-id":4,"author-id":1,"in-response-to":2,"sender-location":point("37.73,97.04"),"message":" can't stand at&t the network is horrible:("}
-        {"message-id":5,"author-id":6,"in-response-to":2,"sender-location":point("34.7,90.76"),"message":" love sprint the customization is mind-blowing"}
-        {"message-id":6,"author-id":2,"in-response-to":1,"sender-location":point("31.5,75.56"),"message":" like t-mobile its platform is mind-blowing"}
-        {"message-id":7,"author-id":5,"in-response-to":15,"sender-location":point("32.91,85.05"),"message":" dislike sprint the speed is horrible"}
-        {"message-id":8,"author-id":1,"in-response-to":11,"sender-location":point("40.33,80.87"),"message":" like verizon the 3G is awesome:)"}
-        {"message-id":9,"author-id":3,"in-response-to":12,"sender-location":point("34.45,96.48"),"message":" love verizon its wireless is good"}
-        {"message-id":10,"author-id":1,"in-response-to":12,"sender-location":point("42.5,70.01"),"message":" can't stand motorola the touch-screen is terrible"}
-        {"message-id":11,"author-id":1,"in-response-to":1,"sender-location":point("38.97,77.49"),"message":" can't stand at&t its plan is terrible"}
-        {"message-id":12,"author-id":10,"in-response-to":6,"sender-location":point("42.26,77.76"),"message":" can't stand t-mobile its voicemail-service is OMG:("}
-        {"message-id":13,"author-id":10,"in-response-to":4,"sender-location":point("42.77,78.92"),"message":" dislike iphone the voice-command is bad:("}
-        {"message-id":14,"author-id":9,"in-response-to":12,"sender-location":point("41.33,85.28"),"message":" love at&t its 3G is good:)"}
-        {"message-id":15,"author-id":7,"in-response-to":11,"sender-location":point("44.47,67.11"),"message":" like iphone the voicemail-service is awesome"}
-
-
-It's loading time! We can use AQL _load_ statements to populate our datasets with the sample records shown above.
-The following shows how loading can be done for data stored in `.adm` files in your local filesystem.
-*Note:* You _MUST_ replace the `<Host Name>` and `<Absolute File Path>` placeholders in each load
-statement below with valid values based on the host IP address (or host name) for the machine and
-directory that you have downloaded the provided `.adm` files to.
-As you do so, be very, very careful to retain the two slashes in the load statements, i.e.,
-do not delete the two slashes that appear in front of the absolute path to your `.adm` files.
-(This will lead to a three-slash character sequence at the start of each load statement's file
-input path specification.)
-
-
-        use dataverse TinySocial;
-
-        load dataset FacebookUsers using localfs
-        (("path"="<Host Name>://<Absolute File Path>/fbu.adm"),("format"="adm"));
-        load dataset FacebookMessages using localfs
-        (("path"="<Host Name>://<Absolute File Path>/fbm.adm"),("format"="adm"));
-        load dataset TwitterUsers using localfs
-        (("path"="<Host Name>://<Absolute File Path>/twu.adm"),("format"="adm"));
-        load dataset TweetMessages using localfs
-        (("path"="<Host Name>://<Absolute File Path>/twm.adm"),("format"="adm"));
-
-
-----
-## AQL: Querying Your AsterixDB Data ##
-Congratulations! You now have sample social data stored (and indexed) in AsterixDB.
-(You are part of an elite and adventurous group of individuals. :-))
-Now that you have successfully loaded the provided sample data into the datasets that we defined,
-you can start running queries against them.
-
-The query language for AsterixDB is AQL---the Asterix Query Language.
-AQL is loosely based on XQuery, the language developed and standardized in the early to mid 2000's
-by the World Wide Web Consortium (W3C) for querying semistructured data stored in their XML format.
-We have tossed all of the "XML cruft" out of their language but retained many of its core ideas.
-We did this because its design was developed over a period of years by a diverse committee of smart
-and experienced language designers, including "SQL people", "functional programming people", and
-"XML people", all of whom were focused on how to design a new query language that operates well over
-semistructured data.
-(We decided to stand on their shoulders instead of starting from scratch and revisiting many of the
-same issues.)
-Note that AQL is not SQL and not based on SQL: In other words, AsterixDB is fully "NoSQL compliant". :-)
-
-In this section we introduce AQL via a set of example queries, along with their expected results,
-based on the data above, to help you get started.
-Many of the most important features of AQL are presented in this set of representative queries.
-You can find more details in the document on the [Asterix Data Model (ADM)](datamodel.html),
-in the [AQL Reference Manual](manual.html), and a complete list of built-in functions is available
-in the [Asterix Functions](functions.html) document.
-
-AQL is an expression language.
-Even the expression 1+1 is a valid AQL query that evaluates to 2.
-(Try it for yourself!
-Okay, maybe that's _not_ the best use of a 512-node shared-nothing compute cluster.)
-Most useful AQL queries will be based on the _FLWOR_ (pronounced "flower") expression structure
-that AQL has borrowed from XQuery ((http://en.wikipedia.org/wiki/FLWOR)).
-The FLWOR expression syntax supports both the incremental binding (_for_) of variables to ADM data
-instances in a dataset (or in the result of any AQL expression, actually) and the full binding (_let_)
-of variables to entire intermediate results in a fashion similar to temporary views in the SQL world.
-FLWOR is an acronym that is short for _for_-_let_-_where_-_order by_-_return_,
-naming five of the most frequently used clauses from the syntax of a full AQL query.
-AQL also includes _group by_ and _limit_ clauses, as you will see shortly.
-Roughly speaking, for SQL afficiandos, the _for_ clause in AQL is like the _from_ clause in SQL,
-the _return_ clause in AQL is like the _select_ clause in SQL (but appears at the end instead of
-the beginning of a query), the _let_ clause in AQL is like SQL's _with_ clause, and the _where_
-and _order by_ clauses in both languages are similar.
-
-In order to allow SQL fans to write queries in their favored ways,
-AQL provides synonyms:  _from_ for _for_, _select_ for _return_,  _with_ for _let_, and
-_keeping_ for _with_ in the group by clause.
-
-Enough talk!
-Let's go ahead and try writing some queries and see about learning AQL by example.
-
-### Query 0-A - Exact-Match Lookup ###
-For our first query, let's find a Facebook user based on his or her user id.
-Suppose the user we want is the user whose id is 8:
-
-
-        use dataverse TinySocial;
-        from $user in dataset FacebookUsers
-        where $user.id = 8
-        select $user;
-
-The query's _from_ clause  binds the variable `$user` incrementally to the data instances residing in
-the dataset named FacebookUsers.
-Its _where_ clause selects only those bindings having a user id of interest, filtering out the rest.
-The _select_ clause returns the (entire) data instance for each binding that satisfies the predicate.
-Since this dataset is indexed on user id (its primary key), this query will be done via a quick index lookup.
-
-The expected result for our sample data is as follows:
-
-        { "id": 8, "alias": "Nila", "name": "NilaMilliron", "user-since": datetime("2008-01-01T10:10:00.000Z"), "friend-ids": {{ 3 }}, "employment": [ { "organization-name": "Plexlane", "start-date": date("2010-02-28"), "end-date": null } ] }
-
-
-### Query 0-B - Range Scan ###
-AQL, like SQL, supports a variety of different predicates.
-For example, for our next query, let's find the Facebook users whose ids are in the range between 2 and 4:
-
-        use dataverse TinySocial;
-
-        from $user in dataset FacebookUsers
-        where $user.id >= 2 and $user.id <= 4
-        select $user;
-
-This query's expected result, also evaluable using the primary index on user id, is:
-
-        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "user-since": datetime("2011-01-22T10:10:00.000Z"), "friend-ids": {{ 1, 4 }}, "employment": [ { "organization-name": "Hexviafind", "start-date": date("2010-04-27"), "end-date": null } ] }
-        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "user-since": datetime("2012-07-10T10:10:00.000Z"), "friend-ids": {{ 1, 5, 8, 9 }}, "employment": [ { "organization-name": "geomedia", "start-date": date("2010-06-17"), "end-date": date("2010-01-26") } ] }
-        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "user-since": datetime("2010-12-27T10:10:00.000Z"), "friend-ids": {{ 2 }}, "employment": [ { "organization-name": "Zamcorporation", "start-date": date("2010-06-08"), "end-date": null } ] }
-
-
-### Query 1 - Other Query Filters ###
-AQL can do range queries on any data type that supports the appropriate set of comparators.
-As an example, this next query retrieves the Facebook users who joined between July 22, 2010 and July 29, 2012:
-
-        use dataverse TinySocial;
-        from $user in dataset FacebookUsers
-        where $user.user-since >= datetime('2010-07-22T00:00:00')
-          and $user.user-since <= datetime('2012-07-29T23:59:59')
-        select $user;
-
-The expected result for this query, also an indexable query, is as follows:
-
-        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "user-since": datetime("2011-01-22T10:10:00.000Z"), "friend-ids": {{ 1, 4 }}, "employment": [ { "organization-name": "Hexviafind", "start-date": date("2010-04-27"), "end-date": null } ] }
-        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "user-since": datetime("2012-07-10T10:10:00.000Z"), "friend-ids": {{ 1, 5, 8, 9 }}, "employment": [ { "organization-name": "geomedia", "start-date": date("2010-06-17"), "end-date": date("2010-01-26") } ] }
-        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "user-since": datetime("2010-12-27T10:10:00.000Z"), "friend-ids": {{ 2 }}, "employment": [ { "organization-name": "Zamcorporation", "start-date": date("2010-06-08"), "end-date": null } ] }
-        { "id": 10, "alias": "Bram", "name": "BramHatch", "user-since": datetime("2010-10-16T10:10:00.000Z"), "friend-ids": {{ 1, 5, 9 }}, "employment": [ { "organization-name": "physcane", "start-date": date("2007-06-05"), "end-date": date("2011-11-05") } ] }
-
-
-### Query 2-A - Equijoin ###
-In addition to simply binding variables to data instances and returning them "whole",
-an AQL query can construct new ADM instances to return based on combinations of its variable bindings.
-This gives AQL the power to do joins much like those done using multi-table _from_ clauses in SQL.
-For example, suppose we wanted a list of all Facebook users paired with their associated messages,
-with the list enumerating the author name and the message text associated with each Facebook message.
-We could do this as follows in AQL:
-
-        use dataverse TinySocial;
-
-        from $user in dataset FacebookUsers
-        from $message in dataset FacebookMessages
-        where $message.author-id = $user.id
-        select {
-        "uname": $user.name,
-        "message": $message.message
-        };
-
-The result of this query is a sequence of new ADM instances, one for each author/message pair.
-Each instance in the result will be an ADM record containing two fields, "uname" and "message",
-containing the user's name and the message text, respectively, for each author/message pair.
-(Note that "uname" and "message" are both simple AQL expressions themselves---so in the most
-general case, even the resulting field names can be computed as part of the query, making AQL
-a very powerful tool for slicing and dicing semistructured data.)
-
-The expected result of this example AQL join query for our sample data set is:
-
-        { "uname": "MargaritaStoddard", "message": " dislike iphone its touch-screen is horrible" }
-        { "uname": "MargaritaStoddard", "message": " can't stand at&t the network is horrible:(" }
-        { "uname": "MargaritaStoddard", "message": " like verizon the 3G is awesome:)" }
-        { "uname": "MargaritaStoddard", "message": " can't stand motorola the touch-screen is terrible" }
-        { "uname": "MargaritaStoddard", "message": " can't stand at&t its plan is terrible" }
-        { "uname": "IsbelDull", "message": " like samsung the plan is amazing" }
-        { "uname": "IsbelDull", "message": " like t-mobile its platform is mind-blowing" }
-        { "uname": "EmoryUnk", "message": " love sprint its shortcut-menu is awesome:)" }
-        { "uname": "EmoryUnk", "message": " love verizon its wireless is good" }
-        { "uname": "VonKemble", "message": " dislike sprint the speed is horrible" }
-        { "uname": "WillisWynne", "message": " love sprint the customization is mind-blowing" }
-        { "uname": "SuzannaTillson", "message": " like iphone the voicemail-service is awesome" }
-        { "uname": "WoodrowNehling", "message": " love at&t its 3G is good:)" }
-        { "uname": "BramHatch", "message": " can't stand t-mobile its voicemail-service is OMG:(" }
-        { "uname": "BramHatch", "message": " dislike iphone the voice-command is bad:(" }
-
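-To illustrate the point about computed field names, here is a small sketch (not one of the tutorial's
-numbered queries) that first binds the "uname" field name to a variable; it assumes, as described above,
-that the record constructor accepts an arbitrary expression in the field-name position:
-
-        use dataverse TinySocial;
-
-        from $user in dataset FacebookUsers
-        from $message in dataset FacebookMessages
-        with $nameField := "uname"
-        where $message.author-id = $user.id
-        select {
-        $nameField: $user.name,
-        "message": $message.message
-        };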
-
-### Query 2-B - Index join ###
-By default, AsterixDB evaluates equijoin queries using hash-based join methods that work
-well for doing ad hoc joins of very large data sets
-([http://en.wikipedia.org/wiki/Hash_join](http://en.wikipedia.org/wiki/Hash_join)).
-On a cluster, hash partitioning is employed as AsterixDB's divide-and-conquer strategy for
-computing large parallel joins.
-AsterixDB includes other join methods, but in the absence of data statistics and selectivity
-estimates, it doesn't (yet) have the know-how to intelligently choose among its alternatives.
-We therefore asked ourselves the classic question---WWOD?---What Would Oracle Do?---and in the
-interim, AQL includes a clunky (but useful) hint-based mechanism for addressing the occasional
-need to suggest to AsterixDB which join method it should use for a particular AQL query.
-
-The following query is similar to Query 2-A but includes a suggestion to AsterixDB that it
-should consider employing an index-based nested-loop join technique to process the query:
-
-        use dataverse TinySocial;
-
-        from $user in dataset FacebookUsers
-        from $message in dataset FacebookMessages
-        where $message.author-id /*+ indexnl */  = $user.id
-        select {
-        "uname": $user.name,
-        "message": $message.message
-        };
-
-The expected result is (of course) the same as before, modulo the order of the instances.
-Result ordering is (intentionally) undefined in AQL in the absence of an _order by_ clause.
-The query result for our sample data in this case is:
-
-        { "uname": "EmoryUnk", "message": " love sprint its shortcut-menu is awesome:)" }
-        { "uname": "MargaritaStoddard", "message": " dislike iphone its touch-screen is horrible" }
-        { "uname": "IsbelDull", "message": " like samsung the plan is amazing" }
-        { "uname": "MargaritaStoddard", "message": " can't stand at&t the network is horrible:(" }
-        { "uname": "WillisWynne", "message": " love sprint the customization is mind-blowing" }
-        { "uname": "IsbelDull", "message": " like t-mobile its platform is mind-blowing" }
-        { "uname": "VonKemble", "message": " dislike sprint the speed is horrible" }
-        { "uname": "MargaritaStoddard", "message": " like verizon the 3G is awesome:)" }
-        { "uname": "EmoryUnk", "message": " love verizon its wireless is good" }
-        { "uname": "MargaritaStoddard", "message": " can't stand motorola the touch-screen is terrible" }
-        { "uname": "MargaritaStoddard", "message": " can't stand at&t its plan is terrible" }
-        { "uname": "BramHatch", "message": " can't stand t-mobile its voicemail-service is OMG:(" }
-        { "uname": "BramHatch", "message": " dislike iphone the voice-command is bad:(" }
-        { "uname": "WoodrowNehling", "message": " love at&t its 3G is good:)" }
-        { "uname": "SuzannaTillson", "message": " like iphone the voicemail-service is awesome" }
-
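-As just noted, an explicit _order by_ clause is the way to obtain a deterministic result order.
-Here is a small sketch (a variant of Query 2-B, not one of the tutorial's numbered queries) that orders
-the author/message pairs by user name:
-
-        use dataverse TinySocial;
-
-        from $user in dataset FacebookUsers
-        from $message in dataset FacebookMessages
-        where $message.author-id /*+ indexnl */  = $user.id
-        order by $user.name
-        select {
-        "uname": $user.name,
-        "message": $message.message
-        };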
-
-(It is worth knowing, with respect to influencing AsterixDB's query evaluation, that nested _from_
-clauses---a.k.a. joins---are currently evaluated with the "outer" clause probing the data of the "inner"
-clause.)
-
-### Query 3 - Nested Outer Join ###
-In order to support joins between tables with missing/dangling join tuples, the designers of SQL ended
-up shoe-horning a subset of the relational algebra into SQL's _from_ clause syntax---and providing a
-variety of join types there for users to choose from.
-Left outer joins are particularly important in SQL, e.g., to print a summary of customers and orders,
-grouped by customer, without omitting those customers who haven't placed any orders yet.
-
-The AQL language supports nesting, both of queries and of query results, and the combination allows for
-an arguably cleaner/more natural approach to such queries.
-As an example, suppose we wanted, for each Facebook user, to produce a record that has his/her name
-plus a list of the messages written by that user.
-In SQL, this would involve a left outer join between users and messages, grouping by user, and having
-the user name repeated alongside each message.
-In AQL, this sort of use case can be handled (more naturally) as follows:
-
-        use dataverse TinySocial;
-
-        from $user in dataset FacebookUsers
-        select {
-        "uname": $user.name,
-        "messages": from $message in dataset FacebookMessages
-                where $message.author-id = $user.id
-                select $message.message
-        };
-
-This AQL query binds the variable `$user` to the data instances in FacebookUsers;
-for each user, it constructs a result record containing a "uname" field with the user's
-name and a "messages" field with a nested collection of all messages for that user.
-The nested collection for each user is specified by using a correlated subquery.
-(Note: While it looks like nested loops could be involved in computing the result,
-AsterixDB recognizes the equivalence of such a query to an outer join, and it will
-use an efficient hash-based strategy when actually computing the query's result.)
-
-Here is this example query's expected output:
-
-        { "uname": "MargaritaStoddard", "messages": [ " dislike iphone its touch-screen is horrible", " can't stand at&t the network is horrible:(", " like verizon the 3G is awesome:)", " can't stand motorola the touch-screen is terrible", " can't stand at&t its plan is terrible" ] }
-        { "uname": "IsbelDull", "messages": [ " like samsung the plan is amazing", " like t-mobile its platform is mind-blowing" ] }
-        { "uname": "EmoryUnk", "messages": [ " love sprint its shortcut-menu is awesome:)", " love verizon its wireless is good" ] }
-        { "uname": "NicholasStroh", "messages": [  ] }
-        { "uname": "VonKemble", "messages": [ " dislike sprint the speed is horrible" ] }
-        { "uname": "WillisWynne", "messages": [ " love sprint the customization is mind-blowing" ] }
-        { "uname": "SuzannaTillson", "messages": [ " like iphone the voicemail-service is awesome" ] }
-        { "uname": "NilaMilliron", "messages": [  ] }
-        { "uname": "WoodrowNehling", "messages": [ " love at&t its 3G is good:)" ] }
-        { "uname": "BramHatch", "messages": [ " dislike iphone the voice-command is bad:(", " can't stand t-mobile its voicemail-service is OMG:(" ] }
-
-
-### Query 4 - Theta Join ###
-Not all joins are expressible as equijoins and computable using equijoin-oriented algorithms.
-Some use cases involve join predicates based on functions; AsterixDB supports the expression of
-such queries and will still evaluate them as best it can, using nested-loop-based
-techniques (and broadcast joins in the parallel case).
-
-As an example of such a use case, suppose that we wanted, for each tweet T, to find all of the
-other tweets that originated from within a circle of radius 1 surrounding tweet T's location.
-In AQL, this can be specified in a manner similar to the previous query using one of the built-in
-functions on the spatial data type instead of id equality in the correlated query's _where_ clause:
-
-        use dataverse TinySocial;
-
-        from $t in dataset TweetMessages
-        select {
-        "message": $t.message-text,
-        "nearby-messages": from $t2 in dataset TweetMessages
-                    where spatial-distance($t.sender-location, $t2.sender-location) <= 1
-                    select { "msgtxt":$t2.message-text}
-        };
-
-Here is the expected result for this query:
-
-        { "message": " love t-mobile its customization is good:)", "nearby-messages": [ { "msgtxt": " love t-mobile its customization is good:)" } ] }
-        { "message": " hate verizon its voice-clarity is OMG:(", "nearby-messages": [ { "msgtxt": " like motorola the speed is good:)" }, { "msgtxt": " hate verizon its voice-clarity is OMG:(" } ] }
-        { "message": " can't stand iphone its platform is terrible", "nearby-messages": [ { "msgtxt": " can't stand iphone its platform is terrible" } ] }
-        { "message": " like samsung the voice-command is amazing:)", "nearby-messages": [ { "msgtxt": " like samsung the voice-command is amazing:)" } ] }
-        { "message": " like verizon its shortcut-menu is awesome:)", "nearby-messages": [ { "msgtxt": " like verizon its shortcut-menu is awesome:)" } ] }
-        { "message": " like motorola the speed is good:)", "nearby-messages": [ { "msgtxt": " hate verizon its voice-clarity is OMG:(" }, { "msgtxt": " like motorola the speed is good:)" } ] }
-        { "message": " like sprint the voice-command is mind-blowing:)", "nearby-messages": [ { "msgtxt": " like sprint the voice-command is mind-blowing:)" } ] }
-        { "message": " can't stand motorola its speed is terrible:(", "nearby-messages": [ { "msgtxt": " can't stand motorola its speed is terrible:(" } ] }
-        { "message": " like iphone the voice-clarity is good:)", "nearby-messages": [ { "msgtxt": " like iphone the voice-clarity is good:)" } ] }
-        { "message": " like samsung the platform is good", "nearby-messages": [ { "msgtxt": " like samsung the platform is good" } ] }
-        { "message": " like t-mobile the shortcut-menu is awesome:)", "nearby-messages": [ { "msgtxt": " like t-mobile the shortcut-menu is awesome:)" } ] }
-        { "message": " love verizon its voicemail-service is awesome", "nearby-messages": [ { "msgtxt": " love verizon its voicemail-service is awesome" } ] }
-
-
-### Query 5 - Fuzzy Join ###
-As another example of a non-equijoin use case, we could ask AsterixDB to find, for each Facebook user,
-all Twitter users with names "similar" to their name.
-AsterixDB supports a variety of "fuzzy match" functions for use with textual and set-based data.
-As one example, we could choose to use edit distance with a threshold of 3 as the definition of name
-similarity, in which case we could write the following query using AQL's operator-based syntax (~=)
-for testing whether or not two values are similar:
-
-        use dataverse TinySocial;
-
-        set simfunction "edit-distance";
-        set simthreshold "3";
-        from $fbu in dataset FacebookUsers
-        select {
-            "id": $fbu.id,
-            "name": $fbu.name,
-            "similar-users": from $t in dataset TweetMessages
-                    with $tu := $t.user
-                    where $tu.name ~= $fbu.name
-                    select {
-                    "twitter-screenname": $tu.screen-name,
-                    "twitter-name": $tu.name
-                    }
-        };
-
-The expected result for this query against our sample data is:
-
-        { "id": 1, "name": "MargaritaStoddard", "similar-users": [  ] }
-        { "id": 2, "name": "IsbelDull", "similar-users": [  ] }
-        { "id": 3, "name": "EmoryUnk", "similar-users": [  ] }
-        { "id": 4, "name": "NicholasStroh", "similar-users": [  ] }
-        { "id": 5, "name": "VonKemble", "similar-users": [  ] }
-        { "id": 6, "name": "WillisWynne", "similar-users": [  ] }
-        { "id": 7, "name": "SuzannaTillson", "similar-users": [  ] }
-        { "id": 8, "name": "NilaMilliron", "similar-users": [ { "twitter-screenname": "NilaMilliron_tw", "twitter-name": "Nila Milliron" } ] }
-        { "id": 9, "name": "WoodrowNehling", "similar-users": [  ] }
-        { "id": 10, "name": "BramHatch", "similar-users": [  ] }
-
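-In addition to the ~= operator, whose behavior is set by the _set simfunction_ and _set simthreshold_
-directives, similarity can also be requested by calling a fuzzy-match function directly.
-As a sketch (assuming that the builtin edit-distance function is available in your AsterixDB version),
-the same name-similarity predicate could be expressed functionally:
-
-        use dataverse TinySocial;
-
-        from $fbu in dataset FacebookUsers
-        select {
-            "id": $fbu.id,
-            "name": $fbu.name,
-            "similar-users": from $t in dataset TweetMessages
-                    with $tu := $t.user
-                    where edit-distance($tu.name, $fbu.name) <= 3
-                    select {
-                    "twitter-screenname": $tu.screen-name,
-                    "twitter-name": $tu.name
-                    }
-        };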
-
-### Query 6 - Existential Quantification ###
-The expressive power of AQL includes support for queries involving "some" (existentially quantified)
-and "all" (universally quantified) query semantics.
-As an example of an existential AQL query, here we show a query to list the Facebook users who are currently employed.
-Such users will have an employment history containing a record whose end-date value is missing, which leads us to the
-following AQL query:
-
-        use dataverse TinySocial;
-
-        from $fbu in dataset FacebookUsers
-        where (some $e in $fbu.employment satisfies is-missing($e.end-date))
-        select $fbu;
-
-The expected result in this case is:
-
-        { "id": 1, "alias": "Margarita", "name": "MargaritaStoddard", "user-since": datetime("2012-08-20T10:10:00.000Z"), "friend-ids": {{ 2, 3, 6, 10 }}, "employment": [ { "organization-name": "Codetechno", "start-date": date("2006-08-06"), "end-date": null } ] }
-        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "user-since": datetime("2011-01-22T10:10:00.000Z"), "friend-ids": {{ 1, 4 }}, "employment": [ { "organization-name": "Hexviafind", "start-date": date("2010-04-27"), "end-date": null } ] }
-        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "user-since": datetime("2010-12-27T10:10:00.000Z"), "friend-ids": {{ 2 }}, "employment": [ { "organization-name": "Zamcorporation", "start-date": date("2010-06-08"), "end-date": null } ] }
-        { "id": 5, "alias": "Von", "name": "VonKemble", "user-since": datetime("2010-01-05T10:10:00.000Z"), "friend-ids": {{ 3, 6, 10 }}, "employment": [ { "organization-name": "Kongreen", "start-date": date("2010-11-27"), "end-date": null } ] }
-        { "id": 6, "alias": "Willis", "name": "WillisWynne", "user-since": datetime("2005-01-17T10:10:00.000Z"), "friend-ids": {{ 1, 3, 7 }}, "employment": [ { "organization-name": "jaydax", "start-date": date("2009-05-15"), "end-date": null } ] }
-        { "id": 7, "alias": "Suzanna", "name": "SuzannaTillson", "user-since": datetime("2012-08-07T10:10:00.000Z"), "friend-ids": {{ 6 }}, "employment": [ { "organization-name": "Labzatron", "start-date": date("2011-04-19"), "end-date": null } ] }
-        { "id": 8, "alias": "Nila", "name": "NilaMilliron", "user-since": datetime("2008-01-01T10:10:00.000Z"), "friend-ids": {{ 3 }}, "employment": [ { "organization-name": "Plexlane", "start-date": date("2010-02-28"), "end-date": null } ] }
-
-
-### Query 7 - Universal Quantification ###
-As an example of a universal AQL query, here we show a query to list the Facebook users who are currently unemployed.
-Such users will have an employment history containing no records with missing end-date values, leading us to the
-following AQL query:
-
-        use dataverse TinySocial;
-
-        from $fbu in dataset FacebookUsers
-        where (every $e in $fbu.employment satisfies not(is-missing($e.end-date)))
-        select $fbu;
-
-Here is the expected result for our sample data:
-
-        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "user-since": datetime("2012-07-10T10:10:00.000Z"), "friend-ids": {{ 1, 5, 8, 9 }}, "employment": [ { "organization-name": "geomedia", "start-date": date("2010-06-17"), "end-date": date("2010-01-26") } ] }
-        { "id": 9, "alias": "Woodrow", "name": "WoodrowNehling", "user-since": datetime("2005-09-20T10:10:00.000Z"), "friend-ids": {{ 3, 10 }}, "employment": [ { "organization-name": "Zuncan", "start-date": date("2003-04-22"), "end-date": date("2009-12-13") } ] }
-        { "id": 10, "alias": "Bram", "name": "BramHatch", "user-since": datetime("2010-10-16T10:10:00.000Z"), "friend-ids": {{ 1, 5, 9 }}, "employment": [ { "organization-name": "physcane", "start-date": date("2007-06-05"), "end-date": date("2011-11-05") } ] }
-
-
-### Query 8 - Simple Aggregation ###
-Like SQL, the AQL language of AsterixDB provides support for computing aggregates over large amounts of data.
-As a very simple example, the following AQL query computes the total number of Facebook users:
-
-        use dataverse TinySocial;
-
-        count(from $fbu in dataset FacebookUsers select $fbu);
-
-In AQL, aggregate functions can be applied to arbitrary subquery results; in this case, the count function
-is applied to the result of a query that enumerates the Facebook users.  The expected result here is:
-
-        10
-
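-To further illustrate that aggregates can be applied to arbitrary subquery results, the following sketch
-(not one of the tutorial's numbered queries, and assuming the builtin avg aggregate function) computes
-the average number of friend ids per Facebook user:
-
-        use dataverse TinySocial;
-
-        avg(from $fbu in dataset FacebookUsers select count($fbu.friend-ids));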
-
-
-### Query 9-A - Grouping and Aggregation ###
-Also like SQL, AQL supports grouped aggregation.
-For every Twitter user, the following group-by/aggregate query counts the number of tweets sent by that user:
-
-        use dataverse TinySocial;
-
-        from $t in dataset TweetMessages
-        group by $uid := $t.user.screen-name keeping $t
-        select {
-        "user": $uid,
-        "count": count($t)
-        };
-
-The _from_ clause incrementally binds $t to tweets, and the _group by_ clause groups the tweets by
-their issuers' Twitter screen-names.
-Unlike SQL, where data is tabular---flat---the data model underlying AQL allows for nesting.
-Thus, following the _group by_ clause, the _select_ clause in this query sees a sequence of $t groups,
-with each such group having an associated $uid variable value (i.e., the tweeting user's screen name).
-In the context of the _select_ clause, due to "... keeping $t ...", $uid is bound to the tweeter's screen name and $t
-is bound to the _set_ of tweets issued by that tweeter.
-The _select_ clause constructs a result record containing the tweeter's screen name and the count of the items
-in the associated tweet set.
-The query result will contain one such record per screen name.
-This query also illustrates another feature of AQL; notice that each user's screen name is accessed via a
-path syntax that traverses each tweet's nested record structure.
-
-Here is the expected result for this query over the sample data:
-
-        { "user": "ChangEwing_573", "count": 1 }
-        { "user": "ColineGeyer@63", "count": 3 }
-        { "user": "NathanGiesen@211", "count": 6 }
-        { "user": "NilaMilliron_tw", "count": 1 }
-        { "user": "OliJackson_512", "count": 1 }
-
-
-
-### Query 9-B - (Hash-Based) Grouping and Aggregation ###
-As for joins, AsterixDB has multiple evaluation strategies available for processing grouped aggregate queries.
-For grouped aggregation, the system knows how to employ both sort-based and hash-based aggregation methods,
-with sort-based methods being used by default and a hint being available to suggest that a different approach
-be used in processing a particular AQL query.
-
-The following query is similar to Query 9-A, but adds a hash-based aggregation hint:
-
-        use dataverse TinySocial;
-
-        from $t in dataset TweetMessages
-        /*+ hash*/
-        group by $uid := $t.user.screen-name keeping $t
-        select {
-        "user": $uid,
-        "count": count($t)
-        };
-
-Here is the expected result:
-
-        { "user": "OliJackson_512", "count": 1 }
-        { "user": "ColineGeyer@63", "count": 3 }
-        { "user": "NathanGiesen@211", "count": 6 }
-        { "user": "NilaMilliron_tw", "count": 1 }
-        { "user": "ChangEwing_573", "count": 1 }
-
-
-
-### Query 10 - Grouping and Limits ###
-In some use cases it is not necessary to compute the entire answer to a query;
-often, just the first _N_ or top _N_ results are sufficient.
-This is expressible in AQL using the _limit_ clause combined with the _order by_ clause.
-
-The following AQL query returns the top 3 Twitter users based on who has issued the most tweets:
-
-        use dataverse TinySocial;
-
-        from $t in dataset TweetMessages
-        group by $uid := $t.user.screen-name keeping $t
-        with $c := count($t)
-        order by $c desc
-        limit 3
-        select {
-            "user": $uid,
-            "count": $c
-        };
-
-The expected result for this query is:
-
-        { "user": "NathanGiesen@211", "count": 6 }
-        { "user": "ColineGeyer@63", "count": 3 }
-        { "user": "NilaMilliron_tw", "count": 1 }
-
-
-### Query 11 - Left Outer Fuzzy Join ###
-As a last example of AQL and its query power, the following query, for each tweet,
-finds all of the tweets that are similar based on the topics that they refer to:
-
-        use dataverse TinySocial;
-
-        set simfunction "jaccard";
-        set simthreshold "0.3";
-        from $t in dataset TweetMessages
-        select {
-            "tweet": $t,
-            "similar-tweets": from $t2 in dataset TweetMessages
-                    where  $t2.referred-topics ~= $t.referred-topics
-                    and $t2.tweetid != $t.tweetid
-                    select $t2.referred-topics
-        };
-
-This query illustrates several things worth knowing in order to write fuzzy queries in AQL.
-First, as mentioned earlier, AQL offers an operator-based syntax for testing whether two values are "similar" to one another.
-Second, recall that the referred-topics field of records of datatype TweetMessageType is a bag of strings.
-This query sets the context for its similarity join by requesting that Jaccard-based similarity semantics
-([http://en.wikipedia.org/wiki/Jaccard_index](http://en.wikipedia.org/wiki/Jaccard_index))
-be used for the query's similarity operator and that a Jaccard value of 0.3 be used as its similarity threshold.
-
-The expected result for this fuzzy join query is:
-
-        { "tweet": { "tweetid": "1", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("47.44,80.65"), "send-time": datetime("2008-04-26T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customization" }}, "message-text": " love t-mobile its customization is good:)" }, "similar-tweets": [ {{ "t-mobile", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "10", "user": { "screen-name": "ColineGeyer@63", "lang": "en", "friends_count": 121, "statuses_count": 362, "name": "Coline Geyer", "followers_count": 17159 }, "sender-location": point("29.15,76.53"), "send-time": datetime("2008-01-26T10:10:00.000Z"), "referred-topics": {{ "verizon", "voice-clarity" }}, "message-text": " hate verizon its voice-clarity is OMG:(" }, "similar-tweets": [ {{ "iphone", "voice-clarity" }}, {{ "verizon", "voicemail-service" }}, {{ "verizon", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "11", "user": { "screen-name": "NilaMilliron_tw", "lang": "en", "friends_count": 445, "statuses_count": 164, "name": "Nila Milliron", "followers_count": 22649 }, "sender-location": point("37.59,68.42"), "send-time": datetime("2008-03-09T10:10:00.000Z"), "referred-topics": {{ "iphone", "platform" }}, "message-text": " can't stand iphone its platform is terrible" }, "similar-tweets": [ {{ "iphone", "voice-clarity" }}, {{ "samsung", "platform" }} ] }
-        { "tweet": { "tweetid": "12", "user": { "screen-name": "OliJackson_512", "lang": "en", "friends_count": 445, "statuses_count": 164, "name": "Oli Jackson", "followers_count": 22649 }, "sender-location": point("24.82,94.63"), "send-time": datetime("2010-02-13T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-command" }}, "message-text": " like samsung the voice-command is amazing:)" }, "similar-tweets": [ {{ "samsung", "platform" }}, {{ "sprint", "voice-command" }} ] }
-        { "tweet": { "tweetid": "2", "user": { "screen-name": "ColineGeyer@63", "lang": "en", "friends_count": 121, "statuses_count": 362, "name": "Coline Geyer", "followers_count": 17159 }, "sender-location": point("32.84,67.14"), "send-time": datetime("2010-05-13T10:10:00.000Z"), "referred-topics": {{ "verizon", "shortcut-menu" }}, "message-text": " like verizon its shortcut-menu is awesome:)" }, "similar-tweets": [ {{ "verizon", "voicemail-service" }}, {{ "verizon", "voice-clarity" }}, {{ "t-mobile", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "3", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("29.72,75.8"), "send-time": datetime("2006-11-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " like motorola the speed is good:)" }, "similar-tweets": [ {{ "motorola", "speed" }} ] }
-        { "tweet": { "tweetid": "4", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("39.28,70.48"), "send-time": datetime("2011-12-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " like sprint the voice-command is mind-blowing:)" }, "similar-tweets": [ {{ "samsung", "voice-command" }} ] }
-        { "tweet": { "tweetid": "5", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("40.09,92.69"), "send-time": datetime("2006-08-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " can't stand motorola its speed is terrible:(" }, "similar-tweets": [ {{ "motorola", "speed" }} ] }
-        { "tweet": { "tweetid": "6", "user": { "screen-name": "ColineGeyer@63", "lang": "en", "friends_count": 121, "statuses_count": 362, "name": "Coline Geyer", "followers_count": 17159 }, "sender-location": point("47.51,83.99"), "send-time": datetime("2010-05-07T10:10:00.000Z"), "referred-topics": {{ "iphone", "voice-clarity" }}, "message-text": " like iphone the voice-clarity is good:)" }, "similar-tweets": [ {{ "verizon", "voice-clarity" }}, {{ "iphone", "platform" }} ] }
-        { "tweet": { "tweetid": "7", "user": { "screen-name": "ChangEwing_573", "lang": "en", "friends_count": 182, "statuses_count": 394, "name": "Chang Ewing", "followers_count": 32136 }, "sender-location": point("36.21,72.6"), "send-time": datetime("2011-08-25T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " like samsung the platform is good" }, "similar-tweets": [ {{ "iphone", "platform" }}, {{ "samsung", "voice-command" }} ] }
-        { "tweet": { "tweetid": "8", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("46.05,93.34"), "send-time": datetime("2005-10-14T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "shortcut-menu" }}, "message-text": " like t-mobile the shortcut-menu is awesome:)" }, "similar-tweets": [ {{ "t-mobile", "customization" }}, {{ "verizon", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "9", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("36.86,74.62"), "send-time": datetime("2012-07-21T10:10:00.000Z"), "referred-topics": {{ "verizon", "voicemail-service" }}, "message-text": " love verizon its voicemail-service is awesome" }, "similar-tweets": [ {{ "verizon", "voice-clarity" }}, {{ "verizon", "shortcut-menu" }} ] }
-
-
-### Inserting New Data  ###
-In addition to loading and querying data, AsterixDB supports incremental additions to datasets via the AQL _insert_ statement.
-
-The following example adds a new tweet by user "NathanGiesen@211" to the TweetMessages dataset.
-(An astute reader may notice that this tweet was issued half an hour after his last tweet, so his counts
-have all gone up in the interim, although he appears not to have moved in the last half hour.)
-
-        use dataverse TinySocial;
-
-        insert into dataset TweetMessages
-        (
-           {"tweetid":"13",
-            "user":
-                {"screen-name":"NathanGiesen@211",
-                 "lang":"en",
-                 "friends_count":39345,
-                 "statuses_count":479,
-                 "name":"Nathan Giesen",
-                 "followers_count":49420
-                },
-            "sender-location":point("47.44,80.65"),
-            "send-time":datetime("2008-04-26T10:10:35"),
-            "referred-topics":{{"tweeting"}},
-            "message-text":"tweety tweet, my fellow tweeters!"
-           }
-        );
-
-In general, the data to be inserted may be specified using any valid AQL query expression.
-The insertion of a single object instance, as in this example, is just a special case where
-the query expression happens to be a record constructor involving only constants.
-
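-For instance, the following sketch (not part of the original example set, and assuming the chosen
-message-id value does not collide with an existing key) inserts the result of a query expression rather
-than a constant record constructor, copying an existing message under a new primary key:
-
-        use dataverse TinySocial;
-
-        insert into dataset FacebookMessages
-        (
-           from $m in dataset FacebookMessages
-           where $m.message-id = 1
-           select {
-             "message-id": $m.message-id + 100,
-             "author-id": $m.author-id,
-             "in-response-to": $m.in-response-to,
-             "sender-location": $m.sender-location,
-             "message": $m.message
-           }
-        );
-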
-### Deleting Existing Data  ###
-In addition to inserting new data, AsterixDB supports deletion from datasets via the AQL _delete_ statement.
-The statement supports "searched delete" semantics, and its
-_where_ clause can involve any valid AQL expression.
-
-The following example deletes the tweet that we just added from user "NathanGiesen@211".  (Easy come, easy go. :-))
-
-        use dataverse TinySocial;
-
-        delete $tm from dataset TweetMessages where $tm.tweetid = "13";
-
-It should be noted that one form of data change not yet supported by AsterixDB is in-place data modification (_update_);
-currently, only insert and delete operations are supported.
-To achieve the effect of an update, two statements are currently needed---one to delete the old record from the
-dataset where it resides, and another to insert the new replacement record (with the same primary key but with
-different field values for some of the associated data content).
-
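-For example, to change the message text of a tweet such as the one with tweetid "13" that we inserted
-earlier, the effect of an update can be achieved with a delete followed by an insert of a replacement
-record; the following sketch simply repeats all of the other field values unchanged:
-
-        use dataverse TinySocial;
-
-        delete $tm from dataset TweetMessages where $tm.tweetid = "13";
-
-        insert into dataset TweetMessages
-        (
-           {"tweetid":"13",
-            "user":
-                {"screen-name":"NathanGiesen@211",
-                 "lang":"en",
-                 "friends_count":39345,
-                 "statuses_count":479,
-                 "name":"Nathan Giesen",
-                 "followers_count":49420
-                },
-            "sender-location":point("47.44,80.65"),
-            "send-time":datetime("2008-04-26T10:10:35"),
-            "referred-topics":{{"tweeting"}},
-            "message-text":"tweety tweet, my fellow tweeters! (now edited)"
-           }
-        );
-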
-### Transaction Support ###
-
-AsterixDB supports record-level ACID transactions that begin and terminate implicitly for each record
-inserted, deleted, or searched while a given AQL statement is being executed.
-This is quite similar to the level of transaction support found in today's NoSQL stores.
-AsterixDB does not support multi-statement transactions, and in fact an AQL statement that involves
-multiple records can itself involve multiple independent record-level transactions.
-An example consequence of this is that, when an AQL statement attempts to insert 1000 records, it is
-possible that the first 800 records could end up being committed while the remaining 200 records fail
-to be inserted.
-This situation could happen, for example, if a duplicate key exception occurs as the 801st insertion is
-attempted.
-If this happens, AsterixDB will report the error (e.g., a duplicate key exception) as the result of the
-offending AQL insert statement, and the application logic above will need to take the appropriate
-action(s) to assess the resulting state and to clean up and/or continue as appropriate.
-
-## Further Help ##
-That's it! You are now armed and dangerous with respect to semistructured data management using AsterixDB.
-
-AsterixDB is a powerful new BDMS---Big Data Management System---that we hope may usher in a new era of much
-more declarative Big Data management.
-AsterixDB is powerful, so use it wisely, and remember: "With great power comes great responsibility..." :-)
-
-Please e-mail the AsterixDB user group
-(users (at) asterixdb.apache.org)
-if you run into any problems or simply have further questions about the AsterixDB system, its features, or their proper use.
diff --git a/asterixdb/asterix-doc/src/site/markdown/aql/primer.md b/asterixdb/asterix-doc/src/site/markdown/aql/primer.md
index 35c870d..e07edb6 100644
--- a/asterixdb/asterix-doc/src/site/markdown/aql/primer.md
+++ b/asterixdb/asterix-doc/src/site/markdown/aql/primer.md
@@ -44,7 +44,6 @@
 Once you have reached the end, you will be fully armed and dangerous, with all the basic AsterixDB knowledge
 that you'll need to start down the path of modeling, storing, and querying your own semistructured data.
 
-----
 ## ADM: Modeling Semistructured Data in AsterixDB ##
 In this section you will learn all about modeling Big Data using
 ADM, the data model of the AsterixDB BDMS.
@@ -74,13 +73,13 @@
 Datatypes are open by default unless you tell AsterixDB otherwise.
 Let's put these concepts to work.
 
-Our little sample scenario involves hypothetical information about users of two popular social networks,
-Facebook and Twitter, and their messages.
+Our little sample scenario involves information about users of two hypothetical social networks,
+Gleambook and Chirp, and their messages.
 We'll start by defining a dataverse called "TinySocial" to hold our datatypes and datasets.
 The AsterixDB data model (ADM) is essentially a superset of JSON---it's what you get by extending
 JSON with more data types and additional data modeling constructs borrowed from object databases.
-The following is how we can create the TinySocial dataverse plus a set of ADM types for modeling
-Twitter users, their Tweets, Facebook users, their users' employment information, and their messages.
+The following shows how we can create the TinySocial dataverse plus a set of ADM types for modeling
+Chirp users, their Chirps, Gleambook users, their employment information, and their messages.
 (Note: Keep in mind that this is just a tiny and somewhat silly example intended for illustrating
 some of the key features of AsterixDB. :-))
 
@@ -89,85 +88,82 @@
         create dataverse TinySocial;
         use dataverse TinySocial;
 
-        create type TwitterUserType as open {
-            screen-name: string,
+        create type ChirpUserType as {
+            screenName: string,
             lang: string,
-            friends_count: int64,
-            statuses_count: int64,
+            friendsCount: int,
+            statusesCount: int,
             name: string,
-            followers_count: int64
-        }
+            followersCount: int
+        };
 
-        create type TweetMessageType as closed {
-            tweetid: string,
-            user: TwitterUserType,
-            sender-location: point?,
-            send-time: datetime,
-            referred-topics: {{ string }},
-            message-text: string
-        }
+        create type ChirpMessageType as closed {
+            chirpId: string,
+            user: ChirpUserType,
+            senderLocation: point?,
+            sendTime: datetime,
+            referredTopics: {{ string }},
+            messageText: string
+        };
 
-        create type EmploymentType as open {
-            organization-name: string,
-            start-date: date,
-            end-date: date?
-        }
+        create type EmploymentType as {
+            organizationName: string,
+            startDate: date,
+            endDate: date?
+        };
 
-        create type FacebookUserType as closed {
-            id: int64,
+        create type GleambookUserType as {
+            id: int,
             alias: string,
             name: string,
-            user-since: datetime,
-            friend-ids: {{ int64 }},
+            userSince: datetime,
+            friendIds: {{ int }},
             employment: [EmploymentType]
-        }
+        };
 
-        create type FacebookMessageType as closed {
-            message-id: int64,
-            author-id: int64,
-            in-response-to: int64?,
-            sender-location: point?,
+        create type GleambookMessageType as {
+            messageId: int,
+            authorId: int,
+            inResponseTo: int?,
+            senderLocation: point?,
             message: string
-        }
-
-
+        };
 
 The first three lines above tell AsterixDB to drop the old TinySocial dataverse, if one already
 exists, and then to create a brand new one and make it the focus of the statements that follow.
-The first type creation statement creates a datatype for holding information about Twitter users.
+The first _create type_ statement creates a datatype for holding information about Chirp users.
 It is a record type with a mix of integer and string data, very much like a (flat) relational tuple.
 The indicated fields are all mandatory, but because the type is open, additional fields are welcome.
-The second statement creates a datatype for Twitter messages; this shows how to specify a closed type.
-Interestingly (based on one of Twitter's APIs), each Twitter message actually embeds an instance of the
+The second statement creates a datatype for Chirp messages; this shows how to specify a closed type.
+Interestingly (based on one of Chirp's APIs), each Chirp message actually embeds an instance of the
 sending user's information (current as of when the message was sent), so this is an example of a nested
 record in ADM.
-Twitter messages can optionally contain the sender's location, which is modeled via the sender-location
+Chirp messages can optionally contain the sender's location, which is modeled via the senderLocation
 field of spatial type _point_; the question mark following the field type indicates its optionality.
 An optional field is like a nullable field in SQL---it may be present or missing, but when it's present,
-its data type will conform to the datatype's specification.
-The send-time field illustrates the use of a temporal primitive type, _datetime_.
-Lastly, the referred-topics field illustrates another way that ADM is richer than the relational model;
-this field holds a bag (a.k.a. an unordered list) of strings.
-Since the overall datatype definition for Twitter messages says "closed", the fields that it lists are
+its value's data type will conform to the datatype's specification.
+The sendTime field illustrates the use of a temporal primitive type, _datetime_.
+Lastly, the referredTopics field illustrates another way that ADM is richer than the relational model;
+this field holds a bag (*a.k.a.* an unordered list) of strings.
+Since the overall datatype definition for Chirp messages says "closed", the fields that it lists are
 the only fields that instances of this type will be allowed to contain.
-The next two create type statements create a record type for holding information about one component of
-the employment history of a Facebook user and then a record type for holding the user information itself.
-The Facebook user type highlights a few additional ADM data model features.
-Its friend-ids field is a bag of integers, presumably the Facebook user ids for this user's friends,
+The next two _create type_ statements create a record type for holding information about one component of
+the employment history of a Gleambook user and then a record type for holding the user information itself.
+The Gleambook user type highlights a few additional ADM data model features.
+Its friendIds field is a bag of integers, presumably the Gleambook user ids for this user's friends,
 and its employment field is an ordered list of employment records.
-The final create type statement defines a type for handling the content of a Facebook message in our
+The final _create type_ statement defines a type for handling the content of a Gleambook message in our
 hypothetical social data storage scenario.
 
 Before going on, we need to once again emphasize the idea that AsterixDB is aimed at storing
 and querying not just Big Data, but Big _Semistructured_ Data.
-This means that most of the fields listed in the create type statements above could have been
+This means that most of the fields listed in the _create type_ statements above could have been
 omitted without changing anything other than the resulting size of stored data instances on disk.
 AsterixDB stores its information about the fields defined a priori as separate metadata, whereas
 the information about other fields that are "just there" in instances of open datatypes is stored
 with each instance---making for more bits on disk and longer times for operations affected by
 data size (e.g., dataset scans).
-The only fields that _must_ be specified a priori are the primary key.
-Indexes can be built on fields that don't belong to the pre-specified part of the datatype's schema as long as their type is specified at index create time and the _enforced_ keyword is provided at the end of the index definition.  (The _enforced_ keyword asks the system to ensure that the indexed field or fields conform to this specified type in all of the dataset's record instances where they are present.)  Additionally, indexed fields may be nested arbitrarily deep within a dataset's records as long as the nesting does not pass through a list (be it ordered or unordered) along the way.
+The only fields that _must_ be specified a priori are the primary key fields of each dataset.
 
 ### Creating Datasets and Indexes ###
 
@@ -179,52 +175,50 @@
 
         use dataverse TinySocial;
 
-        create dataset FacebookUsers(FacebookUserType)
-        primary key id;
+        create dataset GleambookUsers(GleambookUserType)
+            primary key id;
 
-        create dataset FacebookMessages(FacebookMessageType)
-        primary key message-id;
+        create dataset GleambookMessages(GleambookMessageType)
+            primary key messageId;
 
-        create dataset TwitterUsers(TwitterUserType)
-        primary key screen-name;
+        create dataset ChirpUsers(ChirpUserType)
+            primary key screenName;
 
-        create dataset TweetMessages(TweetMessageType)
-        primary key tweetid
-        hints(cardinality=100);
+        create dataset ChirpMessages(ChirpMessageType)
+            primary key chirpId
+            hints(cardinality=100);
 
-        create index fbUserSinceIdx on FacebookUsers(user-since);
-        create index fbAuthorIdx on FacebookMessages(author-id) type btree;
-        create index fbSenderLocIndex on FacebookMessages(sender-location) type rtree;
-        create index fbMessageIdx on FacebookMessages(message) type keyword;
+        create index gbUserSinceIdx on GleambookUsers(userSince);
+        create index gbAuthorIdx on GleambookMessages(authorId) type btree;
+        create index gbSenderLocIndex on GleambookMessages(senderLocation) type rtree;
+        create index gbMessageIdx on GleambookMessages(message) type keyword;
 
         for $ds in dataset Metadata.Dataset return $ds;
         for $ix in dataset Metadata.Index return $ix;
 
-
-
-The ADM DDL statements above create four datasets for holding our social data in the TinySocial
-dataverse: FacebookUsers, FacebookMessages, TwitterUsers, and TweetMessages.
-The first statement creates the FacebookUsers data set.
-It specifies that this dataset will store data instances conforming to FacebookUserType and that
+The DDL statements above create four datasets for holding our social data in the TinySocial
+dataverse: GleambookUsers, GleambookMessages, ChirpUsers, and ChirpMessages.
+The first _create dataset_ statement creates the GleambookUsers data set.
+It specifies that this dataset will store data instances conforming to GleambookUserType and that
 it has a primary key which is the id field of each instance.
 The primary key information is used by AsterixDB to uniquely identify instances for the purpose
 of later lookup and for use in secondary indexes.
 Each AsterixDB dataset is stored (and indexed) in the form of a B+ tree on primary key;
 secondary indexes point to their indexed data by primary key.
-In AsterixDB clusters, the primary key is also used to hash-partition (a.k.a. shard) the
+In AsterixDB clusters, the primary key is also used to hash-partition (*a.k.a.* shard) the
 dataset across the nodes of the cluster.
-The next three create dataset statements are similar.
+The next three _create dataset_ statements are similar.
 The last one illustrates an optional clause for providing useful hints to AsterixDB.
 In this case, the hint tells AsterixDB that the dataset definer is anticipating that the
-TweetMessages dataset will contain roughly 100 objects; knowing this can help AsterixDB
+ChirpMessages dataset will contain roughly 100 objects; knowing this can help AsterixDB
 to more efficiently manage and query this dataset.
 (AsterixDB does not yet gather and maintain data statistics; it will currently, arbitrarily,
 assume a cardinality of one million objects per dataset in the absence of such an optional
 definition-time hint.)
 
-The create dataset statements above are followed by four more DDL statements, each of which
+The _create dataset_ statements above are followed by four more DDL statements, each of which
 creates a secondary index on a field of one of the datasets.
-The first one indexes the FacebookUsers dataset on its user-since field.
+The first one indexes the GleambookUsers dataset on its userSince field.
 This index will be a B+ tree index; its type is unspecified and _btree_ is the default type.
 The other three illustrate how you can explicitly specify the desired type of index.
 In addition to btree, _rtree_ and inverted _keyword_ indexes are supported by AsterixDB.
@@ -246,9 +240,8 @@
 to reference datasets that live in a dataverse other than the one that
 was named in the most recently executed _use dataverse_ directive.
 
-----
 ## Loading Data Into AsterixDB ##
-Okay, so far so good---AsterixDB is now ready for data, so let's give it some data to store
+Okay, so far so good---AsterixDB is now ready for data, so let's give it some data to store.
 Our next task will be to load some sample data into the four datasets that we just defined.
 Here we will load a tiny set of records, defined in ADM format (a superset of JSON), into each dataset.
 In the boxes below you can see the actual data instances contained in each of the provided sample files.
@@ -261,61 +254,60 @@
 when persisted in AsterixDB, the data format will be binary and the data in the predefined fields
 of the data instances will be stored separately from their associated field name and type metadata.
 
-[Twitter Users](../data/twu.adm)
+[Chirp Users](../data/chu.adm)
 
-        {"screen-name":"NathanGiesen@211","lang":"en","friends_count":18,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416}
-        {"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159}
-        {"screen-name":"NilaMilliron_tw","lang":"en","friends_count":445,"statuses_count":164,"name":"Nila Milliron","followers_count":22649}
-        {"screen-name":"ChangEwing_573","lang":"en","friends_count":182,"statuses_count":394,"name":"Chang Ewing","followers_count":32136}
+        {"screenName":"NathanGiesen@211","lang":"en","friendsCount":18,"statusesCount":473,"name":"Nathan Giesen","followersCount":49416}
+        {"screenName":"ColineGeyer@63","lang":"en","friendsCount":121,"statusesCount":362,"name":"Coline Geyer","followersCount":17159}
+        {"screenName":"NilaMilliron_tw","lang":"en","friendsCount":445,"statusesCount":164,"name":"Nila Milliron","followersCount":22649}
+        {"screenName":"ChangEwing_573","lang":"en","friendsCount":182,"statusesCount":394,"name":"Chang Ewing","followersCount":32136}
 
-[Tweet Messages](../data/twm.adm)
+[Chirp Messages](../data/chm.adm)
 
-        {"tweetid":"1","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("47.44,80.65"),"send-time":datetime("2008-04-26T10:10:00"),"referred-topics":{{"t-mobile","customization"}},"message-text":" love t-mobile its customization is good:)"}
-        {"tweetid":"2","user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159},"sender-location":point("32.84,67.14"),"send-time":datetime("2010-05-13T10:10:00"),"referred-topics":{{"verizon","shortcut-menu"}},"message-text":" like verizon its shortcut-menu is awesome:)"}
-        {"tweetid":"3","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("29.72,75.8"),"send-time":datetime("2006-11-04T10:10:00"),"referred-topics":{{"motorola","speed"}},"message-text":" like motorola the speed is good:)"}
-        {"tweetid":"4","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("39.28,70.48"),"send-time":datetime("2011-12-26T10:10:00"),"referred-topics":{{"sprint","voice-command"}},"message-text":" like sprint the voice-command is mind-blowing:)"}
-        {"tweetid":"5","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("40.09,92.69"),"send-time":datetime("2006-08-04T10:10:00"),"referred-topics":{{"motorola","speed"}},"message-text":" can't stand motorola its speed is terrible:("}
-        {"tweetid":"6","user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159},"sender-location":point("47.51,83.99"),"send-time":datetime("2010-05-07T10:10:00"),"referred-topics":{{"iphone","voice-clarity"}},"message-text":" like iphone the voice-clarity is good:)"}
-        {"tweetid":"7","user":{"screen-name":"ChangEwing_573","lang":"en","friends_count":182,"statuses_count":394,"name":"Chang Ewing","followers_count":32136},"sender-location":point("36.21,72.6"),"send-time":datetime("2011-08-25T10:10:00"),"referred-topics":{{"samsung","platform"}},"message-text":" like samsung the platform is good"}
-        {"tweetid":"8","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("46.05,93.34"),"send-time":datetime("2005-10-14T10:10:00"),"referred-topics":{{"t-mobile","shortcut-menu"}},"message-text":" like t-mobile the shortcut-menu is awesome:)"}
-        {"tweetid":"9","user":{"screen-name":"NathanGiesen@211","lang":"en","friends_count":39339,"statuses_count":473,"name":"Nathan Giesen","followers_count":49416},"sender-location":point("36.86,74.62"),"send-time":datetime("2012-07-21T10:10:00"),"referred-topics":{{"verizon","voicemail-service"}},"message-text":" love verizon its voicemail-service is awesome"}
-        {"tweetid":"10","user":{"screen-name":"ColineGeyer@63","lang":"en","friends_count":121,"statuses_count":362,"name":"Coline Geyer","followers_count":17159},"sender-location":point("29.15,76.53"),"send-time":datetime("2008-01-26T10:10:00"),"referred-topics":{{"verizon","voice-clarity"}},"message-text":" hate verizon its voice-clarity is OMG:("}
-        {"tweetid":"11","user":{"screen-name":"NilaMilliron_tw","lang":"en","friends_count":445,"statuses_count":164,"name":"Nila Milliron","followers_count":22649},"sender-location":point("37.59,68.42"),"send-time":datetime("2008-03-09T10:10:00"),"referred-topics":{{"iphone","platform"}},"message-text":" can't stand iphone its platform is terrible"}
-        {"tweetid":"12","user":{"screen-name":"OliJackson_512","lang":"en","friends_count":445,"statuses_count":164,"name":"Oli Jackson","followers_count":22649},"sender-location":point("24.82,94.63"),"send-time":datetime("2010-02-13T10:10:00"),"referred-topics":{{"samsung","voice-command"}},"message-text":" like samsung the voice-command is amazing:)"}
+        {"chirpId":"1","user":{"screenName":"NathanGiesen@211","lang":"en","friendsCount":39339,"statusesCount":473,"name":"Nathan Giesen","followersCount":49416},"senderLocation":point("47.44,80.65"),"sendTime":datetime("2008-04-26T10:10:00"),"referredTopics":{{"t-mobile","customization"}},"messageText":" love t-mobile its customization is good:)"}
+        {"chirpId":"2","user":{"screenName":"ColineGeyer@63","lang":"en","friendsCount":121,"statusesCount":362,"name":"Coline Geyer","followersCount":17159},"senderLocation":point("32.84,67.14"),"sendTime":datetime("2010-05-13T10:10:00"),"referredTopics":{{"verizon","shortcut-menu"}},"messageText":" like verizon its shortcut-menu is awesome:)"}
+        {"chirpId":"3","user":{"screenName":"NathanGiesen@211","lang":"en","friendsCount":39339,"statusesCount":473,"name":"Nathan Giesen","followersCount":49416},"senderLocation":point("29.72,75.8"),"sendTime":datetime("2006-11-04T10:10:00"),"referredTopics":{{"motorola","speed"}},"messageText":" like motorola the speed is good:)"}
+        {"chirpId":"4","user":{"screenName":"NathanGiesen@211","lang":"en","friendsCount":39339,"statusesCount":473,"name":"Nathan Giesen","followersCount":49416},"senderLocation":point("39.28,70.48"),"sendTime":datetime("2011-12-26T10:10:00"),"referredTopics":{{"sprint","voice-command"}},"messageText":" like sprint the voice-command is mind-blowing:)"}
+        {"chirpId":"5","user":{"screenName":"NathanGiesen@211","lang":"en","friendsCount":39339,"statusesCount":473,"name":"Nathan Giesen","followersCount":49416},"senderLocation":point("40.09,92.69"),"sendTime":datetime("2006-08-04T10:10:00"),"referredTopics":{{"motorola","speed"}},"messageText":" can't stand motorola its speed is terrible:("}
+        {"chirpId":"6","user":{"screenName":"ColineGeyer@63","lang":"en","friendsCount":121,"statusesCount":362,"name":"Coline Geyer","followersCount":17159},"senderLocation":point("47.51,83.99"),"sendTime":datetime("2010-05-07T10:10:00"),"referredTopics":{{"iphone","voice-clarity"}},"messageText":" like iphone the voice-clarity is good:)"}
+        {"chirpId":"7","user":{"screenName":"ChangEwing_573","lang":"en","friendsCount":182,"statusesCount":394,"name":"Chang Ewing","followersCount":32136},"senderLocation":point("36.21,72.6"),"sendTime":datetime("2011-08-25T10:10:00"),"referredTopics":{{"samsung","platform"}},"messageText":" like samsung the platform is good"}
+        {"chirpId":"8","user":{"screenName":"NathanGiesen@211","lang":"en","friendsCount":39339,"statusesCount":473,"name":"Nathan Giesen","followersCount":49416},"senderLocation":point("46.05,93.34"),"sendTime":datetime("2005-10-14T10:10:00"),"referredTopics":{{"t-mobile","shortcut-menu"}},"messageText":" like t-mobile the shortcut-menu is awesome:)"}
+        {"chirpId":"9","user":{"screenName":"NathanGiesen@211","lang":"en","friendsCount":39339,"statusesCount":473,"name":"Nathan Giesen","followersCount":49416},"senderLocation":point("36.86,74.62"),"sendTime":datetime("2012-07-21T10:10:00"),"referredTopics":{{"verizon","voicemail-service"}},"messageText":" love verizon its voicemail-service is awesome"}
+        {"chirpId":"10","user":{"screenName":"ColineGeyer@63","lang":"en","friendsCount":121,"statusesCount":362,"name":"Coline Geyer","followersCount":17159},"senderLocation":point("29.15,76.53"),"sendTime":datetime("2008-01-26T10:10:00"),"referredTopics":{{"verizon","voice-clarity"}},"messageText":" hate verizon its voice-clarity is OMG:("}
+        {"chirpId":"11","user":{"screenName":"NilaMilliron_tw","lang":"en","friendsCount":445,"statusesCount":164,"name":"Nila Milliron","followersCount":22649},"senderLocation":point("37.59,68.42"),"sendTime":datetime("2008-03-09T10:10:00"),"referredTopics":{{"iphone","platform"}},"messageText":" can't stand iphone its platform is terrible"}
+        {"chirpId":"12","user":{"screenName":"OliJackson_512","lang":"en","friendsCount":445,"statusesCount":164,"name":"Oli Jackson","followersCount":22649},"senderLocation":point("24.82,94.63"),"sendTime":datetime("2010-02-13T10:10:00"),"referredTopics":{{"samsung","voice-command"}},"messageText":" like samsung the voice-command is amazing:)"}
 
-[Facebook Users](../data/fbu.adm)
+[Gleambook Users](../data/gbu.adm)
 
-        {"id":1,"alias":"Margarita","name":"MargaritaStoddard","user-since":datetime("2012-08-20T10:10:00"),"friend-ids":{{2,3,6,10}},"employment":[{"organization-name":"Codetechno","start-date":date("2006-08-06")}]}
-        {"id":2,"alias":"Isbel","name":"IsbelDull","user-since":datetime("2011-01-22T10:10:00"),"friend-ids":{{1,4}},"employment":[{"organization-name":"Hexviafind","start-date":date("2010-04-27")}]}
-        {"id":3,"alias":"Emory","name":"EmoryUnk","user-since":datetime("2012-07-10T10:10:00"),"friend-ids":{{1,5,8,9}},"employment":[{"organization-name":"geomedia","start-date":date("2010-06-17"),"end-date":date("2010-01-26")}]}
-        {"id":4,"alias":"Nicholas","name":"NicholasStroh","user-since":datetime("2010-12-27T10:10:00"),"friend-ids":{{2}},"employment":[{"organization-name":"Zamcorporation","start-date":date("2010-06-08")}]}
-        {"id":5,"alias":"Von","name":"VonKemble","user-since":datetime("2010-01-05T10:10:00"),"friend-ids":{{3,6,10}},"employment":[{"organization-name":"Kongreen","start-date":date("2010-11-27")}]}
-        {"id":6,"alias":"Willis","name":"WillisWynne","user-since":datetime("2005-01-17T10:10:00"),"friend-ids":{{1,3,7}},"employment":[{"organization-name":"jaydax","start-date":date("2009-05-15")}]}
-        {"id":7,"alias":"Suzanna","name":"SuzannaTillson","user-since":datetime("2012-08-07T10:10:00"),"friend-ids":{{6}},"employment":[{"organization-name":"Labzatron","start-date":date("2011-04-19")}]}
-        {"id":8,"alias":"Nila","name":"NilaMilliron","user-since":datetime("2008-01-01T10:10:00"),"friend-ids":{{3}},"employment":[{"organization-name":"Plexlane","start-date":date("2010-02-28")}]}
-        {"id":9,"alias":"Woodrow","name":"WoodrowNehling","user-since":datetime("2005-09-20T10:10:00"),"friend-ids":{{3,10}},"employment":[{"organization-name":"Zuncan","start-date":date("2003-04-22"),"end-date":date("2009-12-13")}]}
-        {"id":10,"alias":"Bram","name":"BramHatch","user-since":datetime("2010-10-16T10:10:00"),"friend-ids":{{1,5,9}},"employment":[{"organization-name":"physcane","start-date":date("2007-06-05"),"end-date":date("2011-11-05")}]}
+        {"id":1,"alias":"Margarita","name":"MargaritaStoddard","nickname":"Mags","userSince":datetime("2012-08-20T10:10:00"),"friendIds":{{2,3,6,10}},"employment":[{"organizationName":"Codetechno","startDate":date("2006-08-06")},{"organizationName":"geomedia","startDate":date("2010-06-17"),"endDate":date("2010-01-26")}],"gender":"F"}
+        {"id":2,"alias":"Isbel","name":"IsbelDull","nickname":"Izzy","userSince":datetime("2011-01-22T10:10:00"),"friendIds":{{1,4}},"employment":[{"organizationName":"Hexviafind","startDate":date("2010-04-27")}]}
+        {"id":3,"alias":"Emory","name":"EmoryUnk","userSince":datetime("2012-07-10T10:10:00"),"friendIds":{{1,5,8,9}},"employment":[{"organizationName":"geomedia","startDate":date("2010-06-17"),"endDate":date("2010-01-26")}]}
+        {"id":4,"alias":"Nicholas","name":"NicholasStroh","userSince":datetime("2010-12-27T10:10:00"),"friendIds":{{2}},"employment":[{"organizationName":"Zamcorporation","startDate":date("2010-06-08")}]}
+        {"id":5,"alias":"Von","name":"VonKemble","userSince":datetime("2010-01-05T10:10:00"),"friendIds":{{3,6,10}},"employment":[{"organizationName":"Kongreen","startDate":date("2010-11-27")}]}
+        {"id":6,"alias":"Willis","name":"WillisWynne","userSince":datetime("2005-01-17T10:10:00"),"friendIds":{{1,3,7}},"employment":[{"organizationName":"jaydax","startDate":date("2009-05-15")}]}
+        {"id":7,"alias":"Suzanna","name":"SuzannaTillson","userSince":datetime("2012-08-07T10:10:00"),"friendIds":{{6}},"employment":[{"organizationName":"Labzatron","startDate":date("2011-04-19")}]}
+        {"id":8,"alias":"Nila","name":"NilaMilliron","userSince":datetime("2008-01-01T10:10:00"),"friendIds":{{3}},"employment":[{"organizationName":"Plexlane","startDate":date("2010-02-28")}]}
+        {"id":9,"alias":"Woodrow","name":"WoodrowNehling","nickname":"Woody","userSince":datetime("2005-09-20T10:10:00"),"friendIds":{{3,10}},"employment":[{"organizationName":"Zuncan","startDate":date("2003-04-22"),"endDate":date("2009-12-13")}]}
+        {"id":10,"alias":"Bram","name":"BramHatch","userSince":datetime("2010-10-16T10:10:00"),"friendIds":{{1,5,9}},"employment":[{"organizationName":"physcane","startDate":date("2007-06-05"),"endDate":date("2011-11-05")}]}
 
-[Facebook Messages](../data/fbm.adm)
+[Gleambook Messages](../data/gbm.adm)
 
-        {"message-id":1,"author-id":3,"in-response-to":2,"sender-location":point("47.16,77.75"),"message":" love sprint its shortcut-menu is awesome:)"}
-        {"message-id":2,"author-id":1,"in-response-to":4,"sender-location":point("41.66,80.87"),"message":" dislike iphone its touch-screen is horrible"}
-        {"message-id":3,"author-id":2,"in-response-to":4,"sender-location":point("48.09,81.01"),"message":" like samsung the plan is amazing"}
-        {"message-id":4,"author-id":1,"in-response-to":2,"sender-location":point("37.73,97.04"),"message":" can't stand at&t the network is horrible:("}
-        {"message-id":5,"author-id":6,"in-response-to":2,"sender-location":point("34.7,90.76"),"message":" love sprint the customization is mind-blowing"}
-        {"message-id":6,"author-id":2,"in-response-to":1,"sender-location":point("31.5,75.56"),"message":" like t-mobile its platform is mind-blowing"}
-        {"message-id":7,"author-id":5,"in-response-to":15,"sender-location":point("32.91,85.05"),"message":" dislike sprint the speed is horrible"}
-        {"message-id":8,"author-id":1,"in-response-to":11,"sender-location":point("40.33,80.87"),"message":" like verizon the 3G is awesome:)"}
-        {"message-id":9,"author-id":3,"in-response-to":12,"sender-location":point("34.45,96.48"),"message":" love verizon its wireless is good"}
-        {"message-id":10,"author-id":1,"in-response-to":12,"sender-location":point("42.5,70.01"),"message":" can't stand motorola the touch-screen is terrible"}
-        {"message-id":11,"author-id":1,"in-response-to":1,"sender-location":point("38.97,77.49"),"message":" can't stand at&t its plan is terrible"}
-        {"message-id":12,"author-id":10,"in-response-to":6,"sender-location":point("42.26,77.76"),"message":" can't stand t-mobile its voicemail-service is OMG:("}
-        {"message-id":13,"author-id":10,"in-response-to":4,"sender-location":point("42.77,78.92"),"message":" dislike iphone the voice-command is bad:("}
-        {"message-id":14,"author-id":9,"in-response-to":12,"sender-location":point("41.33,85.28"),"message":" love at&t its 3G is good:)"}
-        {"message-id":15,"author-id":7,"in-response-to":11,"sender-location":point("44.47,67.11"),"message":" like iphone the voicemail-service is awesome"}
+        {"messageId":1,"authorId":3,"inResponseTo":2,"senderLocation":point("47.16,77.75"),"message":" love sprint its shortcut-menu is awesome:)"}
+        {"messageId":2,"authorId":1,"inResponseTo":4,"senderLocation":point("41.66,80.87"),"message":" dislike iphone its touch-screen is horrible"}
+        {"messageId":3,"authorId":2,"inResponseTo":4,"senderLocation":point("48.09,81.01"),"message":" like samsung the plan is amazing"}
+        {"messageId":4,"authorId":1,"inResponseTo":2,"senderLocation":point("37.73,97.04"),"message":" can't stand at&t the network is horrible:("}
+        {"messageId":5,"authorId":6,"inResponseTo":2,"senderLocation":point("34.7,90.76"),"message":" love sprint the customization is mind-blowing"}
+        {"messageId":6,"authorId":2,"inResponseTo":1,"senderLocation":point("31.5,75.56"),"message":" like t-mobile its platform is mind-blowing"}
+        {"messageId":7,"authorId":5,"inResponseTo":15,"senderLocation":point("32.91,85.05"),"message":" dislike sprint the speed is horrible"}
+        {"messageId":8,"authorId":1,"inResponseTo":11,"senderLocation":point("40.33,80.87"),"message":" like verizon the 3G is awesome:)"}
+        {"messageId":9,"authorId":3,"inResponseTo":12,"senderLocation":point("34.45,96.48"),"message":" love verizon its wireless is good"}
+        {"messageId":10,"authorId":1,"inResponseTo":12,"senderLocation":point("42.5,70.01"),"message":" can't stand motorola the touch-screen is terrible"}
+        {"messageId":11,"authorId":1,"inResponseTo":1,"senderLocation":point("38.97,77.49"),"message":" can't stand at&t its plan is terrible"}
+        {"messageId":12,"authorId":10,"inResponseTo":6,"senderLocation":point("42.26,77.76"),"message":" can't stand t-mobile its voicemail-service is OMG:("}
+        {"messageId":13,"authorId":10,"inResponseTo":4,"senderLocation":point("42.77,78.92"),"message":" dislike iphone the voice-command is bad:("}
+        {"messageId":14,"authorId":9,"inResponseTo":12,"senderLocation":point("41.33,85.28"),"message":" love at&t its 3G is good:)"}
+        {"messageId":15,"authorId":7,"inResponseTo":11,"senderLocation":point("44.47,67.11"),"message":" like iphone the voicemail-service is awesome"}
 
-
-It's loading time! We can use AQL _load_ statements to populate our datasets with the sample records shown above.
+It's loading time! We can use AQL _LOAD_ statements to populate our datasets with the sample records shown above.
 The following shows how loading can be done for data stored in `.adm` files in your local filesystem.
 *Note:* You _MUST_ replace the `<Host Name>` and `<Absolute File Path>` placeholders in each load
 statement below with valid values based on the host IP address (or host name) for the machine and
@@ -325,23 +317,20 @@
 (This will lead to a three-slash character sequence at the start of each load statement's file
 input path specification.)
 
-
         use dataverse TinySocial;
 
-        load dataset FacebookUsers using localfs
-        (("path"="<Host Name>://<Absolute File Path>/fbu.adm"),("format"="adm"));
+        load dataset GleambookUsers using localfs
+            (("path"="<Host Name>://<Absolute File Path>/gbu.adm"),("format"="adm"));
 
-        load dataset FacebookMessages using localfs
-        (("path"="<Host Name>://<Absolute File Path>/fbm.adm"),("format"="adm"));
+        load dataset GleambookMessages using localfs
+            (("path"="<Host Name>://<Absolute File Path>/gbm.adm"),("format"="adm"));
 
-        load dataset TwitterUsers using localfs
-        (("path"="<Host Name>://<Absolute File Path>/twu.adm"),("format"="adm"));
+        load dataset ChirpUsers using localfs
+            (("path"="<Host Name>://<Absolute File Path>/chu.adm"),("format"="adm"));
 
-        load dataset TweetMessages using localfs
-        (("path"="<Host Name>://<Absolute File Path>/twm.adm"),("format"="adm"));
+        load dataset ChirpMessages using localfs
+            (("path"="<Host Name>://<Absolute File Path>/chm.adm"),("format"="adm"));
 
-
-----
 ## AQL: Querying Your AsterixDB Data ##
 Congratulations! You now have sample social data stored (and indexed) in AsterixDB.
 (You are part of an elite and adventurous group of individuals. :-))
@@ -384,83 +373,96 @@
 the beginning of a query), the _let_ clause in AQL is like SQL's _with_ clause, and the _where_
 and _order by_ clauses in both languages are similar.
 
+Based on user demand, and in order to let SQL aficionados write AQL queries in their favored ways,
+AQL supports a few synonyms: _from_ for _for_, _select_ for _return_, _with_ for _let_, and
+_keeping_ for _with_ in the group by clause.
+These have been found to help die-hard SQL fans feel a little more at home in AQL and to be less
+likely to (mis)interpret _for_ as imperative looping, _return_ as returning from a function call,
+and so on.
+
 Enough talk!
 Let's go ahead and try writing some queries and see about learning AQL by example.
 
 ### Query 0-A - Exact-Match Lookup ###
-For our first query, let's find a Facebook user based on his or her user id.
+For our first query, let's find a Gleambook user based on his or her user id.
 Suppose the user we want is the user whose id is 8:
 
 
         use dataverse TinySocial;
 
-        for $user in dataset FacebookUsers
+        for $user in dataset GleambookUsers
         where $user.id = 8
         return $user;
 
 The query's _for_ clause  binds the variable `$user` incrementally to the data instances residing in
-the dataset named FacebookUsers.
+the dataset named GleambookUsers.
 Its _where_ clause selects only those bindings having a user id of interest, filtering out the rest.
 The _return_ clause returns the (entire) data instance for each binding that satisfies the predicate.
 Since this dataset is indexed on user id (its primary key), this query will be done via a quick index lookup.
 
 The expected result for our sample data is as follows:
 
-        { "id": 8, "alias": "Nila", "name": "NilaMilliron", "user-since": datetime("2008-01-01T10:10:00.000Z"), "friend-ids": {{ 3 }}, "employment": [ { "organization-name": "Plexlane", "start-date": date("2010-02-28"), "end-date": null } ] }
+        { "id": 8, "alias": "Nila", "name": "NilaMilliron", "userSince": datetime("2008-01-01T10:10:00.000Z"), "friendIds": {{ 3 }}, "employment": [ { "organizationName": "Plexlane", "startDate": date("2010-02-28") } ] }
 
 
-### Query 0-B - Range Scan ###
-AQL, like SQL, supports a variety of different predicates.
-For example, for our next query, let's find the Facebook users whose ids are in the range between 2 and 4:
+Note that, using the SQL keyword synonyms, another way of phrasing the same query would be:
 
         use dataverse TinySocial;
 
-        for $user in dataset FacebookUsers
+        from $user in dataset GleambookUsers
+        where $user.id = 8
+        select $user;
+
+### Query 0-B - Range Scan ###
+AQL, like SQL, supports a variety of different predicates.
+For example, for our next query, let's find the Gleambook users whose ids are in the range between 2 and 4:
+
+        use dataverse TinySocial;
+
+        for $user in dataset GleambookUsers
         where $user.id >= 2 and $user.id <= 4
         return $user;
 
 This query's expected result, also evaluable using the primary index on user id, is:
 
-        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "user-since": datetime("2011-01-22T10:10:00.000Z"), "friend-ids": {{ 1, 4 }}, "employment": [ { "organization-name": "Hexviafind", "start-date": date("2010-04-27"), "end-date": null } ] }
-        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "user-since": datetime("2012-07-10T10:10:00.000Z"), "friend-ids": {{ 1, 5, 8, 9 }}, "employment": [ { "organization-name": "geomedia", "start-date": date("2010-06-17"), "end-date": date("2010-01-26") } ] }
-        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "user-since": datetime("2010-12-27T10:10:00.000Z"), "friend-ids": {{ 2 }}, "employment": [ { "organization-name": "Zamcorporation", "start-date": date("2010-06-08"), "end-date": null } ] }
-
+        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "userSince": datetime("2011-01-22T10:10:00.000Z"), "friendIds": {{ 1, 4 }}, "employment": [ { "organizationName": "Hexviafind", "startDate": date("2010-04-27") } ], "nickname": "Izzy" }
+        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "userSince": datetime("2010-12-27T10:10:00.000Z"), "friendIds": {{ 2 }}, "employment": [ { "organizationName": "Zamcorporation", "startDate": date("2010-06-08") } ] }
+        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "userSince": datetime("2012-07-10T10:10:00.000Z"), "friendIds": {{ 1, 5, 8, 9 }}, "employment": [ { "organizationName": "geomedia", "startDate": date("2010-06-17"), "endDate": date("2010-01-26") } ] }
 
 ### Query 1 - Other Query Filters ###
 AQL can do range queries on any data type that supports the appropriate set of comparators.
-As an example, this next query retrieves the Facebook users who joined between July 22, 2010 and July 29, 2012:
+As an example, this next query retrieves the Gleambook users who joined between July 22, 2010 and July 29, 2012:
 
         use dataverse TinySocial;
 
-        for $user in dataset FacebookUsers
-        where $user.user-since >= datetime('2010-07-22T00:00:00')
-          and $user.user-since <= datetime('2012-07-29T23:59:59')
+        for $user in dataset GleambookUsers
+        where $user.userSince >= datetime('2010-07-22T00:00:00')
+          and $user.userSince <= datetime('2012-07-29T23:59:59')
         return $user;
 
 The expected result for this query, also an indexable query, is as follows:
 
-        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "user-since": datetime("2011-01-22T10:10:00.000Z"), "friend-ids": {{ 1, 4 }}, "employment": [ { "organization-name": "Hexviafind", "start-date": date("2010-04-27"), "end-date": null } ] }
-        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "user-since": datetime("2012-07-10T10:10:00.000Z"), "friend-ids": {{ 1, 5, 8, 9 }}, "employment": [ { "organization-name": "geomedia", "start-date": date("2010-06-17"), "end-date": date("2010-01-26") } ] }
-        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "user-since": datetime("2010-12-27T10:10:00.000Z"), "friend-ids": {{ 2 }}, "employment": [ { "organization-name": "Zamcorporation", "start-date": date("2010-06-08"), "end-date": null } ] }
-        { "id": 10, "alias": "Bram", "name": "BramHatch", "user-since": datetime("2010-10-16T10:10:00.000Z"), "friend-ids": {{ 1, 5, 9 }}, "employment": [ { "organization-name": "physcane", "start-date": date("2007-06-05"), "end-date": date("2011-11-05") } ] }
-
+        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "userSince": datetime("2011-01-22T10:10:00.000Z"), "friendIds": {{ 1, 4 }}, "employment": [ { "organizationName": "Hexviafind", "startDate": date("2010-04-27") } ], "nickname": "Izzy" }
+        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "userSince": datetime("2010-12-27T10:10:00.000Z"), "friendIds": {{ 2 }}, "employment": [ { "organizationName": "Zamcorporation", "startDate": date("2010-06-08") } ] }
+        { "id": 10, "alias": "Bram", "name": "BramHatch", "userSince": datetime("2010-10-16T10:10:00.000Z"), "friendIds": {{ 1, 5, 9 }}, "employment": [ { "organizationName": "physcane", "startDate": date("2007-06-05"), "endDate": date("2011-11-05") } ] }
+        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "userSince": datetime("2012-07-10T10:10:00.000Z"), "friendIds": {{ 1, 5, 8, 9 }}, "employment": [ { "organizationName": "geomedia", "startDate": date("2010-06-17"), "endDate": date("2010-01-26") } ] }
 
 ### Query 2-A - Equijoin ###
 In addition to simply binding variables to data instances and returning them "whole",
 an AQL query can construct new ADM instances to return based on combinations of its variable bindings.
 This gives AQL the power to do joins much like those done using multi-table _from_ clauses in SQL.
-For example, suppose we wanted a list of all Facebook users paired with their associated messages,
-with the list enumerating the author name and the message text associated with each Facebook message.
+For example, suppose we wanted a list of all Gleambook users paired with their associated messages,
+with the list enumerating the author name and the message text associated with each Gleambook message.
 We could do this as follows in AQL:
 
         use dataverse TinySocial;
 
-        for $user in dataset FacebookUsers
-        for $message in dataset FacebookMessages
-        where $message.author-id = $user.id
+        for $user in dataset GleambookUsers
+        for $message in dataset GleambookMessages
+        where $message.authorId = $user.id
         return {
-        "uname": $user.name,
-        "message": $message.message
+            "uname": $user.name,
+            "message": $message.message
         };
 
 The result of this query is a sequence of new ADM instances, one for each author/message pair.
@@ -472,22 +474,33 @@
 
 The expected result of this example AQL join query for our sample data set is:
 
+        { "uname": "WillisWynne", "message": " love sprint the customization is mind-blowing" }
+        { "uname": "MargaritaStoddard", "message": " can't stand at&t its plan is terrible" }
         { "uname": "MargaritaStoddard", "message": " dislike iphone its touch-screen is horrible" }
         { "uname": "MargaritaStoddard", "message": " can't stand at&t the network is horrible:(" }
         { "uname": "MargaritaStoddard", "message": " like verizon the 3G is awesome:)" }
         { "uname": "MargaritaStoddard", "message": " can't stand motorola the touch-screen is terrible" }
-        { "uname": "MargaritaStoddard", "message": " can't stand at&t its plan is terrible" }
-        { "uname": "IsbelDull", "message": " like samsung the plan is amazing" }
         { "uname": "IsbelDull", "message": " like t-mobile its platform is mind-blowing" }
-        { "uname": "EmoryUnk", "message": " love sprint its shortcut-menu is awesome:)" }
-        { "uname": "EmoryUnk", "message": " love verizon its wireless is good" }
-        { "uname": "VonKemble", "message": " dislike sprint the speed is horrible" }
-        { "uname": "WillisWynne", "message": " love sprint the customization is mind-blowing" }
-        { "uname": "SuzannaTillson", "message": " like iphone the voicemail-service is awesome" }
+        { "uname": "IsbelDull", "message": " like samsung the plan is amazing" }
         { "uname": "WoodrowNehling", "message": " love at&t its 3G is good:)" }
         { "uname": "BramHatch", "message": " can't stand t-mobile its voicemail-service is OMG:(" }
         { "uname": "BramHatch", "message": " dislike iphone the voice-command is bad:(" }
+        { "uname": "EmoryUnk", "message": " love sprint its shortcut-menu is awesome:)" }
+        { "uname": "EmoryUnk", "message": " love verizon its wireless is good" }
+        { "uname": "VonKemble", "message": " dislike sprint the speed is horrible" }
+        { "uname": "SuzannaTillson", "message": " like iphone the voicemail-service is awesome" }
 
+Again, as an aside, note that the same query expressed using AQL's SQL keyword synonyms would be:
+
+        use dataverse TinySocial;
+
+        from $user in dataset GleambookUsers
+        from $message in dataset GleambookMessages
+        where $message.authorId = $user.id
+        select {
+            "uname": $user.name,
+            "message": $message.message
+        };
 
 ### Query 2-B - Index join ###
 By default, AsterixDB evaluates equijoin queries using hash-based join methods that work
@@ -506,12 +519,12 @@
 
         use dataverse TinySocial;
 
-        for $user in dataset FacebookUsers
-        for $message in dataset FacebookMessages
-        where $message.author-id /*+ indexnl */  = $user.id
+        for $user in dataset GleambookUsers
+        for $message in dataset GleambookMessages
+        where $message.authorId /*+ indexnl */  = $user.id
         return {
-        "uname": $user.name,
-        "message": $message.message
+            "uname": $user.name,
+            "message": $message.message
         };
 
 
@@ -519,22 +532,21 @@
 Result ordering is (intentionally) undefined in AQL in the absence of an _order by_ clause.
 The query result for our sample data in this case is:
 
+        { "uname": "IsbelDull", "message": " like t-mobile its platform is mind-blowing" }
+        { "uname": "MargaritaStoddard", "message": " can't stand at&t its plan is terrible" }
+        { "uname": "BramHatch", "message": " can't stand t-mobile its voicemail-service is OMG:(" }
+        { "uname": "WoodrowNehling", "message": " love at&t its 3G is good:)" }
         { "uname": "EmoryUnk", "message": " love sprint its shortcut-menu is awesome:)" }
         { "uname": "MargaritaStoddard", "message": " dislike iphone its touch-screen is horrible" }
-        { "uname": "IsbelDull", "message": " like samsung the plan is amazing" }
         { "uname": "MargaritaStoddard", "message": " can't stand at&t the network is horrible:(" }
-        { "uname": "WillisWynne", "message": " love sprint the customization is mind-blowing" }
-        { "uname": "IsbelDull", "message": " like t-mobile its platform is mind-blowing" }
-        { "uname": "VonKemble", "message": " dislike sprint the speed is horrible" }
+        { "uname": "BramHatch", "message": " dislike iphone the voice-command is bad:(" }
+        { "uname": "SuzannaTillson", "message": " like iphone the voicemail-service is awesome" }
         { "uname": "MargaritaStoddard", "message": " like verizon the 3G is awesome:)" }
         { "uname": "EmoryUnk", "message": " love verizon its wireless is good" }
         { "uname": "MargaritaStoddard", "message": " can't stand motorola the touch-screen is terrible" }
-        { "uname": "MargaritaStoddard", "message": " can't stand at&t its plan is terrible" }
-        { "uname": "BramHatch", "message": " can't stand t-mobile its voicemail-service is OMG:(" }
-        { "uname": "BramHatch", "message": " dislike iphone the voice-command is bad:(" }
-        { "uname": "WoodrowNehling", "message": " love at&t its 3G is good:)" }
-        { "uname": "SuzannaTillson", "message": " like iphone the voicemail-service is awesome" }
-
+        { "uname": "IsbelDull", "message": " like samsung the plan is amazing" }
+        { "uname": "WillisWynne", "message": " love sprint the customization is mind-blowing" }
+        { "uname": "VonKemble", "message": " dislike sprint the speed is horrible" }
 
 (It is worth knowing, with respect to influencing AsterixDB's query evaluation, that nested _for_
 clauses---a.k.a. joins--- are currently evaluated with the "outer" clause probing the data of the "inner"
@@ -549,7 +561,7 @@
 
 The AQL language supports nesting, both of queries and of query results, and the combination allows for
 an arguably cleaner/more natural approach to such queries.
-As an example, supposed we wanted, for each Facebook user, to produce a record that has his/her name
+As an example, suppose we wanted, for each Gleambook user, to produce a record that has his/her name
 plus a list of the messages written by that user.
 In SQL, this would involve a left outer join between users and messages, grouping by user, and having
 the user name repeated along side each message.
@@ -557,15 +569,15 @@
 
         use dataverse TinySocial;
 
-        for $user in dataset FacebookUsers
+        for $user in dataset GleambookUsers
         return {
-        "uname": $user.name,
-        "messages": for $message in dataset FacebookMessages
-                where $message.author-id = $user.id
-                return $message.message
+            "uname": $user.name,
+            "messages": for $message in dataset GleambookMessages
+                        where $message.authorId = $user.id
+                        return $message.message
         };
 
-This AQL query binds the variable `$user` to the data instances in FacebookUsers;
+This AQL query binds the variable `$user` to the data instances in GleambookUsers;
 for each user, it constructs a result record containing a "uname" field with the user's
 name and a "messages" field with a nested collection of all messages for that user.
 The nested collection for each user is specified by using a correlated subquery.
@@ -575,17 +587,16 @@
 
 Here is this example query's expected output:
 
-        { "uname": "MargaritaStoddard", "messages": [ " dislike iphone its touch-screen is horrible", " can't stand at&t the network is horrible:(", " like verizon the 3G is awesome:)", " can't stand motorola the touch-screen is terrible", " can't stand at&t its plan is terrible" ] }
-        { "uname": "IsbelDull", "messages": [ " like samsung the plan is amazing", " like t-mobile its platform is mind-blowing" ] }
-        { "uname": "EmoryUnk", "messages": [ " love sprint its shortcut-menu is awesome:)", " love verizon its wireless is good" ] }
-        { "uname": "NicholasStroh", "messages": [  ] }
-        { "uname": "VonKemble", "messages": [ " dislike sprint the speed is horrible" ] }
         { "uname": "WillisWynne", "messages": [ " love sprint the customization is mind-blowing" ] }
-        { "uname": "SuzannaTillson", "messages": [ " like iphone the voicemail-service is awesome" ] }
+        { "uname": "MargaritaStoddard", "messages": [ " can't stand at&t its plan is terrible", " dislike iphone its touch-screen is horrible", " can't stand at&t the network is horrible:(", " like verizon the 3G is awesome:)", " can't stand motorola the touch-screen is terrible" ] }
+        { "uname": "IsbelDull", "messages": [ " like t-mobile its platform is mind-blowing", " like samsung the plan is amazing" ] }
+        { "uname": "NicholasStroh", "messages": [  ] }
         { "uname": "NilaMilliron", "messages": [  ] }
         { "uname": "WoodrowNehling", "messages": [ " love at&t its 3G is good:)" ] }
-        { "uname": "BramHatch", "messages": [ " dislike iphone the voice-command is bad:(", " can't stand t-mobile its voicemail-service is OMG:(" ] }
-
+        { "uname": "BramHatch", "messages": [ " can't stand t-mobile its voicemail-service is OMG:(", " dislike iphone the voice-command is bad:(" ] }
+        { "uname": "EmoryUnk", "messages": [ " love sprint its shortcut-menu is awesome:)", " love verizon its wireless is good" ] }
+        { "uname": "VonKemble", "messages": [ " dislike sprint the speed is horrible" ] }
+        { "uname": "SuzannaTillson", "messages": [ " like iphone the voicemail-service is awesome" ] }
 
 ### Query 4 - Theta Join ###
 Not all joins are expressible as equijoins and computable using equijoin-oriented algorithms.
@@ -593,40 +604,40 @@
 expression of such queries and will still evaluate them as best it can using nested loop based
 techniques (and broadcast joins in the parallel case).
 
-As an example of such a use case, suppose that we wanted, for each tweet T, to find all of the
-other tweets that originated from within a circle of radius of 1 surrounding tweet T's location.
+As an example of such a use case, suppose that we wanted, for each chirp T, to find all of the
+other chirps that originated from within a circle of radius 1 surrounding chirp T's location.
 In AQL, this can be specified in a manner similar to the previous query using one of the built-in
 functions on the spatial data type instead of id equality in the correlated query's _where_ clause:
 
         use dataverse TinySocial;
 
-        for $t in dataset TweetMessages
+        for $cm in dataset ChirpMessages
         return {
-        "message": $t.message-text,
-        "nearby-messages": for $t2 in dataset TweetMessages
-                    where spatial-distance($t.sender-location, $t2.sender-location) <= 1
-                    return { "msgtxt":$t2.message-text}
+            "message": $cm.messageText,
+            "nearbyMessages": for $cm2 in dataset ChirpMessages
+                              where spatial-distance($cm.senderLocation, $cm2.senderLocation) <= 1
+                              return { "msgtxt":$cm2.messageText}
         };
 
 Here is the expected result for this query:
 
-        { "message": " love t-mobile its customization is good:)", "nearby-messages": [ { "msgtxt": " love t-mobile its customization is good:)" } ] }
-        { "message": " hate verizon its voice-clarity is OMG:(", "nearby-messages": [ { "msgtxt": " like motorola the speed is good:)" }, { "msgtxt": " hate verizon its voice-clarity is OMG:(" } ] }
-        { "message": " can't stand iphone its platform is terrible", "nearby-messages": [ { "msgtxt": " can't stand iphone its platform is terrible" } ] }
-        { "message": " like samsung the voice-command is amazing:)", "nearby-messages": [ { "msgtxt": " like samsung the voice-command is amazing:)" } ] }
-        { "message": " like verizon its shortcut-menu is awesome:)", "nearby-messages": [ { "msgtxt": " like verizon its shortcut-menu is awesome:)" } ] }
-        { "message": " like motorola the speed is good:)", "nearby-messages": [ { "msgtxt": " hate verizon its voice-clarity is OMG:(" }, { "msgtxt": " like motorola the speed is good:)" } ] }
-        { "message": " like sprint the voice-command is mind-blowing:)", "nearby-messages": [ { "msgtxt": " like sprint the voice-command is mind-blowing:)" } ] }
-        { "message": " can't stand motorola its speed is terrible:(", "nearby-messages": [ { "msgtxt": " can't stand motorola its speed is terrible:(" } ] }
-        { "message": " like iphone the voice-clarity is good:)", "nearby-messages": [ { "msgtxt": " like iphone the voice-clarity is good:)" } ] }
-        { "message": " like samsung the platform is good", "nearby-messages": [ { "msgtxt": " like samsung the platform is good" } ] }
-        { "message": " like t-mobile the shortcut-menu is awesome:)", "nearby-messages": [ { "msgtxt": " like t-mobile the shortcut-menu is awesome:)" } ] }
-        { "message": " love verizon its voicemail-service is awesome", "nearby-messages": [ { "msgtxt": " love verizon its voicemail-service is awesome" } ] }
+        { "message": " can't stand iphone its platform is terrible", "nearbyMessages": [ { "msgtxt": " can't stand iphone its platform is terrible" } ] }
+        { "message": " like verizon its shortcut-menu is awesome:)", "nearbyMessages": [ { "msgtxt": " like verizon its shortcut-menu is awesome:)" } ] }
+        { "message": " like sprint the voice-command is mind-blowing:)", "nearbyMessages": [ { "msgtxt": " like sprint the voice-command is mind-blowing:)" } ] }
+        { "message": " love verizon its voicemail-service is awesome", "nearbyMessages": [ { "msgtxt": " love verizon its voicemail-service is awesome" } ] }
+        { "message": " love t-mobile its customization is good:)", "nearbyMessages": [ { "msgtxt": " love t-mobile its customization is good:)" } ] }
+        { "message": " can't stand motorola its speed is terrible:(", "nearbyMessages": [ { "msgtxt": " can't stand motorola its speed is terrible:(" } ] }
+        { "message": " like motorola the speed is good:)", "nearbyMessages": [ { "msgtxt": " like motorola the speed is good:)" }, { "msgtxt": " hate verizon its voice-clarity is OMG:(" } ] }
+        { "message": " like iphone the voice-clarity is good:)", "nearbyMessages": [ { "msgtxt": " like iphone the voice-clarity is good:)" } ] }
+        { "message": " like samsung the platform is good", "nearbyMessages": [ { "msgtxt": " like samsung the platform is good" } ] }
+        { "message": " hate verizon its voice-clarity is OMG:(", "nearbyMessages": [ { "msgtxt": " like motorola the speed is good:)" }, { "msgtxt": " hate verizon its voice-clarity is OMG:(" } ] }
+        { "message": " like samsung the voice-command is amazing:)", "nearbyMessages": [ { "msgtxt": " like samsung the voice-command is amazing:)" } ] }
+        { "message": " like t-mobile the shortcut-menu is awesome:)", "nearbyMessages": [ { "msgtxt": " like t-mobile the shortcut-menu is awesome:)" } ] }
 
 
 ### Query 5 - Fuzzy Join ###
-As another example of a non-equijoin use case, we could ask AsterixDB to find, for each Facebook user,
-all Twitter users with names "similar" to their name.
+As another example of a non-equijoin use case, we could ask AsterixDB to find, for each Gleambook user,
+all Chirp users with names "similar" to their name.
 AsterixDB supports a variety of "fuzzy match" functions for use with textual and set-based data.
 As one example, we could choose to use edit distance with a threshold of 3 as the definition of name
 similarity, in which case we could write the following query using AQL's operator-based syntax (~=)
@@ -637,125 +648,118 @@
         set simfunction "edit-distance";
         set simthreshold "3";
 
-        for $fbu in dataset FacebookUsers
+        for $gbu in dataset GleambookUsers
         return {
-            "id": $fbu.id,
-            "name": $fbu.name,
-            "similar-users": for $t in dataset TweetMessages
-                    let $tu := $t.user
-                    where $tu.name ~= $fbu.name
-                    return {
-                    "twitter-screenname": $tu.screen-name,
-                    "twitter-name": $tu.name
-                    }
+            "id": $gbu.id,
+            "name": $gbu.name,
+            "similarUsers": for $cm in dataset ChirpMessages
+                            let $cu := $cm.user
+                            where $cu.name ~= $gbu.name
+                            return {
+                                "chirpScreenname": $cu.screenName,
+                                "chirpName": $cu.name
+                            }
         };
 
 The expected result for this query against our sample data is:
 
-        { "id": 1, "name": "MargaritaStoddard", "similar-users": [  ] }
-        { "id": 2, "name": "IsbelDull", "similar-users": [  ] }
-        { "id": 3, "name": "EmoryUnk", "similar-users": [  ] }
-        { "id": 4, "name": "NicholasStroh", "similar-users": [  ] }
-        { "id": 5, "name": "VonKemble", "similar-users": [  ] }
-        { "id": 6, "name": "WillisWynne", "similar-users": [  ] }
-        { "id": 7, "name": "SuzannaTillson", "similar-users": [  ] }
-        { "id": 8, "name": "NilaMilliron", "similar-users": [ { "twitter-screenname": "NilaMilliron_tw", "twitter-name": "Nila Milliron" } ] }
-        { "id": 9, "name": "WoodrowNehling", "similar-users": [  ] }
-        { "id": 10, "name": "BramHatch", "similar-users": [  ] }
-
+        { "id": 6, "name": "WillisWynne", "similarUsers": [  ] }
+        { "id": 1, "name": "MargaritaStoddard", "similarUsers": [  ] }
+        { "id": 2, "name": "IsbelDull", "similarUsers": [  ] }
+        { "id": 4, "name": "NicholasStroh", "similarUsers": [  ] }
+        { "id": 8, "name": "NilaMilliron", "similarUsers": [ { "chirpScreenname": "NilaMilliron_tw", "chirpName": "Nila Milliron" } ] }
+        { "id": 9, "name": "WoodrowNehling", "similarUsers": [  ] }
+        { "id": 10, "name": "BramHatch", "similarUsers": [  ] }
+        { "id": 3, "name": "EmoryUnk", "similarUsers": [  ] }
+        { "id": 5, "name": "VonKemble", "similarUsers": [  ] }
+        { "id": 7, "name": "SuzannaTillson", "similarUsers": [  ] }
 
 ### Query 6 - Existential Quantification ###
 The expressive power of AQL includes support for queries involving "some" (existentially quantified)
 and "all" (universally quantified) query semantics.
-As an example of an existential AQL query, here we show a query to list the Facebook users who are currently employed.
-Such employees will have an employment history containing a record with the end-date value missing, which leads us to the
+As an example of an existential AQL query, here we show a query to list the Gleambook users who are currently employed.
+Such employees will have an employment history containing a record with the endDate value missing, which leads us to the
 following AQL query:
 
         use dataverse TinySocial;
 
-        for $fbu in dataset FacebookUsers
-        where (some $e in $fbu.employment satisfies is-missing($e.end-date))
-        return $fbu;
+        for $gbu in dataset GleambookUsers
+        where (some $e in $gbu.employment satisfies is-missing($e.endDate))
+        return $gbu;
 
 The expected result in this case is:
 
-        { "id": 1, "alias": "Margarita", "name": "MargaritaStoddard", "user-since": datetime("2012-08-20T10:10:00.000Z"), "friend-ids": {{ 2, 3, 6, 10 }}, "employment": [ { "organization-name": "Codetechno", "start-date": date("2006-08-06"), "end-date": null } ] }
-        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "user-since": datetime("2011-01-22T10:10:00.000Z"), "friend-ids": {{ 1, 4 }}, "employment": [ { "organization-name": "Hexviafind", "start-date": date("2010-04-27"), "end-date": null } ] }
-        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "user-since": datetime("2010-12-27T10:10:00.000Z"), "friend-ids": {{ 2 }}, "employment": [ { "organization-name": "Zamcorporation", "start-date": date("2010-06-08"), "end-date": null } ] }
-        { "id": 5, "alias": "Von", "name": "VonKemble", "user-since": datetime("2010-01-05T10:10:00.000Z"), "friend-ids": {{ 3, 6, 10 }}, "employment": [ { "organization-name": "Kongreen", "start-date": date("2010-11-27"), "end-date": null } ] }
-        { "id": 6, "alias": "Willis", "name": "WillisWynne", "user-since": datetime("2005-01-17T10:10:00.000Z"), "friend-ids": {{ 1, 3, 7 }}, "employment": [ { "organization-name": "jaydax", "start-date": date("2009-05-15"), "end-date": null } ] }
-        { "id": 7, "alias": "Suzanna", "name": "SuzannaTillson", "user-since": datetime("2012-08-07T10:10:00.000Z"), "friend-ids": {{ 6 }}, "employment": [ { "organization-name": "Labzatron", "start-date": date("2011-04-19"), "end-date": null } ] }
-        { "id": 8, "alias": "Nila", "name": "NilaMilliron", "user-since": datetime("2008-01-01T10:10:00.000Z"), "friend-ids": {{ 3 }}, "employment": [ { "organization-name": "Plexlane", "start-date": date("2010-02-28"), "end-date": null } ] }
-
+        { "id": 6, "alias": "Willis", "name": "WillisWynne", "userSince": datetime("2005-01-17T10:10:00.000Z"), "friendIds": {{ 1, 3, 7 }}, "employment": [ { "organizationName": "jaydax", "startDate": date("2009-05-15") } ] }
+        { "id": 1, "alias": "Margarita", "name": "MargaritaStoddard", "userSince": datetime("2012-08-20T10:10:00.000Z"), "friendIds": {{ 2, 3, 6, 10 }}, "employment": [ { "organizationName": "Codetechno", "startDate": date("2006-08-06") }, { "organizationName": "geomedia", "startDate": date("2010-06-17"), "endDate": date("2010-01-26") } ], "nickname": "Mags", "gender": "F" }
+        { "id": 2, "alias": "Isbel", "name": "IsbelDull", "userSince": datetime("2011-01-22T10:10:00.000Z"), "friendIds": {{ 1, 4 }}, "employment": [ { "organizationName": "Hexviafind", "startDate": date("2010-04-27") } ], "nickname": "Izzy" }
+        { "id": 4, "alias": "Nicholas", "name": "NicholasStroh", "userSince": datetime("2010-12-27T10:10:00.000Z"), "friendIds": {{ 2 }}, "employment": [ { "organizationName": "Zamcorporation", "startDate": date("2010-06-08") } ] }
+        { "id": 8, "alias": "Nila", "name": "NilaMilliron", "userSince": datetime("2008-01-01T10:10:00.000Z"), "friendIds": {{ 3 }}, "employment": [ { "organizationName": "Plexlane", "startDate": date("2010-02-28") } ] }
+        { "id": 5, "alias": "Von", "name": "VonKemble", "userSince": datetime("2010-01-05T10:10:00.000Z"), "friendIds": {{ 3, 6, 10 }}, "employment": [ { "organizationName": "Kongreen", "startDate": date("2010-11-27") } ] }
+        { "id": 7, "alias": "Suzanna", "name": "SuzannaTillson", "userSince": datetime("2012-08-07T10:10:00.000Z"), "friendIds": {{ 6 }}, "employment": [ { "organizationName": "Labzatron", "startDate": date("2011-04-19") } ] }
 
 ### Query 7 - Universal Quantification ###
-As an example of a universal AQL query, here we show a query to list the Facebook users who are currently unemployed.
-Such employees will have an employment history containing no records that miss end-date values, leading us to the
+As an example of a universal AQL query, here we show a query to list the Gleambook users who are currently unemployed.
+Such users will have an employment history containing no records with missing endDate values, leading us to the
 following AQL query:
 
         use dataverse TinySocial;
 
-        for $fbu in dataset FacebookUsers
-        where (every $e in $fbu.employment satisfies not(is-missing($e.end-date)))
-        return $fbu;
+        for $gbu in dataset GleambookUsers
+        where (every $e in $gbu.employment satisfies not(is-missing($e.endDate)))
+        return $gbu;
 
 Here is the expected result for our sample data:
 
-        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "user-since": datetime("2012-07-10T10:10:00.000Z"), "friend-ids": {{ 1, 5, 8, 9 }}, "employment": [ { "organization-name": "geomedia", "start-date": date("2010-06-17"), "end-date": date("2010-01-26") } ] }
-        { "id": 9, "alias": "Woodrow", "name": "WoodrowNehling", "user-since": datetime("2005-09-20T10:10:00.000Z"), "friend-ids": {{ 3, 10 }}, "employment": [ { "organization-name": "Zuncan", "start-date": date("2003-04-22"), "end-date": date("2009-12-13") } ] }
-        { "id": 10, "alias": "Bram", "name": "BramHatch", "user-since": datetime("2010-10-16T10:10:00.000Z"), "friend-ids": {{ 1, 5, 9 }}, "employment": [ { "organization-name": "physcane", "start-date": date("2007-06-05"), "end-date": date("2011-11-05") } ] }
-
+        { "id": 9, "alias": "Woodrow", "name": "WoodrowNehling", "userSince": datetime("2005-09-20T10:10:00.000Z"), "friendIds": {{ 3, 10 }}, "employment": [ { "organizationName": "Zuncan", "startDate": date("2003-04-22"), "endDate": date("2009-12-13") } ], "nickname": "Woody" }
+        { "id": 10, "alias": "Bram", "name": "BramHatch", "userSince": datetime("2010-10-16T10:10:00.000Z"), "friendIds": {{ 1, 5, 9 }}, "employment": [ { "organizationName": "physcane", "startDate": date("2007-06-05"), "endDate": date("2011-11-05") } ] }
+        { "id": 3, "alias": "Emory", "name": "EmoryUnk", "userSince": datetime("2012-07-10T10:10:00.000Z"), "friendIds": {{ 1, 5, 8, 9 }}, "employment": [ { "organizationName": "geomedia", "startDate": date("2010-06-17"), "endDate": date("2010-01-26") } ] }
 
 ### Query 8 - Simple Aggregation ###
 Like SQL, the AQL language of AsterixDB provides support for computing aggregates over large amounts of data.
-As a very simple example, the following AQL query computes the total number of Facebook users:
+As a very simple example, the following AQL query computes the total number of Gleambook users:
 
         use dataverse TinySocial;
 
-        count(for $fbu in dataset FacebookUsers return $fbu);
+        count(for $gbu in dataset GleambookUsers return $gbu);
 
 In AQL, aggregate functions can be applied to arbitrary subquery results; in this case, the count function
-is applied to the result of a query that enumerates the Facebook users.  The expected result here is:
+is applied to the result of a query that enumerates the Gleambook users.  The expected result here is:
 
         10
 
-
-
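+Because the argument of an aggregate function can be an arbitrary subquery, the counted collection can
+just as easily be a filtered one. As a small illustrative sketch, the following counts only the users
+who joined in 2010 or later (which, for the sample data above, should yield 7):
+
+        use dataverse TinySocial;
+
+        count(for $gbu in dataset GleambookUsers
+              where $gbu.userSince >= datetime('2010-01-01T00:00:00')
+              return $gbu);
+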
 ### Query 9-A - Grouping and Aggregation ###
 Also like SQL, AQL supports grouped aggregation.
-For every Twitter user, the following group-by/aggregate query counts the number of tweets sent by that user:
+For every Chirp user, the following group-by/aggregate query counts the number of chirps sent by that user:
 
         use dataverse TinySocial;
 
-        for $t in dataset TweetMessages
-        group by $uid := $t.user.screen-name with $t
+        for $cm in dataset ChirpMessages
+        group by $uid := $cm.user.screenName with $cm
         return {
-        "user": $uid,
-        "count": count($t)
+            "user": $uid,
+            "count": count($cm)
         };
 
-The _for_ clause incrementally binds $t to tweets, and the _group by_ clause groups the tweets by its
-issuer's Twitter screen-name.
+The _for_ clause incrementally binds $cm to chirps, and the _group by_ clause groups the chirps by their
+issuer's Chirp screenName.
 Unlike SQL, where data is tabular---flat---the data model underlying AQL allows for nesting.
-Thus, following the _group by_ clause, the _return_ clause in this query sees a sequence of $t groups,
-with each such group having an associated $uid variable value (i.e., the tweeting user's screen name).
-In the context of the return clause, due to "... with $t ...", $uid is bound to the tweeter's id and $t
-is bound to the _set_ of tweets issued by that tweeter.
-The return clause constructs a result record containing the tweeter's user id and the count of the items
-in the associated tweet set.
+Thus, following the _group by_ clause, the _return_ clause in this query sees a sequence of $cm groups,
+with each such group having an associated $uid variable value (i.e., the chirping user's screen name).
+In the context of the return clause, due to "... with $cm ...", $uid is bound to the chirper's id and $cm
+is bound to the _set_ of chirps issued by that chirper.
+The return clause constructs a result record containing the chirper's user id and the count of the items
+in the associated chirp set.
 The query result will contain one such record per screen name.
 This query also illustrates another feature of AQL; notice that each user's screen name is accessed via a
-path syntax that traverses each tweet's nested record structure.
+path syntax that traverses each chirp's nested record structure.
 
 Here is the expected result for this query over the sample data:
 
+        { "user": "OliJackson_512", "count": 1 }
         { "user": "ChangEwing_573", "count": 1 }
         { "user": "ColineGeyer@63", "count": 3 }
         { "user": "NathanGiesen@211", "count": 6 }
         { "user": "NilaMilliron_tw", "count": 1 }
-        { "user": "OliJackson_512", "count": 1 }
-
-
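+
+As with Queries 0-A and 2-A, the SQL keyword synonyms can be used here too, including _keeping_ in
+place of the group by clause's _with_; a sketch of the synonym form of this query is:
+
+        use dataverse TinySocial;
+
+        from $cm in dataset ChirpMessages
+        group by $uid := $cm.user.screenName keeping $cm
+        select {
+            "user": $uid,
+            "count": count($cm)
+        };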
 
 ### Query 9-B - (Hash-Based) Grouping and Aggregation ###
 As for joins, AsterixDB has multiple evaluation strategies available for processing grouped aggregate queries.
@@ -767,36 +771,34 @@
 
         use dataverse TinySocial;
 
-        for $t in dataset TweetMessages
+        for $cm in dataset ChirpMessages
         /*+ hash*/
-        group by $uid := $t.user.screen-name with $t
+        group by $uid := $cm.user.screenName with $cm
         return {
-        "user": $uid,
-        "count": count($t)
+            "user": $uid,
+            "count": count($cm)
         };
 
 Here is the expected result:
 
         { "user": "OliJackson_512", "count": 1 }
+        { "user": "ChangEwing_573", "count": 1 }
         { "user": "ColineGeyer@63", "count": 3 }
         { "user": "NathanGiesen@211", "count": 6 }
         { "user": "NilaMilliron_tw", "count": 1 }
-        { "user": "ChangEwing_573", "count": 1 }
-
-
 
 ### Query 10 - Grouping and Limits ###
 In some use cases it is not necessary to compute the entire answer to a query.
 In some cases, just having the first _N_ or top _N_ results is sufficient.
 This is expressible in AQL using the _limit_ clause combined with the _order by_ clause.
 
+The following AQL query returns the top 3 Chirp users based on who has issued the most chirps:
+The following AQL  query returns the top 3 Chirp users based on who has issued the most chirps:
 
         use dataverse TinySocial;
 
-        for $t in dataset TweetMessages
-        group by $uid := $t.user.screen-name with $t
-        let $c := count($t)
+        for $cm in dataset ChirpMessages
+        group by $uid := $cm.user.screenName with $cm
+        let $c := count($cm)
         order by $c desc
         limit 3
         return {
@@ -808,74 +810,72 @@
 
         { "user": "NathanGiesen@211", "count": 6 }
         { "user": "ColineGeyer@63", "count": 3 }
-        { "user": "NilaMilliron_tw", "count": 1 }
-
+        { "user": "OliJackson_512", "count": 1 }
 
 ### Query 11 - Left Outer Fuzzy Join ###
-As a last example of AQL and its query power, the following query, for each tweet,
-finds all of the tweets that are similar based on the topics that they refer to:
+As a last example of AQL and its query power, the following query, for each chirp,
+finds all of the chirps that are similar based on the topics that they refer to:
 
         use dataverse TinySocial;
 
         set simfunction "jaccard";
         set simthreshold "0.3";
 
-        for $t in dataset TweetMessages
+        for $cm in dataset ChirpMessages
         return {
-            "tweet": $t,
-            "similar-tweets": for $t2 in dataset TweetMessages
-                    where  $t2.referred-topics ~= $t.referred-topics
-                    and $t2.tweetid != $t.tweetid
-                    return $t2.referred-topics
+            "chirp": $cm,
+            "similarChirps": for $cm2 in dataset ChirpMessages
+                             where  $cm2.referredTopics ~= $cm.referredTopics
+                             and $cm2.chirpId != $cm.chirpId
+                             return $cm2.referredTopics
         };
 
 This query illustrates several things worth knowing in order to write fuzzy queries in AQL.
 First, as mentioned earlier, AQL offers an operator-based syntax for seeing whether two values are "similar" to one another or not.
-Second, recall that the referred-topics field of records of datatype TweetMessageType is a bag of strings.
+Second, recall that the referredTopics field of records of datatype ChirpMessageType is a bag of strings.
 This query sets the context for its similarity join by requesting that Jaccard-based similarity semantics
 ([http://en.wikipedia.org/wiki/Jaccard_index](http://en.wikipedia.org/wiki/Jaccard_index))
 be used for the query's similarity operator and that a similarity index of 0.3 be used as its similarity threshold.
 
 The expected result for this fuzzy join query is:
 
-        { "tweet": { "tweetid": "1", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("47.44,80.65"), "send-time": datetime("2008-04-26T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customization" }}, "message-text": " love t-mobile its customization is good:)" }, "similar-tweets": [ {{ "t-mobile", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "10", "user": { "screen-name": "ColineGeyer@63", "lang": "en", "friends_count": 121, "statuses_count": 362, "name": "Coline Geyer", "followers_count": 17159 }, "sender-location": point("29.15,76.53"), "send-time": datetime("2008-01-26T10:10:00.000Z"), "referred-topics": {{ "verizon", "voice-clarity" }}, "message-text": " hate verizon its voice-clarity is OMG:(" }, "similar-tweets": [ {{ "iphone", "voice-clarity" }}, {{ "verizon", "voicemail-service" }}, {{ "verizon", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "11", "user": { "screen-name": "NilaMilliron_tw", "lang": "en", "friends_count": 445, "statuses_count": 164, "name": "Nila Milliron", "followers_count": 22649 }, "sender-location": point("37.59,68.42"), "send-time": datetime("2008-03-09T10:10:00.000Z"), "referred-topics": {{ "iphone", "platform" }}, "message-text": " can't stand iphone its platform is terrible" }, "similar-tweets": [ {{ "iphone", "voice-clarity" }}, {{ "samsung", "platform" }} ] }
-        { "tweet": { "tweetid": "12", "user": { "screen-name": "OliJackson_512", "lang": "en", "friends_count": 445, "statuses_count": 164, "name": "Oli Jackson", "followers_count": 22649 }, "sender-location": point("24.82,94.63"), "send-time": datetime("2010-02-13T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-command" }}, "message-text": " like samsung the voice-command is amazing:)" }, "similar-tweets": [ {{ "samsung", "platform" }}, {{ "sprint", "voice-command" }} ] }
-        { "tweet": { "tweetid": "2", "user": { "screen-name": "ColineGeyer@63", "lang": "en", "friends_count": 121, "statuses_count": 362, "name": "Coline Geyer", "followers_count": 17159 }, "sender-location": point("32.84,67.14"), "send-time": datetime("2010-05-13T10:10:00.000Z"), "referred-topics": {{ "verizon", "shortcut-menu" }}, "message-text": " like verizon its shortcut-menu is awesome:)" }, "similar-tweets": [ {{ "verizon", "voicemail-service" }}, {{ "verizon", "voice-clarity" }}, {{ "t-mobile", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "3", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("29.72,75.8"), "send-time": datetime("2006-11-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " like motorola the speed is good:)" }, "similar-tweets": [ {{ "motorola", "speed" }} ] }
-        { "tweet": { "tweetid": "4", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("39.28,70.48"), "send-time": datetime("2011-12-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " like sprint the voice-command is mind-blowing:)" }, "similar-tweets": [ {{ "samsung", "voice-command" }} ] }
-        { "tweet": { "tweetid": "5", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("40.09,92.69"), "send-time": datetime("2006-08-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " can't stand motorola its speed is terrible:(" }, "similar-tweets": [ {{ "motorola", "speed" }} ] }
-        { "tweet": { "tweetid": "6", "user": { "screen-name": "ColineGeyer@63", "lang": "en", "friends_count": 121, "statuses_count": 362, "name": "Coline Geyer", "followers_count": 17159 }, "sender-location": point("47.51,83.99"), "send-time": datetime("2010-05-07T10:10:00.000Z"), "referred-topics": {{ "iphone", "voice-clarity" }}, "message-text": " like iphone the voice-clarity is good:)" }, "similar-tweets": [ {{ "verizon", "voice-clarity" }}, {{ "iphone", "platform" }} ] }
-        { "tweet": { "tweetid": "7", "user": { "screen-name": "ChangEwing_573", "lang": "en", "friends_count": 182, "statuses_count": 394, "name": "Chang Ewing", "followers_count": 32136 }, "sender-location": point("36.21,72.6"), "send-time": datetime("2011-08-25T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " like samsung the platform is good" }, "similar-tweets": [ {{ "iphone", "platform" }}, {{ "samsung", "voice-command" }} ] }
-        { "tweet": { "tweetid": "8", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("46.05,93.34"), "send-time": datetime("2005-10-14T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "shortcut-menu" }}, "message-text": " like t-mobile the shortcut-menu is awesome:)" }, "similar-tweets": [ {{ "t-mobile", "customization" }}, {{ "verizon", "shortcut-menu" }} ] }
-        { "tweet": { "tweetid": "9", "user": { "screen-name": "NathanGiesen@211", "lang": "en", "friends_count": 39339, "statuses_count": 473, "name": "Nathan Giesen", "followers_count": 49416 }, "sender-location": point("36.86,74.62"), "send-time": datetime("2012-07-21T10:10:00.000Z"), "referred-topics": {{ "verizon", "voicemail-service" }}, "message-text": " love verizon its voicemail-service is awesome" }, "similar-tweets": [ {{ "verizon", "voice-clarity" }}, {{ "verizon", "shortcut-menu" }} ] }
-
+        { "chirp": { "chirpId": "11", "user": { "screenName": "NilaMilliron_tw", "lang": "en", "friendsCount": 445, "statusesCount": 164, "name": "Nila Milliron", "followersCount": 22649 }, "senderLocation": point("37.59,68.42"), "sendTime": datetime("2008-03-09T10:10:00.000Z"), "referredTopics": {{ "iphone", "platform" }}, "messageText": " can't stand iphone its platform is terrible" }, "similarChirps": [ {{ "iphone", "voice-clarity" }}, {{ "samsung", "platform" }} ] }
+        { "chirp": { "chirpId": "2", "user": { "screenName": "ColineGeyer@63", "lang": "en", "friendsCount": 121, "statusesCount": 362, "name": "Coline Geyer", "followersCount": 17159 }, "senderLocation": point("32.84,67.14"), "sendTime": datetime("2010-05-13T10:10:00.000Z"), "referredTopics": {{ "verizon", "shortcut-menu" }}, "messageText": " like verizon its shortcut-menu is awesome:)" }, "similarChirps": [ {{ "verizon", "voicemail-service" }}, {{ "verizon", "voice-clarity" }}, {{ "t-mobile", "shortcut-menu" }} ] }
+        { "chirp": { "chirpId": "4", "user": { "screenName": "NathanGiesen@211", "lang": "en", "friendsCount": 39339, "statusesCount": 473, "name": "Nathan Giesen", "followersCount": 49416 }, "senderLocation": point("39.28,70.48"), "sendTime": datetime("2011-12-26T10:10:00.000Z"), "referredTopics": {{ "sprint", "voice-command" }}, "messageText": " like sprint the voice-command is mind-blowing:)" }, "similarChirps": [ {{ "samsung", "voice-command" }} ] }
+        { "chirp": { "chirpId": "9", "user": { "screenName": "NathanGiesen@211", "lang": "en", "friendsCount": 39339, "statusesCount": 473, "name": "Nathan Giesen", "followersCount": 49416 }, "senderLocation": point("36.86,74.62"), "sendTime": datetime("2012-07-21T10:10:00.000Z"), "referredTopics": {{ "verizon", "voicemail-service" }}, "messageText": " love verizon its voicemail-service is awesome" }, "similarChirps": [ {{ "verizon", "shortcut-menu" }}, {{ "verizon", "voice-clarity" }} ] }
+        { "chirp": { "chirpId": "1", "user": { "screenName": "NathanGiesen@211", "lang": "en", "friendsCount": 39339, "statusesCount": 473, "name": "Nathan Giesen", "followersCount": 49416 }, "senderLocation": point("47.44,80.65"), "sendTime": datetime("2008-04-26T10:10:00.000Z"), "referredTopics": {{ "t-mobile", "customization" }}, "messageText": " love t-mobile its customization is good:)" }, "similarChirps": [ {{ "t-mobile", "shortcut-menu" }} ] }
+        { "chirp": { "chirpId": "5", "user": { "screenName": "NathanGiesen@211", "lang": "en", "friendsCount": 39339, "statusesCount": 473, "name": "Nathan Giesen", "followersCount": 49416 }, "senderLocation": point("40.09,92.69"), "sendTime": datetime("2006-08-04T10:10:00.000Z"), "referredTopics": {{ "motorola", "speed" }}, "messageText": " can't stand motorola its speed is terrible:(" }, "similarChirps": [ {{ "motorola", "speed" }} ] }
+        { "chirp": { "chirpId": "3", "user": { "screenName": "NathanGiesen@211", "lang": "en", "friendsCount": 39339, "statusesCount": 473, "name": "Nathan Giesen", "followersCount": 49416 }, "senderLocation": point("29.72,75.8"), "sendTime": datetime("2006-11-04T10:10:00.000Z"), "referredTopics": {{ "motorola", "speed" }}, "messageText": " like motorola the speed is good:)" }, "similarChirps": [ {{ "motorola", "speed" }} ] }
+        { "chirp": { "chirpId": "6", "user": { "screenName": "ColineGeyer@63", "lang": "en", "friendsCount": 121, "statusesCount": 362, "name": "Coline Geyer", "followersCount": 17159 }, "senderLocation": point("47.51,83.99"), "sendTime": datetime("2010-05-07T10:10:00.000Z"), "referredTopics": {{ "iphone", "voice-clarity" }}, "messageText": " like iphone the voice-clarity is good:)" }, "similarChirps": [ {{ "iphone", "platform" }}, {{ "verizon", "voice-clarity" }} ] }
+        { "chirp": { "chirpId": "7", "user": { "screenName": "ChangEwing_573", "lang": "en", "friendsCount": 182, "statusesCount": 394, "name": "Chang Ewing", "followersCount": 32136 }, "senderLocation": point("36.21,72.6"), "sendTime": datetime("2011-08-25T10:10:00.000Z"), "referredTopics": {{ "samsung", "platform" }}, "messageText": " like samsung the platform is good" }, "similarChirps": [ {{ "iphone", "platform" }}, {{ "samsung", "voice-command" }} ] }
+        { "chirp": { "chirpId": "10", "user": { "screenName": "ColineGeyer@63", "lang": "en", "friendsCount": 121, "statusesCount": 362, "name": "Coline Geyer", "followersCount": 17159 }, "senderLocation": point("29.15,76.53"), "sendTime": datetime("2008-01-26T10:10:00.000Z"), "referredTopics": {{ "verizon", "voice-clarity" }}, "messageText": " hate verizon its voice-clarity is OMG:(" }, "similarChirps": [ {{ "verizon", "shortcut-menu" }}, {{ "verizon", "voicemail-service" }}, {{ "iphone", "voice-clarity" }} ] }
+        { "chirp": { "chirpId": "12", "user": { "screenName": "OliJackson_512", "lang": "en", "friendsCount": 445, "statusesCount": 164, "name": "Oli Jackson", "followersCount": 22649 }, "senderLocation": point("24.82,94.63"), "sendTime": datetime("2010-02-13T10:10:00.000Z"), "referredTopics": {{ "samsung", "voice-command" }}, "messageText": " like samsung the voice-command is amazing:)" }, "similarChirps": [ {{ "sprint", "voice-command" }}, {{ "samsung", "platform" }} ] }
+        { "chirp": { "chirpId": "8", "user": { "screenName": "NathanGiesen@211", "lang": "en", "friendsCount": 39339, "statusesCount": 473, "name": "Nathan Giesen", "followersCount": 49416 }, "senderLocation": point("46.05,93.34"), "sendTime": datetime("2005-10-14T10:10:00.000Z"), "referredTopics": {{ "t-mobile", "shortcut-menu" }}, "messageText": " like t-mobile the shortcut-menu is awesome:)" }, "similarChirps": [ {{ "verizon", "shortcut-menu" }}, {{ "t-mobile", "customization" }} ] }
 
 ### Inserting New Data  ###
 In addition to loading and querying data, AsterixDB supports incremental additions to datasets via the AQL _insert_ statement.
 
-The following example adds a new tweet by user "NathanGiesen@211" to the TweetMessages dataset.
-(An astute reader may notice that this tweet was issued a half an hour after his last tweet, so his counts
+The following example adds a new chirp by user "NathanGiesen@211" to the ChirpMessages dataset.
+(An astute reader may notice that this chirp was issued half an hour after his last chirp, so his counts
 have all gone up in the interim, although he appears not to have moved in the last half hour.)
 
         use dataverse TinySocial;
 
-        insert into dataset TweetMessages
+        insert into dataset ChirpMessages
         (
-           {"tweetid":"13",
+           {"chirpId": "13",
             "user":
-                {"screen-name":"NathanGiesen@211",
-                 "lang":"en",
-                 "friends_count":39345,
-                 "statuses_count":479,
-                 "name":"Nathan Giesen",
-                 "followers_count":49420
+                {"screenName": "NathanGiesen@211",
+                 "lang": "en",
+                 "friendsCount": 39345,
+                 "statusesCount": 479,
+                 "name": "Nathan Giesen",
+                 "followersCount": 49420
                 },
-            "sender-location":point("47.44,80.65"),
-            "send-time":datetime("2008-04-26T10:10:35"),
-            "referred-topics":{{"tweeting"}},
-            "message-text":"tweety tweet, my fellow tweeters!"
+            "senderLocation": point("47.44,80.65"),
+            "sendTime": datetime("2008-04-26T10:10:35"),
+            "referredTopics": {{"chirping"}},
+            "messageText": "chirpy chirp, my fellow chirpers!"
            }
         );
 
@@ -888,11 +888,11 @@
 The statement supports "searched delete" semantics, and its
 _where_ clause can involve any valid AQL expression.
 
-The following example deletes the tweet that we just added from user "NathanGiesen@211".  (Easy come, easy go. :-))
+The following example deletes the chirp that we just added from user "NathanGiesen@211".  (Easy come, easy go. :-))
 
         use dataverse TinySocial;
 
-        delete $tm from dataset TweetMessages where $tm.tweetid = "13";
+        delete $cm from dataset ChirpMessages where $cm.chirpId = "13";
 
 It should be noted that one form of data change not yet supported by AsterixDB is in-place data modification (_update_).
 Currently, only insert and delete operations are supported; update is not.
@@ -904,27 +904,27 @@
 In addition to loading, querying, inserting, and deleting data, AsterixDB supports upserting
 records using the AQL _upsert_ statement.
 
-The following example deletes the tweet with the tweetid = 20 (if exists) and inserts the
-new tweet with tweetid=20 and the user "SwanSmitty" to the TweetMessages dataset. The two
+The following example deletes the chirp with chirpId = 20 (if one exists) and inserts the
+new chirp with chirpId = 20 by user "SwanSmitty" into the ChirpMessages dataset. The two
 operations (delete if found and insert) are performed as a single atomic operation that
 either completes in its entirety or does not happen at all.
 
         use dataverse TinySocial;
-        upsert into dataset TweetMessages
+        upsert into dataset ChirpMessages
         (
-           {"tweetid":"20",
+           {"chirpId": "20",
             "user":
-                {"screen-name":"SwanSmitty",
-                 "lang":"en",
-                 "friends_count":91345,
-                 "statuses_count":4079,
-                 "name":"Swanson Smith",
-                 "followers_count":50420
+                {"screenName": "SwanSmitty",
+                 "lang": "en",
+                 "friendsCount": 91345,
+                 "statusesCount": 4079,
+                 "name": "Swanson Smith",
+                 "followersCount": 50420
                 },
-            "sender-location":point("47.44,80.65"),
-            "send-time":datetime("2008-04-26T10:10:35"),
-            "referred-topics":{{"football"}},
-            "message-text":"football is the best sport, period.!"
+            "senderLocation": point("47.44,80.65"),
+            "sendTime": datetime("2008-04-26T10:10:35"),
+            "referredTopics": {{"football"}},
+            "messageText": "football is the best sport, period.!"
            }
         );
 
@@ -932,29 +932,30 @@
 For example, the following statement might be used to double the followers count of all existing users.
 
         use dataverse TinySocial;
-        upsert into dataset TweetUsers
+        upsert into dataset ChirpUsers
         (
-           for $user in dataset TweetUsers
+           for $user in dataset ChirpUsers
            return {
-            "screen-name":$user.screen-name,
-            "lang":$user.lang,
-            "friends_count":$user.friends_count,
-            "statuses_count":$user.statuses_count,
-            "name":$user.name,
-            "followers_count":$user.followers_count*2
+            "screenName": $user.screenName,
+            "lang": $user.lang,
+            "friendsCount": $user.friendsCount,
+            "statusesCount": $user.statusesCount,
+            "name": $user.name,
+            "followersCount": $user.followersCount * 2
            }
         );
 
-Note that an upsert operation is executed in two steps, the query is performed,query locks
-are released, and then its result is upserted into the dataset. This means that the record
-can be modified between computing the query result and performing the upsert.
+Note that such an upsert operation is executed in two steps:
+The query is performed, after which the query's locks are released,
+and then its result is upserted into the dataset.
+This means that a record can be modified between computing the query result and performing the upsert.
 
 ### Transaction Support
 
 AsterixDB supports record-level ACID transactions that begin and terminate implicitly for each record inserted, deleted, or searched while a given AQL statement is being executed. This is quite similar to the level of transaction support found in today's NoSQL stores. AsterixDB does not support multi-statement transactions, and in fact an AQL statement that involves multiple records can itself involve multiple independent record-level transactions. An example consequence of this is that, when an AQL statement attempts to insert 1000 records, it is possible that the first 800 records could end up being committed while the remaining 200 records fail to be inserted. This situation could happen, for example, if a duplicate key exception occurs as the 801st insertion is attempted. If this happens, AsterixDB will report the error (e.g., a duplicate key exception) as the result of the offending AQL insert statement, and the application logic above will need to take the appropriate action(s) needed to assess the resulting state and to clean up and/or continue as appropriate.
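+
+To make this concrete, the following illustrative statement (assuming the TinySocial data
+loaded earlier in this section) attempts to insert a chirp whose chirpId ("5") is already
+present in ChirpMessages. AsterixDB will report a duplicate key error as the result of this
+insert statement, and no new record will be added:
+
+        use dataverse TinySocial;
+
+        insert into dataset ChirpMessages
+        (
+           {"chirpId": "5",
+            "user":
+                {"screenName": "NathanGiesen@211",
+                 "lang": "en",
+                 "friendsCount": 39345,
+                 "statusesCount": 479,
+                 "name": "Nathan Giesen",
+                 "followersCount": 49420
+                },
+            "senderLocation": point("40.09,92.69"),
+            "sendTime": datetime("2006-08-04T10:10:00"),
+            "referredTopics": {{"motorola"}},
+            "messageText": " a chirp that reuses an existing chirpId"
+           }
+        );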
 
 ## Further Help ##
-That's it  You are now armed and dangerous with respect to semistructured data management using AsterixDB.
+That's it!  You are now armed and dangerous with respect to semistructured data management using AsterixDB and AQL.
 
 AsterixDB is a powerful new BDMS---Big Data Management System---that we hope may usher in a new era of much
 more declarative Big Data management.
diff --git a/asterixdb/asterix-doc/src/site/markdown/aql/datamodel.md b/asterixdb/asterix-doc/src/site/markdown/datamodel.md
similarity index 100%
rename from asterixdb/asterix-doc/src/site/markdown/aql/datamodel.md
rename to asterixdb/asterix-doc/src/site/markdown/datamodel.md
diff --git a/asterixdb/asterix-doc/src/site/markdown/feeds/tutorial.md b/asterixdb/asterix-doc/src/site/markdown/feeds/tutorial.md
index 39e04b1..fb06a92 100644
--- a/asterixdb/asterix-doc/src/site/markdown/feeds/tutorial.md
+++ b/asterixdb/asterix-doc/src/site/markdown/feeds/tutorial.md
@@ -61,22 +61,17 @@
         create dataverse feeds;
         use dataverse feeds;
 
-        create type TwitterUser if not exists as open{
-            screen_name: string,
-            language: string,
-            friends_count: int32,
-            status_count: int32,
-            name: string,
-            followers_count: int32
+        create type TwitterUser as closed {
+                screen_name: string,
+                lang: string,
+                friends_count: int32,
+                statuses_count: int32
         };
-        create type Tweet if not exists as open{
-            id: string,
-            user: TwitterUser,
-            latitude:double,
-            longitude:double,
-            created_at:string,
-            message_text:string
-        };
+
+        create type Tweet as open {
+                id: int64,
+                user: TwitterUser
+        };
 
         create dataset Tweets (Tweet)
         primary key id;
@@ -103,6 +98,7 @@
 
         create feed TwitterFeed if not exists using "push_twitter"
         (("type-name"="Tweet"),
+         ("format"="twitter-status"),
          ("consumer.key"="************"),
          ("consumer.secret"="**************"),
          ("access.token"="**********"),
@@ -185,6 +181,7 @@
         create feed my_feed using
         rss_feed (
            ("type-name"="Rss"),
+           ("format"="rss"),
            ("url"="http://rss.cnn.com/rss/edition.rss")
         );
 
diff --git a/asterixdb/asterix-doc/src/site/markdown/sqlpp/primer-sqlpp.md b/asterixdb/asterix-doc/src/site/markdown/sqlpp/primer-sqlpp.md
index 3cfb30b..af63520 100644
--- a/asterixdb/asterix-doc/src/site/markdown/sqlpp/primer-sqlpp.md
+++ b/asterixdb/asterix-doc/src/site/markdown/sqlpp/primer-sqlpp.md
@@ -45,7 +45,6 @@
 Once you have reached the end of this tutorial, you will be fully armed and dangerous, with all the basic AsterixDB knowledge
 that you'll need to start down the path of modeling, storing, and querying your own semistructured data.
 
-----
 ## ADM: Modeling Semistructured Data in AsterixDB ##
 In this section you will learn all about modeling Big Data using
 ADM, the data model of the AsterixDB BDMS.
@@ -247,7 +246,6 @@
 the dataset being queried (e.g., _SELECT VALUE ds_ in the first statement returns the entire
 record from the metadata dataset containing the descriptions of all datasets).
 
-----
 ## Loading Data Into AsterixDB ##
 Okay, so far so good---AsterixDB is now ready for data, so let's give it some data to store.
 Our next task will be to load some sample data into the four datasets that we just defined.
@@ -339,7 +337,6 @@
         LOAD DATASET ChirpMessages USING localfs
             (("path"="<Host Name>://<Absolute File Path>/chm.adm"),("format"="adm"));
 
-----
 ## SQL++: Querying Your AsterixDB Data ##
 Congratulations! You now have sample social data stored (and indexed) in AsterixDB.
 (You are part of an elite and adventurous group of individuals. :-))
diff --git a/asterixdb/asterix-doc/src/site/site.xml b/asterixdb/asterix-doc/src/site/site.xml
index 33ce7eb..4c0ba6e 100644
--- a/asterixdb/asterix-doc/src/site/site.xml
+++ b/asterixdb/asterix-doc/src/site/site.xml
@@ -85,7 +85,7 @@
     </menu>
 
     <menu name="Data Model">
-      <item name="The Asterix Data Model" href="aql/datamodel.html"/>
+      <item name="The Asterix Data Model" href="datamodel.html"/>
     </menu>
 
     <menu name="Queries - SQL++">
@@ -108,7 +108,6 @@
 
     <menu name="API/SDK">
       <item name="HTTP API" href="api.html"/>
-      <item name="Javascript SDK" href="aql/js-sdk.html"/>
     </menu>
 
     <menu ref="reports"/>
diff --git a/asterixdb/asterix-docker/docker/asterix-configuration.xml b/asterixdb/asterix-docker/docker/asterix-configuration.xml
index ea43da9..fe9827c 100644
--- a/asterixdb/asterix-docker/docker/asterix-configuration.xml
+++ b/asterixdb/asterix-docker/docker/asterix-configuration.xml
@@ -101,7 +101,10 @@
     <name>storage.memorycomponent.numpages</name>
     <value>256</value>
     <description>The number of pages to allocate for a memory component.
-      (Default = 256)
+      This budget is shared by all the memory components of the primary
+      index and all its secondary indexes across all I/O devices on a node.
+      Note: in-memory components usually have a fill factor of 75%, i.e., the pages
+      are on average 75% full and the remaining 25% is unutilized. (Default = 256)
     </description>
   </property>
   <property>
@@ -123,9 +126,11 @@
   <property>
     <name>storage.memorycomponent.globalbudget</name>
     <value>512MB</value>
-    <description>The total size of memory in bytes that the sum of all
-      open memory
-      components cannot exceed. (Default = "536870192" // 512MB)
+    <description>The total size of memory in bytes that the sum of all open memory
+      components cannot exceed. Think of this as the buffer cache for all memory
+      components of all indexes on a node. When this budget is fully used, a victim
+      dataset is chosen; that dataset must be evicted and closed to make
+      space for another dataset. (Default = 512MB)
     </description>
   </property>
 
diff --git a/asterixdb/asterix-experiments/src/main/resources/ingestion-experiment-binary-and-configs/configs/asterix-configuration.xml b/asterixdb/asterix-experiments/src/main/resources/ingestion-experiment-binary-and-configs/configs/asterix-configuration.xml
index c642cbb..f4bb47a 100644
--- a/asterixdb/asterix-experiments/src/main/resources/ingestion-experiment-binary-and-configs/configs/asterix-configuration.xml
+++ b/asterixdb/asterix-experiments/src/main/resources/ingestion-experiment-binary-and-configs/configs/asterix-configuration.xml
@@ -72,7 +72,11 @@
   <property>
     <name>storage.memorycomponent.numpages</name>
     <value>8192</value>
-    <description>
+    <description>The number of pages to allocate for a memory component.
+      This budget is shared by all the memory components of the primary
+      index and all its secondary indexes across all I/O devices on a node.
+      Note: in-memory components usually have a fill factor of 75%, i.e., the pages
+      are on average 75% full and the remaining 25% is unutilized. (Default = 256)
     </description>
   </property>
   <!--// Buffer size per dataset for in-memory components -->
@@ -96,9 +100,11 @@
   <property>
     <name>storage.memorycomponent.globalbudget</name>
     <value>4196MB</value>
-    <description>[4GB + 100MB]The total size of memory in bytes that the sum of all
-      open memory
-      components cannot exceed. (Default = "536870192" // 512MB)
+    <description>The total size of memory in bytes that the sum of all open memory
+      components cannot exceed. Think of this as the buffer cache for all memory
+      components of all indexes on a node. When this budget is fully used, a victim
+      dataset is chosen; that dataset must be evicted and closed to make
+      space for another dataset. (Default = 512MB)
     </description>
   </property>
 
diff --git a/asterixdb/asterix-external-data/pom.xml b/asterixdb/asterix-external-data/pom.xml
index c3cf3c4..245f340 100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@ -244,10 +244,26 @@
       <artifactId>hadoop-client</artifactId>
       <type>jar</type>
       <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>com.sun.jersey.jersey-test-framework</groupId>
+          <artifactId>jersey-test-framework-grizzly2</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet.jsp</groupId>
+          <artifactId>jsp-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>net.java.dev.rome</groupId>
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index c5167c1..7d27e45 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -103,6 +103,10 @@
     public static final String KEY_RECORD_FORMAT = "record-format";
     public static final String KEY_META_TYPE_NAME = "meta-type-name";
     public static final String READER_STREAM = "stream";
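+    // Optional HTTP proxy settings for adapters that open outbound HTTP connections
+    // (e.g., the Twitter adapters); the proxy host and port must be supplied together,
+    // and the user/password pair is applied only when both values are present.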
+    public static final String KEY_HTTP_PROXY_HOST = "http-proxy-host";
+    public static final String KEY_HTTP_PROXY_PORT = "http-proxy-port";
+    public static final String KEY_HTTP_PROXY_USER = "http-proxy-user";
+    public static final String KEY_HTTP_PROXY_PASSWORD = "http-proxy-password";
     /**
      * HDFS class names
      */
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
index 70d31c0..d8f375b 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
@@ -18,14 +18,6 @@
  */
 package org.apache.asterix.external.util;
 
-import org.apache.asterix.common.exceptions.AsterixException;
-import twitter4j.FilterQuery;
-import twitter4j.Twitter;
-import twitter4j.TwitterFactory;
-import twitter4j.TwitterStream;
-import twitter4j.TwitterStreamFactory;
-import twitter4j.conf.ConfigurationBuilder;
-
 import java.io.InputStream;
 import java.util.HashMap;
 import java.util.Map;
@@ -35,6 +27,15 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.asterix.common.exceptions.AsterixException;
+
+import twitter4j.FilterQuery;
+import twitter4j.Twitter;
+import twitter4j.TwitterFactory;
+import twitter4j.TwitterStream;
+import twitter4j.TwitterStreamFactory;
+import twitter4j.conf.ConfigurationBuilder;
+
 public class TwitterUtil {
 
     private static Logger LOGGER = Logger.getLogger(TwitterUtil.class.getName());
@@ -65,12 +66,11 @@
     /**
      * Gets more than one bounding box from a sequences of coordinates
      * (following Twitter formats) + predefined location names, as US and EU.
-     *
      * E.g., for EU and US, we would use -29.7, 79.2, 36.7, 72.0; -124.848974,
-     *      -66.885444, 24.396308, 49.384358.
+     * -66.885444, 24.396308, 49.384358.
      *
      * @param locationValue
-     *          String value of the location coordinates or names (comma-separated)
+     *            String value of the location coordinates or names (comma-separated)
      * @return
      * @throws AsterixException
      */
@@ -219,9 +219,25 @@
         cb.setOAuthConsumerSecret(oAuthConsumerSecret);
         cb.setOAuthAccessToken(oAuthAccessToken);
         cb.setOAuthAccessTokenSecret(oAuthAccessTokenSecret);
+        configureProxy(cb, configuration);
         return cb;
     }
 
+    private static void configureProxy(ConfigurationBuilder cb, Map<String, String> configuration) {
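+        // Configure an HTTP proxy on the Twitter client if proxy settings are present in the
+        // external data configuration: both the proxy host and port must be supplied for the
+        // proxy to be set, and the user/password pair is only applied when both are present.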
+        final String httpProxyHost = configuration.get(ExternalDataConstants.KEY_HTTP_PROXY_HOST);
+        final String httpProxyPort = configuration.get(ExternalDataConstants.KEY_HTTP_PROXY_PORT);
+        if (httpProxyHost != null && httpProxyPort != null) {
+            cb.setHttpProxyHost(httpProxyHost);
+            cb.setHttpProxyPort(Integer.parseInt(httpProxyPort));
+            final String httpProxyUser = configuration.get(ExternalDataConstants.KEY_HTTP_PROXY_USER);
+            final String httpProxyPassword = configuration.get(ExternalDataConstants.KEY_HTTP_PROXY_PASSWORD);
+            if (httpProxyUser != null && httpProxyPassword != null) {
+                cb.setHttpProxyUser(httpProxyUser);
+                cb.setHttpProxyPassword(httpProxyPassword);
+            }
+        }
+    }
+
     public static void initializeConfigurationWithAuthInfo(Map<String, String> configuration) throws AsterixException {
         String authMode = configuration.get(AuthenticationConstants.AUTHENTICATION_MODE);
         if (authMode == null) {
diff --git a/asterixdb/asterix-installer/pom.xml b/asterixdb/asterix-installer/pom.xml
index 2b849c2..b3bd196 100644
--- a/asterixdb/asterix-installer/pom.xml
+++ b/asterixdb/asterix-installer/pom.xml
@@ -311,5 +311,12 @@
       <version>${hadoop.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop.version}</version>
+      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-installer/src/main/resources/conf/asterix-configuration.xml b/asterixdb/asterix-installer/src/main/resources/conf/asterix-configuration.xml
index aa8ad8d..ed8e70a 100644
--- a/asterixdb/asterix-installer/src/main/resources/conf/asterix-configuration.xml
+++ b/asterixdb/asterix-installer/src/main/resources/conf/asterix-configuration.xml
@@ -76,7 +76,10 @@
     <name>storage.memorycomponent.numpages</name>
     <value>256</value>
     <description>The number of pages to allocate for a memory component.
-      (Default = 256)
+      This budget is shared by all the memory components of the primary
+      index and all its secondary indexes across all I/O devices on a node.
+      Note: in-memory components usually have a fill factor of 75%, i.e., the pages
+      are on average 75% full and the remaining 25% is unutilized. (Default = 256)
     </description>
   </property>
 
@@ -99,9 +102,11 @@
   <property>
     <name>storage.memorycomponent.globalbudget</name>
     <value>1GB</value>
-    <description>The total size of memory in bytes that the sum of all
-      open memory
-      components cannot exceed. (Default = "536870192" // 512MB)
+    <description>The total size of memory in bytes that the sum of all open memory
+      components cannot exceed. Think of this as the buffer cache for all memory
+      components of all indexes on a node. When this budget is fully used, a victim
+      dataset is chosen; that dataset must be evicted and closed to make
+      space for another dataset. (Default = 512MB)
     </description>
   </property>
 
diff --git a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixRestartIT.java b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixRestartIT.java
new file mode 100644
index 0000000..6d7eaa4
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixRestartIT.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.installer.test;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import org.apache.asterix.event.model.AsterixInstance.State;
+import org.apache.asterix.test.aql.TestExecutor;
+import org.apache.asterix.test.base.RetainLogsRule;
+import org.apache.asterix.testframework.context.TestCaseContext;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestRule;
+
+// This is a simple derivative of the other IT test cases, intended specifically for tests that
+// exercise behavior where stopping and restarting Asterix causes issues.
+public class AsterixRestartIT {
+
+    private static final String PATH_BASE = "src/test/resources/integrationts/restart/";
+    private static final String PATH_ACTUAL = "target" + File.separator + "ittest" + File.separator;
+    private static final Logger LOGGER = Logger.getLogger(AsterixRestartIT.class.getName());
+    private static List<TestCaseContext> testCaseCollection;
+    private static String reportPath = new File(
+            StringUtils.join(new String[] { "target", "failsafe-reports" }, File.separator)).getAbsolutePath();
+
+    private final TestExecutor testExecutor = new TestExecutor();
+    private static String scriptHomePath;
+    private static File asterixInstallerPath;
+    private static ProcessBuilder pb;
+    private static Map<String, String> env;
+
+    @Rule
+    public TestRule retainLogs = new RetainLogsRule(AsterixInstallerIntegrationUtil.getManagixHome(), reportPath);
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+        try {
+            pb = new ProcessBuilder();
+            env = pb.environment();
+            asterixInstallerPath = new File(System.getProperty("user.dir"));
+            scriptHomePath = asterixInstallerPath + File.separator + "src" + File.separator + "test" + File.separator
+                    + "resources" + File.separator + "integrationts" + File.separator + "restart"
+                    + File.separator + "scripts";
+            env.put("SCRIPT_HOME", scriptHomePath);
+            AsterixInstallerIntegrationUtil.init();
+            AsterixInstallerIntegrationUtil.transformIntoRequiredState(State.ACTIVE);
+            TestCaseContext.Builder b = new TestCaseContext.Builder();
+            testCaseCollection = b.build(new File(PATH_BASE));
+        } catch (Throwable th) {
+            th.printStackTrace();
+            throw th;
+        }
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+        AsterixInstallerIntegrationUtil.deinit();
+    }
+
+    @Test
+    public void test() throws Exception {
+        for (TestCaseContext testCaseCtx : testCaseCollection) {
+            testExecutor.executeTest(PATH_ACTUAL, testCaseCtx, pb, false);
+        }
+
+    }
+
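+    // Entry point for running the restart integration tests standalone (e.g., for local
+    // debugging outside the Maven failsafe run).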
+    public static void main(String[] args) throws Exception {
+        try {
+            setUp();
+            new AsterixRestartIT().test();
+        } catch (Exception e) {
+            e.printStackTrace();
+            LOGGER.info("TEST CASES FAILED");
+        } finally {
+            tearDown();
+        }
+    }
+
+}
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/asterix-configuration.xml b/asterixdb/asterix-installer/src/test/resources/integrationts/asterix-configuration.xml
index e36c33b..9992009 100644
--- a/asterixdb/asterix-installer/src/test/resources/integrationts/asterix-configuration.xml
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/asterix-configuration.xml
@@ -76,7 +76,10 @@
     <name>storage.memorycomponent.numpages</name>
     <value>256</value>
     <description>The number of pages to allocate for a memory component.
-      (Default = 256)
+      This budget is shared by all the memory components of the primary
+      index and all its secondary indexes across all I/O devices on a node.
+      Note: in-memory components usually have a fill factor of 75%, i.e., the pages
+      are on average 75% full and the remaining 25% is unutilized. (Default = 256)
     </description>
   </property>
 
@@ -99,9 +102,11 @@
   <property>
     <name>storage.memorycomponent.globalbudget</name>
     <value>1GB</value>
-    <description>The total size of memory in bytes that the sum of all
-      open memory
-      components cannot exceed. (Default = "536870192" // 512MB)
+    <description>The total size of memory in bytes that the sum of all open memory
+      components cannot exceed. Think of this as the buffer cache for all memory
+      components of all indexes on a node. When this budget is fully used, a victim
+      dataset is chosen; that dataset must be evicted and closed to make
+      space for another dataset. (Default = 512MB)
     </description>
   </property>
 
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/828.h1w.adm b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/828.h1w.adm
new file mode 100644
index 0000000..1664fa9
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/828.h1w.adm
@@ -0,0 +1,100 @@
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929038727086081"),"text":"My nephew just made my day https://t.co/ZG16634ckU","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("532607253"),"name":"Alonzo","screen_name":"ZoDiamond777","lang":"en","location":"MIA,FLA 777","create_at":date("2012-03-21"),"description":"JOKER","followers_count":685,"friends_count":653,"statues_count":33064},"place":{"country":"United States","country_code":"United States","full_name":"Kendall, FL","id":"9b46dccb3cfb880c","name":"Kendall","place_type":"city","bounding_box":rectangle("-80.389344,25.628844 -80.304896,25.715128")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12086,"countyName":"Miami-Dade","cityID":1236100,"cityName":"Kendall"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929038852943872"),"text":"Boat ride!! \uD83D\uDEE5 on the river (@ Corporate Tailgate Boat Rentals in Chicago, IL) https://t.co/cb3KfMYlVS","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2871341"),"name":"Sharon H.","screen_name":"ReadingChick","lang":"en","location":"The Western Suburbs","create_at":date("2007-03-29"),"description":"Children's Librarians Grow Life Long Learners. More affable than rumor would have it. Happily married to Hubba Hubba.","followers_count":997,"friends_count":777,"statues_count":21876},"place":{"country":"United States","country_code":"United States","full_name":"Chicago, IL","id":"1d9a5370a355ab0c","name":"Chicago","place_type":"city","bounding_box":rectangle("-87.940033,41.644102 -87.523993,42.023067")},"coordinate":point("-87.6574459,41.90307574"),"geo_tag":{"stateID":17,"stateName":"Illinois","countyID":17031,"countyName":"Cook","cityID":1714000,"cityName":"Chicago"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929039272300544"),"text":"#ThisWeek the #CLITERATI demo was released. Check it out #PortlandPunx #DIY #hardcore #crust #dbeat https://t.co/uclG4Mi1kx","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"ThisWeek","CLITERATI","PortlandPunx","DIY","hardcore","crust","dbeat"}},"user":{"id":int64("417393117"),"name":"Ami Lawless","screen_name":"AmiLawless","lang":"en","location":"San Fran & Portlandia","create_at":date("2011-11-20"),"description":"SF Weekly's Best DJ 2014. former punk/metal DJ @ RadioValencia,columnist for a few zines,vox Cliterati/VOETSEK/Bedrucken/Dairy Queens","followers_count":2322,"friends_count":3060,"statues_count":7218},"place":{"country":"United States","country_code":"United States","full_name":"Portland, OR","id":"ac88a4f17a51c7fc","name":"Portland","place_type":"city","bounding_box":rectangle("-122.790065,45.421863 -122.471751,45.650941")},"geo_tag":{"stateID":41,"stateName":"Oregon","countyID":41051,"countyName":"Multnomah","cityID":4159000,"cityName":"Portland"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929038798397440"),"text":"\uD83D\uDE02\uD83D\uDE2D https://t.co/bwpyV7dHXi","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user":{"id":int64("402413445"),"name":"Keyur Patel \u3072","screen_name":"kpatel_6","lang":"en","location":"null","create_at":date("2011-10-31"),"description":"SJJ '18 Snapchat: Silver908","followers_count":161,"friends_count":155,"statues_count":5734},"place":{"country":"United States","country_code":"United States","full_name":"Toledo, OH","id":"7068dd9474ab6973","name":"Toledo","place_type":"city","bounding_box":rectangle("-83.694776,41.580375 -83.454566,41.732806")},"geo_tag":{"stateID":39,"stateName":"Ohio","countyID":39095,"countyName":"Lucas","cityID":3977000,"cityName":"Toledo"}}
+{"create_at":datetime("2016-08-28T09:06:01.000"),"id":int64("769929038341242880"),"text":"life's a beach\n#huntingtonbeach @ Huntington Beach, California https://t.co/csJVJp8Swh","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"huntingtonbeach"}},"user":{"id":int64("710094718256844801"),"name":"Geny","screen_name":"itsgenyc","lang":"en","location":"Dallas, TX","create_at":date("2016-03-16"),"description":"expert when it comes to being awkward","followers_count":59,"friends_count":135,"statues_count":242},"place":{"country":"United States","country_code":"United States","full_name":"Huntington Beach, CA","id":"80eb17ffe368fc9a","name":"Huntington Beach","place_type":"city","bounding_box":rectangle("-118.082615,33.628991 -117.91485,33.756093")},"coordinate":point("-118.0,33.6929"),"geo_tag":{"stateID":6,"stateName":"California","countyID":6059,"countyName":"Orange","cityID":636000,"cityName":"Huntington Beach"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929039448506368"),"text":"Biking to the vineyard city style. #nyc #vineyard #hudson #winetime #sunday #sundayfunday @ City\u2026 https://t.co/qZzVi7n2vD","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"nyc","vineyard","hudson","winetime","sunday","sundayfunday"}},"user":{"id":int64("18413391"),"name":"Shelly Lipton","screen_name":"ShellyLipton","lang":"en","location":"new york city","create_at":date("2008-12-27"),"description":"digital marketer, advisor, business developer, toy recycler, biking fanatic, art junkie, wine lover, head of marketing @Roomiapp","followers_count":2372,"friends_count":1561,"statues_count":6137},"place":{"country":"United States","country_code":"United States","full_name":"Manhattan, NY","id":"01a9a39529b27f36","name":"Manhattan","place_type":"city","bounding_box":rectangle("-74.026675,40.683935 -73.910408,40.877483")},"coordinate":point("-74.0119355,40.7199192"),"geo_tag":{"stateID":36,"stateName":"New York","countyID":36061,"countyName":"New York","cityID":36061,"cityName":"Manhattan"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929039633014784"),"text":"Everyday \uD83D\uDC94 https://t.co/oNPuRMwgM6","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2209170043"),"name":"Queen Double DD","screen_name":"QueenDoubleDD","lang":"en","location":"Texas.\u2741","create_at":date("2013-11-22"),"description":"18 \u2022sc: si-drill \u2022My hearts become to cold to break \u2022 UST Volleyball","followers_count":1151,"friends_count":1271,"statues_count":6535},"place":{"country":"United States","country_code":"United States","full_name":"Houston, TX","id":"1c69a67ad480e1b1","name":"Houston","place_type":"city","bounding_box":rectangle("-95.823268,29.522325 -95.069705,30.154665")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48201,"countyName":"Harris","cityID":4835000,"cityName":"Houston"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929039683260416"),"text":"It takes \"guts and integrity\" to parrot the Leftist echo chamber https://t.co/9qefvaPxMy","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("51599151"),"name":"Nisley the White","screen_name":"SonOfMokeHill","lang":"en","location":"Post-American Hellhole, NV","create_at":date("2009-06-27"),"description":"The Left seeks to ban Christianity and Free Speech. Anyone who proclaims an allegiance to either but fails to back Trump is the Left's ally.","followers_count":194,"friends_count":180,"statues_count":10172},"place":{"country":"United States","country_code":"United States","full_name":"Las Vegas, NV","id":"5c2b5e46ab891f07","name":"Las Vegas","place_type":"city","bounding_box":rectangle("-115.384091,36.129459 -115.062159,36.336371")},"geo_tag":{"stateID":32,"stateName":"Nevada","countyID":32003,"countyName":"Clark","cityID":3240000,"cityName":"Las Vegas"}}
+{"create_at":datetime("2016-08-28T09:06:01.000"),"id":int64("769929037821075456"),"text":"when walking through school https://t.co/3YDKVgzLcF","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("1271362543"),"name":"Briley Bodin \u2661","screen_name":"BrileyBodin17","lang":"en","location":"Nashville, TN","create_at":date("2013-03-15"),"description":"null","followers_count":1039,"friends_count":765,"statues_count":2524},"place":{"country":"United States","country_code":"United States","full_name":"Brentwood, TN","id":"42835dec78de1327","name":"Brentwood","place_type":"city","bounding_box":rectangle("-86.869446,35.939893 -86.686525,36.05065")},"geo_tag":{"stateID":47,"stateName":"Tennessee","countyID":47187,"countyName":"Williamson","cityID":4708280,"cityName":"Brentwood"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929039943467009"),"text":"@rgj I'm a New York Giants fan and I support this 49er quarterback why should we support a ountry that don't support us.#blacklivesmatter","in_reply_to_status":int64("769922393481412608"),"in_reply_to_user":int64("9690012"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"blacklivesmatter"}},"user_mentions":{{9690012}},"user":{"id":int64("441431532"),"name":"Ken E,Williams jr","screen_name":"nygiant150","lang":"en","location":" Bethlehem PA","create_at":date("2011-12-19"),"description":"I love classical and Celtic music I also love to learn and experience new things and I'm a big fan of women comedians","followers_count":1192,"friends_count":3120,"statues_count":8413},"place":{"country":"United States","country_code":"United States","full_name":"Bethlehem, PA","id":"128ae72e3854b273","name":"Bethlehem","place_type":"city","bounding_box":rectangle("-75.4314,40.578043 -75.302993,40.672508")},"geo_tag":{"stateID":42,"stateName":"Pennsylvania","countyID":42095,"countyName":"Northampton","cityID":4206088,"cityName":"Bethlehem"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929039998021632"),"text":"Day 1 #AfroPunkBK2016 #AfroPunk #BigHairDontCare #Fringe #Melanin #NaturalHair #Aztec #Nike @\u2026 https://t.co/QhAyfTnSdi","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"hashtags":{{"AfroPunkBK2016","AfroPunk","BigHairDontCare","Fringe","Melanin","NaturalHair","Aztec","Nike"}},"user":{"id":int64("106653718"),"name":"Asia K","screen_name":"ZingItsAsiaK","lang":"en","location":"Cincinnati\u21E8Chicago","create_at":date("2010-01-19"),"description":"Freckle faced chick with that Zing! \nActress|Singer|Songwritter. \nMy EP #TheZingRoom Coming Soon.","followers_count":519,"friends_count":319,"statues_count":15440},"place":{"country":"United States","country_code":"United States","full_name":"Brooklyn, NY","id":"011add077f4d2da3","name":"Brooklyn","place_type":"city","bounding_box":rectangle("-74.041878,40.570842 -73.855673,40.739434")},"coordinate":point("-73.95,40.65"),"geo_tag":{"stateID":36,"stateName":"New York","countyID":36047,"countyName":"Kings","cityID":36047,"cityName":"Brooklyn"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929040241143808"),"text":"\uD83D\uDC40\uD83D\uDE48 https://t.co/jnByCgyRgF","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user":{"id":int64("2907351446"),"name":"Michael Houston","screen_name":"TheHeadhunter31","lang":"en","location":"null","create_at":date("2014-11-22"),"description":"If they hate, they watchin, if they watchin, they jus apart of the fan club, let em hate #31 #WBU","followers_count":426,"friends_count":520,"statues_count":1584},"place":{"country":"United States","country_code":"United States","full_name":"Lubbock, TX","id":"3f3f6803f117606d","name":"Lubbock","place_type":"city","bounding_box":rectangle("-102.033765,33.44712 -101.760581,33.693933")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48303,"countyName":"Lubbock","cityID":4845000,"cityName":"Lubbock"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929040299958272"),"text":"@MichaelHewitt23 @Amtrak @GovGaryJohnson @SpaceX competition is necessary for our citizens","in_reply_to_status":int64("769891257858465792"),"in_reply_to_user":int64("409501734"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{409501734,119166791,95713333,34743251}},"user":{"id":int64("21556912"),"name":"Michael Fabiano","screen_name":"MichaelFabiano","lang":"en","location":"NYC, SF, London, Paris","create_at":date("2009-02-22"),"description":"Pilot, Car Nut, Yankees-aholic, Interested in tech, Proud American, Tenor.","followers_count":4195,"friends_count":1765,"statues_count":4683},"place":{"country":"United States","country_code":"United States","full_name":"Philadelphia, PA","id":"e4a0d228eb6be76b","name":"Philadelphia","place_type":"city","bounding_box":rectangle("-75.280284,39.871811 -74.955712,40.13792")},"geo_tag":{"stateID":42,"stateName":"Pennsylvania","countyID":42101,"countyName":"Philadelphia","cityID":4260000,"cityName":"Philadelphia"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929040958332928"),"text":"I pull up on you then I pop at your kid","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2463887766"),"name":"Brett Favre","screen_name":"_R_D_B","lang":"en","location":"the trap","create_at":date("2014-04-25"),"description":"I am running for president in 2033","followers_count":113,"friends_count":113,"statues_count":1741},"place":{"country":"United States","country_code":"United States","full_name":"Deerfield Beach, FL","id":"4ebdbc556ccd2f12","name":"Deerfield Beach","place_type":"city","bounding_box":rectangle("-80.170343,26.274467 -80.074368,26.327929")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12011,"countyName":"Broward","cityID":1216725,"cityName":"Deerfield Beach"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929040874668032"),"text":"Twitter https://t.co/GdyFgLlK8A","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("755831881946497024"),"name":"kingK\u00AE","screen_name":"_kaniyac","lang":"en","location":"WithyourMCM","create_at":date("2016-07-20"),"description":"AMOSC:kaniyac \u2728 810\u2708251 C/o 2017","followers_count":180,"friends_count":248,"statues_count":4984},"place":{"country":"United States","country_code":"United States","full_name":"Mobile, AL","id":"d049033410e9e81b","name":"Mobile","place_type":"city","bounding_box":rectangle("-88.301598,30.523874 -88.021513,30.843424")},"geo_tag":{"stateID":1,"stateName":"Alabama","countyID":1097,"countyName":"Mobile","cityID":150000,"cityName":"Mobile"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929041893666816"),"text":"@lillipoop ya bae it a lil day trip","in_reply_to_status":int64("769928937241767936"),"in_reply_to_user":int64("2452391970"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{2452391970}},"user":{"id":int64("368394931"),"name":"yardley yeti","screen_name":"Ktmilllz","lang":"en","location":"Yardl","create_at":date("2011-09-05"),"description":"I love Jesus but I drink a little","followers_count":406,"friends_count":445,"statues_count":14211},"place":{"country":"United States","country_code":"United States","full_name":"Philadelphia, PA","id":"e4a0d228eb6be76b","name":"Philadelphia","place_type":"city","bounding_box":rectangle("-75.280284,39.871811 -74.955712,40.13792")},"geo_tag":{"stateID":42,"stateName":"Pennsylvania","countyID":42101,"countyName":"Philadelphia","cityID":4260000,"cityName":"Philadelphia"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929042120368128"),"text":"Finally getting around to packing up my room today","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("3030534190"),"name":"ekoorb","screen_name":"brookeblxckwell","lang":"en","location":"DET~CHI","create_at":date("2015-02-11"),"description":"\u2728the truth is out there\u2728 \uD83D\uDC7B:earthtobroooke \uD83D\uDCF7:brookeblxckwell","followers_count":206,"friends_count":169,"statues_count":2214},"place":{"country":"United States","country_code":"United States","full_name":"Roseville, MI","id":"7b082d03ee1b544d","name":"Roseville","place_type":"city","bounding_box":rectangle("-82.968959,42.479787 -82.902872,42.539749")},"geo_tag":{"stateID":26,"stateName":"Michigan","countyID":26099,"countyName":"Macomb","cityID":2669800,"cityName":"Roseville"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929041990279172"),"text":"@berskeezz 1st floor Jacobs! 103","in_reply_to_status":int64("769928796594135040"),"in_reply_to_user":int64("2344315182"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{2344315182}},"user":{"id":int64("220887418"),"name":"Katie Nageotte","screen_name":"ktnago13","lang":"en","location":"OFalls/Ashland/Kville","create_at":date("2010-11-28"),"description":"I like to ride poles on occasion. #adidas #TeamPacer #pickybars. Set your goals to experience your dreams, and your life will be truly extraordinary.","followers_count":1718,"friends_count":522,"statues_count":9589},"place":{"country":"United States","country_code":"United States","full_name":"Ashland, OH","id":"fefd06f07572907a","name":"Ashland","place_type":"city","bounding_box":rectangle("-82.352896,40.835537 -82.275563,40.893194")},"geo_tag":{"stateID":39,"stateName":"Ohio","countyID":39005,"countyName":"Ashland","cityID":3902568,"cityName":"Ashland"}}
+{"create_at":datetime("2016-08-28T09:06:02.000"),"id":int64("769929042514550785"),"text":"Trump has been a model citizen for years generosity &giving jobs even in movies respected &still ppl r jealous Trump wins Presidency\uD83C\uDDFA\uD83C\uDDF8\uD83C\uDF89","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("4872762849"),"name":"JoAnn","screen_name":"JoAnn82236460","lang":"en","location":"Pennsylvania, USA","create_at":date("2016-02-02"),"description":"I Bleed red white&Blue \u2764\uFE0F\u271DCertified Nurse Aide Daughter of Marine w 2 sons\u2764\uFE0FMe some Trump","followers_count":313,"friends_count":167,"statues_count":4462},"place":{"country":"United States","country_code":"United States","full_name":"Lawnton, PA","id":"cfa6b7cbdb3c503c","name":"Lawnton","place_type":"city","bounding_box":rectangle("-76.813992,40.254739 -76.778085,40.273166")},"geo_tag":{"stateID":42,"stateName":"Pennsylvania","countyID":42043,"countyName":"Dauphin","cityID":4241944,"cityName":"Lawnton"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929042858430464"),"text":"@becccamichele @courtdoz17 come snuggle with me","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("343115245"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{343115245,1613990726}},"user":{"id":int64("513609590"),"name":"Holly Brewer\u2764\uFE0F","screen_name":"Hollypop1717","lang":"en","location":"cypress // huntsville","create_at":date("2012-03-03"),"description":"Instagram @holly_brewer17 ~ SHSU '18 @classicb2016","followers_count":645,"friends_count":765,"statues_count":5143},"place":{"country":"United States","country_code":"United States","full_name":"Huntsville, TX","id":"85d3434ace478e35","name":"Huntsville","place_type":"city","bounding_box":rectangle("-95.600652,30.643296 -95.493887,30.768881")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48471,"countyName":"Walker","cityID":4835528,"cityName":"Huntsville"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929042686451712"),"text":"loooook @JordanHanz!! you're AMAZING \u2728 https://t.co/JsKDz8415b","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{1878020600}},"user":{"id":int64("156829213"),"name":"\u1D0B\u026A\u1D1B\u1D1B\u028F","screen_name":"VeeSikk","lang":"en","location":"ig & sc - veesikk","create_at":date("2010-06-17"),"description":"\u2022 horngry","followers_count":2151,"friends_count":245,"statues_count":37762},"place":{"country":"United States","country_code":"United States","full_name":"Bakersfield, CA","id":"960993b9cfdffda9","name":"Bakersfield","place_type":"city","bounding_box":rectangle("-119.172179,35.255821 -118.878147,35.437982")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6029,"countyName":"Kern","cityID":603526,"cityName":"Bakersfield"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929043172954113"),"text":"@baileythebutt guess I'm goin in after work!!!","in_reply_to_status":int64("769928967608348676"),"in_reply_to_user":int64("126525007"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{126525007}},"user":{"id":int64("126525007"),"name":"witch \u26B0 pussy","screen_name":"baileythebutt","lang":"en","location":"Kissass City, MO","create_at":date("2010-03-25"),"description":"a hot and sweaty midwestern mess. industrial prostitute. \u2692\u26D3they/them she/her and sometimes he/him","followers_count":360,"friends_count":224,"statues_count":17345},"place":{"country":"United States","country_code":"United States","full_name":"Kansas City, MO","id":"9a974dfc8efb32a0","name":"Kansas City","place_type":"city","bounding_box":rectangle("-94.733122,38.868002 -94.385441,39.332095")},"geo_tag":{"stateID":29,"stateName":"Missouri","countyID":29095,"countyName":"Jackson","cityID":2938000,"cityName":"Kansas City"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929042992791555"),"text":"\uD83D\uDE0D https://t.co/tZL2Li81ha","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user":{"id":int64("1728388064"),"name":"future icon \u2728","screen_name":"0fficial_j0sh","lang":"en","location":"atl ","create_at":date("2013-09-04"),"description":"90sjunkie, socially aware. janet and ariana enthusiast. music fanatic. I tweet a lot.","followers_count":814,"friends_count":613,"statues_count":34271},"place":{"country":"United States","country_code":"United States","full_name":"Redan, GA","id":"b39bbbbd69b97fc0","name":"Redan","place_type":"city","bounding_box":rectangle("-84.197462,33.714249 -84.115297,33.761826")},"geo_tag":{"stateID":13,"stateName":"Georgia","countyID":13089,"countyName":"DeKalb","cityID":1363952,"cityName":"Redan"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929043386830848"),"text":"Lol let's try this again @ Strawberry Fields https://t.co/gDamSNlExk","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("720476509"),"name":"Hannah Kahn","screen_name":"H_Kizzzle","lang":"en","location":"null","create_at":date("2012-07-27"),"description":"I drive a Camry and go to Disney -- too nitt to quit PSU 2019","followers_count":489,"friends_count":323,"statues_count":16180},"place":{"country":"United States","country_code":"United States","full_name":"State College, PA","id":"22c613c36f32f0d1","name":"State College","place_type":"city","bounding_box":rectangle("-77.917295,40.749326 -77.798924,40.817749")},"coordinate":point("-77.847069,40.785414"),"geo_tag":{"stateID":42,"stateName":"Pennsylvania","countyID":42027,"countyName":"Centre","cityID":4273808,"cityName":"State College"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929043932086273"),"text":".@Strava can you please support emoji for activity titles. Thanks! \uD83D\uDE01\uD83C\uDFC3\uD83D\uDEB4","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{42924530}},"user":{"id":int64("105062323"),"name":"Colin Gardiner","screen_name":"ColinGardiner","lang":"en","location":"San Francisco","create_at":date("2010-01-14"),"description":"VP of Product/Analytics @Tripping, avid ultra-marathoner, beer/coffee drinker and tireless optimizer of online products. Love me some monetization!","followers_count":376,"friends_count":1043,"statues_count":2323},"place":{"country":"United States","country_code":"United States","full_name":"San Francisco, CA","id":"5a110d312052166f","name":"San Francisco","place_type":"city","bounding_box":rectangle("-122.514926,37.708075 -122.357031,37.833238")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6075,"countyName":"San Francisco","cityID":667000,"cityName":"San Francisco"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929043802148864"),"text":"girl i was sleep https://t.co/vdmF1iP6ny","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("3302842837"),"name":"September 13th\u264D\uFE0F","screen_name":"kaaayla_09","lang":"en","location":"somewhere probably sleep","create_at":date("2015-07-31"),"description":"| sc: kayla91300 | all i need in this life of sin , is my very bestfriend , @woahnijah \u2764\uFE0F","followers_count":462,"friends_count":528,"statues_count":10816},"place":{"country":"United States","country_code":"United States","full_name":"Arlington, TX","id":"6e315e1f96e0450a","name":"Arlington","place_type":"city","bounding_box":rectangle("-97.233811,32.586565 -97.037464,32.817135")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48439,"countyName":"Tarrant","cityID":4804000,"cityName":"Arlington"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929043789619200"),"text":"@positiveimagep @MyBlackMatters https://t.co/SgBudwcGvW","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("514692295"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user_mentions":{{514692295,436721105}},"user":{"id":int64("2811707723"),"name":"Tanya J Ajala","screen_name":"AjalaTanya","lang":"en","location":"Mother Africa","create_at":date("2014-10-06"),"description":"Obama Girl, Keepin It Real, Thinker","followers_count":849,"friends_count":1975,"statues_count":19078},"place":{"country":"United States","country_code":"United States","full_name":"Staten Island, NY","id":"00c55f041e27dc51","name":"Staten Island","place_type":"city","bounding_box":rectangle("-74.255641,40.495865 -74.052253,40.648887")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36085,"countyName":"Richmond","cityID":36085,"cityName":"Staten Island"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044032970754"),"text":"Thank god my boy Miguel didn't have to have surgery so he should have a speedy recovery and my boy Seth \uD83D\uDE4F\uD83C\uDFFE","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("541516851"),"name":"\u26A1\uFE0F\u00A1\u00B0O Ducks\u00B0\u00A1\u26A1\uFE0F","screen_name":"JayKing561","lang":"en","location":"Eugene Oregon","create_at":date("2012-03-31"),"description":"God,Family and Oregon \u270A ! #RipCT&SD #WTD #GoDucks #GoEagles #OKC #Cavs #ChiefsKingdom","followers_count":972,"friends_count":819,"statues_count":71010},"place":{"country":"United States","country_code":"United States","full_name":"Palm Bay, FL","id":"9979d3480f2d1e45","name":"Palm Bay","place_type":"city","bounding_box":rectangle("-80.737408,27.910056 -80.566228,28.063838")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12009,"countyName":"Brevard","cityID":1254000,"cityName":"Palm Bay"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044276109313"),"text":"@eldiablito_72 oh for sure. & it's about obsession. Fincher who is obsessive & meticulous when making a film. It's wonderful","in_reply_to_status":int64("769927309876867073"),"in_reply_to_user":int64("15103682"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{15103682}},"user":{"id":int64("357628677"),"name":"Collin Llewellyn","screen_name":"collinllewellyn","lang":"en","location":"Los Angeles, CA","create_at":date("2011-08-18"),"description":"Writer. #MovieADay list:https://docs.google.com/document/d/1Gx6nAmM1iIiExx36SRegoAW7-4EJfy3BtQIvikosmrY/edit","followers_count":376,"friends_count":395,"statues_count":15437},"place":{"country":"United States","country_code":"United States","full_name":"Los Angeles, CA","id":"3b77caf94bfc81fe","name":"Los Angeles","place_type":"city","bounding_box":rectangle("-118.668404,33.704538 -118.155409,34.337041")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":644000,"cityName":"Los Angeles"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044603174914"),"text":"so happy babe came to visit me \u2764\uFE0F\uD83D\uDC97","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("1384603092"),"name":"aaliyah reyes \u2661","screen_name":"aaliyahreyes_9","lang":"en","location":"WSU","create_at":date("2013-04-27"),"description":"| 8.29.12 \u2661 |","followers_count":756,"friends_count":594,"statues_count":8002},"place":{"country":"United States","country_code":"United States","full_name":"Wichita, KS","id":"1661ada9b2b18024","name":"Wichita","place_type":"city","bounding_box":rectangle("-97.534906,37.562483 -97.152924,37.80531")},"geo_tag":{"stateID":20,"stateName":"Kansas","countyID":20173,"countyName":"Sedgwick","cityID":2079000,"cityName":"Wichita"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044431409156"),"text":"I love u more than I love college. @ Florence, Alabama https://t.co/Ffujgcg5Jj","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("339492609"),"name":"lucifer \u2661","screen_name":"maiamarae","lang":"en","location":"finding myself","create_at":date("2011-07-20"),"description":"18. I ain't shit yea, I know.","followers_count":1483,"friends_count":567,"statues_count":29040},"place":{"country":"United States","country_code":"United States","full_name":"Florence, AL","id":"7ae765412ef88940","name":"Florence","place_type":"city","bounding_box":rectangle("-87.721811,34.77898 -87.580289,34.8896")},"coordinate":point("-87.6629,34.8203"),"geo_tag":{"stateID":1,"stateName":"Alabama","countyID":1077,"countyName":"Lauderdale","cityID":126896,"cityName":"Florence"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044251074561"),"text":"@erich_irwin I'll come to Fairfield whenever I please","in_reply_to_status":int64("769928956267073536"),"in_reply_to_user":int64("2579319880"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{2579319880}},"user":{"id":int64("2632520573"),"name":"Michael Masuck","screen_name":"mike_masuck7","lang":"en","location":"X '18","create_at":date("2014-06-22"),"description":"When in trouble, tuck for double","followers_count":188,"friends_count":198,"statues_count":930},"place":{"country":"United States","country_code":"United States","full_name":"White Oak, OH","id":"f23ba07ebf2e9c82","name":"White Oak","place_type":"city","bounding_box":rectangle("-84.637123,39.204409 -84.569018,39.232544")},"geo_tag":{"stateID":39,"stateName":"Ohio","countyID":39061,"countyName":"Hamilton","cityID":3984812,"cityName":"White Oak"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044531884032"),"text":"Di ako mag pupuyat mamaya.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"tl","is_retweet":false,"user":{"id":int64("722943621427630081"),"name":"Cristine Mae","screen_name":"Sendangxx","lang":"en","location":"Kissimmee, FL","create_at":date("2016-04-20"),"description":"Prinsesa na abnormal \u274C","followers_count":975,"friends_count":2380,"statues_count":7851},"place":{"country":"United States","country_code":"United States","full_name":"Kissimmee, FL","id":"c2809aa3b2c93fb2","name":"Kissimmee","place_type":"city","bounding_box":rectangle("-81.47749,28.250764 -81.327204,28.347977")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12097,"countyName":"Osceola","cityID":1236950,"cityName":"Kissimmee"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929045073010688"),"text":"@NPCoachRalph Well done! #AroundNewPal","in_reply_to_status":int64("769912371636686848"),"in_reply_to_user":int64("1204389294"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"AroundNewPal"}},"user_mentions":{{1204389294}},"user":{"id":int64("2766725447"),"name":"Bringingtech2you","screen_name":"bringingtech2u","lang":"en","location":"New Palestine, IN","create_at":date("2014-09-10"),"description":"Digital Storyteller via Captured Images | Social Media Brand Strategist |#WomenInBusiness Advocate | Micro Blogger | #bringingtech2you","followers_count":816,"friends_count":1897,"statues_count":2103},"place":{"country":"United States","country_code":"United States","full_name":"New Pal","id":"07d9dd62fe082001","name":"New Pal","place_type":"poi","bounding_box":rectangle("-85.9297091,39.7551529 -85.929709,39.755153")},"coordinate":point("-85.929709,39.755153"),"geo_tag":{"stateID":18,"stateName":"Indiana","countyID":18059,"countyName":"Hancock"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044695523328"),"text":"My life motto https://t.co/rNl6KdjI0q","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("1260732067"),"name":"Darclyn","screen_name":"dkatjones14","lang":"en","location":"null","create_at":date("2013-03-11"),"description":"I can do all things through Christ who strengthens me... dancing is my lifestyle 7\u202214\u202216","followers_count":162,"friends_count":250,"statues_count":4092},"place":{"country":"United States","country_code":"United States","full_name":"Porterville, CA","id":"daa5fcec75a430ae","name":"Porterville","place_type":"city","bounding_box":rectangle("-119.080552,36.029555 -118.977713,36.119995")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6107,"countyName":"Tulare","cityID":658240,"cityName":"Porterville"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044343320576"),"text":"First #dragonboat practice this year w/LV Dragon Boat Club. Team practice in a few weeks for the annual... https://t.co/MiOgVPkl4x","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"dragonboat"}},"user":{"id":int64("110500685"),"name":"This Mom Wines","screen_name":"ThisMomWinesLV","lang":"en","location":"Henderson, NV","create_at":date("2010-02-01"),"description":"Crazy-busy #mom and career chick. Recent #ASUgrad, #wine lover, #runner, #community #advocate, #blogger & #Vegas native. Tweets are my own.","followers_count":2345,"friends_count":1702,"statues_count":31425},"place":{"country":"United States","country_code":"United States","full_name":"Nevada, USA","id":"d374fb61a20fb74f","name":"Nevada","place_type":"admin","bounding_box":rectangle("-120.00574,35.002086 -114.039649,42.002208")},"coordinate":point("-114.93106327,36.10256306"),"geo_tag":{"stateID":32,"stateName":"Nevada","countyID":32003,"countyName":"Clark","cityID":3231900,"cityName":"Henderson"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929045538549760"),"text":"I've never had a Mcgriddle","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("4472590819"),"name":"Tay\u2744\uFE0F","screen_name":"_LilTaaay","lang":"en","location":"Houston, TX","create_at":date("2015-12-05"),"description":"varsity cheerleader \u2728","followers_count":455,"friends_count":344,"statues_count":6916},"place":{"country":"United States","country_code":"United States","full_name":"Bellaire, TX","id":"bc39561011b12bc6","name":"Bellaire","place_type":"city","bounding_box":rectangle("-95.484647,29.689372 -95.447422,29.725996")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48201,"countyName":"Harris","cityID":4807300,"cityName":"Bellaire"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044984893440"),"text":"imma tell my grandparents to bring me some more food \uD83D\uDE0A","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("341283351"),"name":"$.","screen_name":"hayred_","lang":"en","location":"New Orleans, LA \u2708\uFE0F Dallas, TX","create_at":date("2011-07-23"),"description":"don't DM me, you're waisting your time","followers_count":506,"friends_count":354,"statues_count":16551},"place":{"country":"United States","country_code":"United States","full_name":"Denton, TX","id":"f77b0bf942a40070","name":"Denton","place_type":"city","bounding_box":rectangle("-97.187543,33.128938 -97.041998,33.276053")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48121,"countyName":"Denton","cityID":4819972,"cityName":"Denton"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929045358194688"),"text":"@iHipsterLee do a collab with @LILUZIVERT \uD83D\uDD25\uD83D\uDD25\uD83D\uDD25\uD83D\uDD25","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("189801036"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{189801036,1599608046}},"user":{"id":int64("238377468"),"name":"\u2728Kalynn\u2728","screen_name":"KalynnMiles","lang":"en","location":"Los Angeles, CA","create_at":date("2011-01-14"),"description":"\u2728I'm poppin\u2728","followers_count":443,"friends_count":345,"statues_count":1930},"place":{"country":"United States","country_code":"United States","full_name":"Los Angeles, CA","id":"3b77caf94bfc81fe","name":"Los Angeles","place_type":"city","bounding_box":rectangle("-118.668404,33.704538 -118.155409,34.337041")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":644000,"cityName":"Los Angeles"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929044225908736"),"text":"https://t.co/v8Y7zicNgf","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user":{"id":int64("3650680279"),"name":"Guillermo Silva","screen_name":"gusilemo","lang":"en","location":"null","create_at":date("2015-09-22"),"description":"null","followers_count":2,"friends_count":0,"statues_count":24098},"place":{"country":"United States","country_code":"United States","full_name":"California, USA","id":"fbd6d2f5a4e4a15e","name":"California","place_type":"admin","bounding_box":rectangle("-124.482003,32.528832 -114.131212,42.009519")},"coordinate":point("-123.01247162,37.69942798"),"geo_tag":{"stateID":6,"stateName":"California","countyID":6075,"countyName":"San Francisco","cityID":667000,"cityName":"San Francisco"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929046318645248"),"text":"Doctor thinks Bella could be here anytime between 5-7 weeks, I cannot wait to meet her \uD83D\uDE0D\uD83D\uDC76\uD83C\uDFFC","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2878634469"),"name":"Brooklyn","screen_name":"brooooklynbragg","lang":"en","location":"null","create_at":date("2014-11-15"),"description":"my heart belongs to Tyler & Isabella Shannon. soon to be mommy Oct. 2016","followers_count":123,"friends_count":181,"statues_count":3186},"place":{"country":"United States","country_code":"United States","full_name":"Elwood, IN","id":"1fcc8e05dd8c7569","name":"Elwood","place_type":"city","bounding_box":rectangle("-85.869124,40.233425 -85.807713,40.295039")},"geo_tag":{"stateID":18,"stateName":"Indiana","countyID":18095,"countyName":"Madison","cityID":1821070,"cityName":"Elwood"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929046000017409"),"text":"Man I'm still drunk lmao what? Today's a good day","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("706944049429684225"),"name":"Robert Wood","screen_name":"robertwoodam","lang":"en","location":"Tennessee, USA","create_at":date("2016-03-07"),"description":"Let's go. Big plays","followers_count":178,"friends_count":125,"statues_count":6458},"place":{"country":"United States","country_code":"United States","full_name":"Gallatin, TN","id":"e08aaac2b23fd3a3","name":"Gallatin","place_type":"city","bounding_box":rectangle("-86.550888,36.316163 -86.378795,36.428963")},"geo_tag":{"stateID":47,"stateName":"Tennessee","countyID":47165,"countyName":"Sumner","cityID":4728540,"cityName":"Gallatin"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929045827985408"),"text":"McChicken https://t.co/WAwv9WAtTG","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("993977766"),"name":"sheeray'a","screen_name":"Poesia_Politica","lang":"en","location":"Cleveland","create_at":date("2012-12-06"),"description":"null","followers_count":49,"friends_count":239,"statues_count":808},"place":{"country":"United States","country_code":"United States","full_name":"Columbus, OH","id":"3df0e3eb1e91170b","name":"Columbus","place_type":"city","bounding_box":rectangle("-83.200169,39.832013 -82.771375,40.157354")},"geo_tag":{"stateID":39,"stateName":"Ohio","countyID":39049,"countyName":"Franklin","cityID":3918000,"cityName":"Columbus"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929045828067330"),"text":"MY HUSBAND IS SO HOT LOOK AT HIM https://t.co/G3CwSHVCz4","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("549674066"),"name":"pop princess","screen_name":"CarlyShore","lang":"en","location":"null","create_at":date("2012-04-09"),"description":"null","followers_count":447,"friends_count":411,"statues_count":5909},"place":{"country":"United States","country_code":"United States","full_name":"Palm Harbor, FL","id":"c5a8a3c8523b835e","name":"Palm Harbor","place_type":"city","bounding_box":rectangle("-82.786128,28.048627 -82.707574,28.1231")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12103,"countyName":"Pinellas","cityID":1254350,"cityName":"Palm Harbor"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929045375016964"),"text":"there's nothing \" lowkey \" \uD83D\uDD11 about my love for him \uD83D\uDC93 https://t.co/sut8yzIzBR","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2348377802"),"name":"l i l \u2743","screen_name":"xo_lilly02","lang":"en","location":"null","create_at":date("2014-02-17"),"description":"http://m.apple \uD83D\uDC9B","followers_count":420,"friends_count":368,"statues_count":9383},"place":{"country":"United States","country_code":"United States","full_name":"Los Angeles, CA","id":"3b77caf94bfc81fe","name":"Los Angeles","place_type":"city","bounding_box":rectangle("-118.668404,33.704538 -118.155409,34.337041")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":644000,"cityName":"Los Angeles"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929046645874689"),"text":"Drink more water.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("719302034"),"name":"you","screen_name":"LiferMusic208","lang":"en","location":"Pocatello IDAHO","create_at":date("2012-07-26"),"description":"Real is Rare. Who really cares? if you don't like the road your on start paving another one.","followers_count":1248,"friends_count":726,"statues_count":40426},"place":{"country":"United States","country_code":"United States","full_name":"Pocatello, ID","id":"d15336e5b45c79bb","name":"Pocatello","place_type":"city","bounding_box":rectangle("-112.496051,42.806434 -112.373604,42.920598")},"geo_tag":{"stateID":16,"stateName":"Idaho","countyID":16005,"countyName":"Bannock","cityID":1664090,"cityName":"Pocatello"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929046243340288"),"text":"luv you syd!! \u2764\uFE0F\u2764\uFE0F\u2764\uFE0F\u2764\uFE0F https://t.co/OfPWCtDQ1n","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("3085265295"),"name":"lailai","screen_name":"laiyoncee","lang":"en","location":"null","create_at":date("2015-03-10"),"description":"then there's me","followers_count":1078,"friends_count":407,"statues_count":22582},"place":{"country":"United States","country_code":"United States","full_name":"Chicago, IL","id":"1d9a5370a355ab0c","name":"Chicago","place_type":"city","bounding_box":rectangle("-87.940033,41.644102 -87.523993,42.023067")},"geo_tag":{"stateID":17,"stateName":"Illinois","countyID":17031,"countyName":"Cook","cityID":1714000,"cityName":"Chicago"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929046494937092"),"text":"Me & ChiChi Is Due For A Date","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("237555768"),"name":"kkay\u2763\uFE0F","screen_name":"_aaaalyak","lang":"en","location":"null","create_at":date("2011-01-12"),"description":"null","followers_count":1147,"friends_count":693,"statues_count":87933},"place":{"country":"United States","country_code":"United States","full_name":"Manhattan, NY","id":"01a9a39529b27f36","name":"Manhattan","place_type":"city","bounding_box":rectangle("-74.026675,40.683935 -73.910408,40.877483")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36061,"countyName":"New York","cityID":36061,"cityName":"Manhattan"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929046817779716"),"text":"The new @fettywap track is giving me life rn \uD83D\uDE0E","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{2601175671}},"user":{"id":int64("816653695"),"name":"Katie.","screen_name":"KatieTaylorGang","lang":"en","location":"Houston, Texas","create_at":date("2012-09-10"),"description":"Marketing Director. University of Houston Grad. Go Coogs","followers_count":760,"friends_count":474,"statues_count":7034},"place":{"country":"United States","country_code":"United States","full_name":"Houston, TX","id":"1c69a67ad480e1b1","name":"Houston","place_type":"city","bounding_box":rectangle("-95.823268,29.522325 -95.069705,30.154665")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48201,"countyName":"Harris","cityID":4835000,"cityName":"Houston"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929046587215874"),"text":"Lol @whoooolz YES way @flumemusic set me free last night \uD83D\uDC50\uD83C\uDFFC\uD83D\uDC45\u2728\uD83D\uDC99 @\u2026 https://t.co/mA4a5ZOFFX","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{143445593,297882651}},"user":{"id":int64("502100576"),"name":"adolf","screen_name":"rissyyyyyyyy","lang":"en","location":"lost in the crowd","create_at":date("2012-02-24"),"description":"dude I'm a mermaid","followers_count":506,"friends_count":431,"statues_count":23015},"place":{"country":"United States","country_code":"United States","full_name":"Queens, NY","id":"00c39537733fa112","name":"Queens","place_type":"city","bounding_box":rectangle("-73.962582,40.541722 -73.699793,40.800037")},"coordinate":point("-73.84927797,40.71991471"),"geo_tag":{"stateID":36,"stateName":"New York","countyID":36081,"countyName":"Queens","cityID":36081,"cityName":"Queens"}}
+{"create_at":datetime("2016-08-28T09:06:03.000"),"id":int64("769929046339780609"),"text":"@heyifeellike @rachel_dufty","in_reply_to_status":int64("769781692663943169"),"in_reply_to_user":int64("1468571624"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user_mentions":{{1468571624,2374524218}},"user":{"id":int64("2396646080"),"name":"Hayden(:","screen_name":"haydenaudrey02","lang":"en","location":"null","create_at":date("2014-03-18"),"description":"F.C. Union Mi\u26BD\uFE0F","followers_count":406,"friends_count":1343,"statues_count":5398},"place":{"country":"United States","country_code":"United States","full_name":"Troy, MI","id":"4e284ea3fff91c09","name":"Troy","place_type":"city","bounding_box":rectangle("-83.209206,42.533782 -83.086881,42.624224")},"geo_tag":{"stateID":26,"stateName":"Michigan","countyID":26125,"countyName":"Oakland","cityID":2680700,"cityName":"Troy"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929046914375680"),"text":"@GFOOLEY 125 will suffice I'm not even gonna tax you","in_reply_to_status":int64("769928602423062528"),"in_reply_to_user":int64("237523183"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{237523183}},"user":{"id":int64("930847412"),"name":"yoko chanel","screen_name":"scozzyyoko","lang":"en","location":"DC||ATL","create_at":date("2012-11-06"),"description":"yoko has returned they cry...","followers_count":1094,"friends_count":555,"statues_count":71183},"place":{"country":"United States","country_code":"United States","full_name":"Atlanta, GA","id":"8173485c72e78ca5","name":"Atlanta","place_type":"city","bounding_box":rectangle("-84.576827,33.647503 -84.289385,33.886886")},"geo_tag":{"stateID":13,"stateName":"Georgia","countyID":13121,"countyName":"Fulton","cityID":1304000,"cityName":"Atlanta"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929046914240513"),"text":"@Ms_Miri I did it once, people were hardcore...bring a towel you gonna sweat girl","in_reply_to_status":int64("769927228146671616"),"in_reply_to_user":int64("275801072"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{275801072}},"user":{"id":int64("224479831"),"name":"ImranQ","screen_name":"ImranQ805","lang":"en","location":"Studio City, Los Angeles","create_at":date("2010-12-08"),"description":"#Clippers #Angels #Rams #GKG #IQjotd. I don't want to grow up. I don't want to grow up. If growing up means being like you. Then I dont want to be like you.\u270C","followers_count":551,"friends_count":644,"statues_count":37183},"place":{"country":"United States","country_code":"United States","full_name":"Los Angeles, CA","id":"3b77caf94bfc81fe","name":"Los Angeles","place_type":"city","bounding_box":rectangle("-118.668404,33.704538 -118.155409,34.337041")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":644000,"cityName":"Los Angeles"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929047052779520"),"text":"were arguing over how much boneless wings cost. \uD83D\uDE11","in_reply_to_status":int64("769919242804363265"),"in_reply_to_user":int64("2427775609"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2427775609"),"name":"bails","screen_name":"xoxobaii","lang":"en","location":"nature","create_at":date("2014-04-04"),"description":"Lucidious // Sincerely Yours","followers_count":815,"friends_count":945,"statues_count":18131},"place":{"country":"United States","country_code":"United States","full_name":"Durand, MI","id":"5c14e13491807c2c","name":"Durand","place_type":"city","bounding_box":rectangle("-84.011944,42.900403 -83.972297,42.928967")},"geo_tag":{"stateID":26,"stateName":"Michigan","countyID":26155,"countyName":"Shiawassee","cityID":2623500,"cityName":"Durand"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929046922780673"),"text":"Realizing our collective power, teaching by example, Hot Dang - We're Alive! #yogarocksthepark\u2026 https://t.co/iS6FSB8Ai6","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"yogarocksthepark"}},"user":{"id":int64("304484756"),"name":"Ti - theCodingYogini","screen_name":"TiWegmeyer","lang":"en","location":"Denver, CO","create_at":date("2011-05-24"),"description":"Living my Yoga","followers_count":39,"friends_count":256,"statues_count":189},"place":{"country":"United States","country_code":"United States","full_name":"Denver, CO","id":"b49b3053b5c25bf5","name":"Denver","place_type":"city","bounding_box":rectangle("-105.109815,39.614151 -104.734372,39.812975")},"coordinate":point("-104.993366,39.73087"),"geo_tag":{"stateID":8,"stateName":"Colorado","countyID":8031,"countyName":"Denver","cityID":820000,"cityName":"Denver"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929047686180864"),"text":"@WhiteBoyWade15 @iRose_ThanBloom we studied under THE bird brown","in_reply_to_status":int64("769928139401113600"),"in_reply_to_user":int64("499427722"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{499427722,608360025}},"user":{"id":int64("1250458676"),"name":"Justin Green","screen_name":"Wolver_Green","lang":"en","location":"null","create_at":date("2013-03-07"),"description":"CCCC SU 19'","followers_count":421,"friends_count":180,"statues_count":9398},"place":{"country":"United States","country_code":"United States","full_name":"Salisbury, MD","id":"00caf39d503a84e9","name":"Salisbury","place_type":"city","bounding_box":rectangle("-75.71412,38.28923 -75.487032,38.431613")},"geo_tag":{"stateID":24,"stateName":"Maryland","countyID":24045,"countyName":"Wicomico","cityID":2469925,"cityName":"Salisbury"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929047740715008"),"text":"I like how Hansen wears that random piece of leather to hide his gut. Who cares about any of these fat fucks?\n#WatchROH","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"WatchROH"}},"user":{"id":int64("74214410"),"name":"sevenwithcheese","screen_name":"sevenwithcheese","lang":"en","location":"Pittsburgh, PA","create_at":date("2009-09-14"),"description":"We are not the same. I'm an American. You're a sick asshole.","followers_count":1244,"friends_count":404,"statues_count":221941},"place":{"country":"United States","country_code":"United States","full_name":"Pittsburgh, PA","id":"946ccd22e1c9cda1","name":"Pittsburgh","place_type":"city","bounding_box":rectangle("-80.095586,40.36158 -79.865793,40.501198")},"geo_tag":{"stateID":42,"stateName":"Pennsylvania","countyID":42003,"countyName":"Allegheny","cityID":4261000,"cityName":"Pittsburgh"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929047270764544"),"text":"who do u love i wanna lounge wit u https://t.co/n9XZWYCwts","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("407377447"),"name":"Amy Pham","screen_name":"iamamypham","lang":"en","location":"Los Angeles, CA","create_at":date("2011-11-07"),"description":"DJ / HOST / ACTRESS / PIZZA","followers_count":20074,"friends_count":314,"statues_count":4363},"place":{"country":"United States","country_code":"United States","full_name":"FYF Fest","id":"07d9e1fb67882001","name":"FYF Fest","place_type":"poi","bounding_box":rectangle("-118.2886941,34.015403899999995 -118.288694,34.015404")},"coordinate":point("-118.288694,34.015404"),"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":644000,"cityName":"Los Angeles"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929048281710592"),"text":"When you're sick but still have to run the sound board for morning #worship! Early mornings\u2026 https://t.co/gYwlyLBVE9","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"worship"}},"user":{"id":int64("1073904494"),"name":"Sykter Ink Media","screen_name":"SykterInkMedia","lang":"en","location":"Canada","create_at":date("2013-01-09"),"description":"Mission Statement: To provide a constant source of encouragement in order to build up and connect us all as one universal community.","followers_count":87,"friends_count":367,"statues_count":394},"place":{"country":"Canada","country_code":"Canada","full_name":"Prince George, British Columbia","id":"0582c884a3ab504d","name":"Prince George","place_type":"city","bounding_box":rectangle("-122.900929,53.812891 -122.604368,54.043014")},"coordinate":point("-122.80065161,53.85577729"),"geo_tag":{"stateID":2,"stateName":"Alaska","countyID":2016,"countyName":"Aleutians West"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929049053343744"),"text":"anyone going to lagoon today?","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2730856314"),"name":"noah bachman","screen_name":"noahb7_","lang":"en","location":"Salt Lake, UT","create_at":date("2014-08-13"),"description":"nobody's that good","followers_count":334,"friends_count":353,"statues_count":456},"place":{"country":"United States","country_code":"United States","full_name":"South Jordan, UT","id":"b76a96fd566f9172","name":"South Jordan","place_type":"city","bounding_box":rectangle("-112.031592,40.536852 -111.894963,40.582109")},"geo_tag":{"stateID":49,"stateName":"Utah","countyID":49035,"countyName":"Salt Lake","cityID":4970850,"cityName":"South Jordan"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929048990580736"),"text":"For the last 3 weeks I've been working and have no time it chill \uD83D\uDE1E shit sucks","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("1048872067"),"name":"\u017Eoomonkey","screen_name":"Pharaoh_Jr","lang":"en","location":"Winter Haven, FL","create_at":date("2012-12-30"),"description":"hey I'm junior & yeah| Carrabba's| gym|Game designer| Artist| #SnowGang| Instagram: pharaoh_jr101 | Snapchat: juniorhyppolite|","followers_count":524,"friends_count":391,"statues_count":10339},"place":{"country":"United States","country_code":"United States","full_name":"Winter Haven, FL","id":"6098c1080dfc7af3","name":"Winter Haven","place_type":"city","bounding_box":rectangle("-81.770135,27.939559 -81.627638,28.095052")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12105,"countyName":"Polk","cityID":1278275,"cityName":"Winter Haven"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929048705212417"),"text":"Enjoying this show. Ozzy+History =comfort. \uD83D\uDE43 https://t.co/Qg6gLDkodl","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("297078789"),"name":"Robin Courter","screen_name":"tuesdaydreams","lang":"en","location":"Winterfell","create_at":date("2011-05-11"),"description":"Forever Presley's person. Junk Food Vegetarian. Born on a Tuesday. Here for the chatter. #StrongerTogether","followers_count":153,"friends_count":592,"statues_count":3212},"place":{"country":"United States","country_code":"United States","full_name":"Coram, NY","id":"92e1e697abf56722","name":"Coram","place_type":"city","bounding_box":rectangle("-73.039115,40.837693 -72.972416,40.921065")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36103,"countyName":"Suffolk","cityID":3618157,"cityName":"Coram"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929049737195520"),"text":"Sunday enjoying the reminding warm days of summer","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("163215561"),"name":"Jose Espejo","screen_name":"jlespejo72","lang":"en","location":"Chicago, Illinois","create_at":date("2010-07-05"),"description":"Architect/Designer","followers_count":101,"friends_count":426,"statues_count":2248},"place":{"country":"United States","country_code":"United States","full_name":"Chicago, IL","id":"1d9a5370a355ab0c","name":"Chicago","place_type":"city","bounding_box":rectangle("-87.940033,41.644102 -87.523993,42.023067")},"geo_tag":{"stateID":17,"stateName":"Illinois","countyID":17031,"countyName":"Cook","cityID":1714000,"cityName":"Chicago"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929050068426752"),"text":"Great race fellas. Congrats @nico_rosberg @danielricciardo @LewisHamilton . That was a fun weekend. #BelgiumGP #F1","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"BelgiumGP","F1"}},"user_mentions":{{115044590,214413743,213969309}},"user":{"id":int64("367388829"),"name":"Chris Bythewood","screen_name":"Cbyt","lang":"en","location":"LA/ NY","create_at":date("2011-09-03"),"description":"Writer- Shots Fired on Fox. Lover of all Sports. Tryer of new things. #ShotsFiredFox premiering Spring 2017 http://www.fox.com/shots-fired","followers_count":514,"friends_count":469,"statues_count":4658},"place":{"country":"United States","country_code":"United States","full_name":"Los Angeles, CA","id":"3b77caf94bfc81fe","name":"Los Angeles","place_type":"city","bounding_box":rectangle("-118.668404,33.704538 -118.155409,34.337041")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":644000,"cityName":"Los Angeles"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929049808396289"),"text":"@StevenJV Interesting. How can you leave one Tart in the package? It's already open! Can't do it. Gotta eat both. It's a moral imperative.","in_reply_to_status":int64("769928647310315520"),"in_reply_to_user":int64("1174791"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{1174791}},"user":{"id":int64("33433404"),"name":"Jerry Fahrni","screen_name":"JFahrni","lang":"en","location":"Fresno, Caifornia","create_at":date("2009-04-19"),"description":"Pharmacist with interest in automation and technology whether it's related to healthcare or not. Consultant, speaker, dad, husband, football fan. Central Valley","followers_count":1718,"friends_count":380,"statues_count":22054},"place":{"country":"United States","country_code":"United States","full_name":"Fresno, CA","id":"944c03c1d85ef480","name":"Fresno","place_type":"city","bounding_box":rectangle("-119.93251,36.648905 -119.632419,36.923179")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6019,"countyName":"Fresno","cityID":627000,"cityName":"Fresno"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929051008106496"),"text":"Beginning set given land form. Forth i seed signs she'd meat light divided they're. Make.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("3976141697"),"name":"Patricia Snyder","screen_name":"pasnoda","lang":"en","location":"null","create_at":date("2015-10-16"),"description":"null","followers_count":17,"friends_count":0,"statues_count":32985},"place":{"country":"United States","country_code":"United States","full_name":"California, USA","id":"fbd6d2f5a4e4a15e","name":"California","place_type":"admin","bounding_box":rectangle("-124.482003,32.528832 -114.131212,42.009519")},"coordinate":point("-123.01228019,37.69913047"),"geo_tag":{"stateID":6,"stateName":"California","countyID":6075,"countyName":"San Francisco","cityID":667000,"cityName":"San Francisco"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929051205144576"),"text":"Truth by @ThePirch #pirch #kitchen #design #home #quotes #food #cupcakes #sweets #dessert #fun\u2026 https://t.co/37zkjN7XBS","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"pirch","kitchen","design","home","quotes","food","cupcakes","sweets","dessert","fun"}},"user":{"id":int64("40921996"),"name":"Sarah Lipton","screen_name":"sarah4canes","lang":"en","location":"San Diego, CA","create_at":date("2009-05-18"),"description":"Floridian trying out West Coast life - PR pro - USMC wife","followers_count":511,"friends_count":1921,"statues_count":1951},"place":{"country":"United States","country_code":"United States","full_name":"San Diego, CA","id":"a592bd6ceb1319f7","name":"San Diego","place_type":"city","bounding_box":rectangle("-117.282538,32.53962 -116.92744,33.080404")},"coordinate":point("-117.2098658,32.8736134"),"geo_tag":{"stateID":6,"stateName":"California","countyID":6073,"countyName":"San Diego","cityID":666000,"cityName":"San Diego"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929051662200832"),"text":"If he can't look you in your eyes, he lying.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("1975745216"),"name":"Jeremy Bazan","screen_name":"BazanJeremy","lang":"en","location":"Weslaco, tx ","create_at":date("2013-10-20"),"description":"22. iLift. DreamChaser. Father First. SC: jeremy_bazan24 \u270C\uFE0F","followers_count":843,"friends_count":1443,"statues_count":6013},"place":{"country":"United States","country_code":"United States","full_name":"Gonzales, LA","id":"82a3427fa492ed52","name":"Gonzales","place_type":"city","bounding_box":rectangle("-90.959148,30.167772 -90.820958,30.292323")},"geo_tag":{"stateID":22,"stateName":"Louisiana","countyID":22005,"countyName":"Ascension","cityID":2229850,"cityName":"Gonzales"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929049061810176"),"text":"\uD83D\uDE0C\uD83D\uDE0C\uD83D\uDE0C https://t.co/IPl1mF3ZQL","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user":{"id":int64("2209223862"),"name":"b moore","screen_name":"superradbrad","lang":"en","location":"hails","create_at":date("2013-11-22"),"description":"null","followers_count":285,"friends_count":329,"statues_count":2972},"place":{"country":"United States","country_code":"United States","full_name":"Tulsa, OK","id":"cb74aaf709812e0f","name":"Tulsa","place_type":"city","bounding_box":rectangle("-96.065628,35.968624 -95.761656,36.250159")},"geo_tag":{"stateID":40,"stateName":"Oklahoma","countyID":40143,"countyName":"Tulsa","cityID":4075000,"cityName":"Tulsa"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929049661698048"),"text":"Back To School Dance \u2B50\uFE0F\uD83D\uDC9E\uD83D\uDD25 https://t.co/YlTODhiAut","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2409854756"),"name":"Mariee\u2764\uFE0F","screen_name":"JohnmarieB","lang":"en","location":"null","create_at":date("2014-03-24"),"description":"~There is no fear in love, but Gods perfect love drives out fear~ 1 John 4:18 DKG\u2764\uFE0F Young&Blessed","followers_count":1450,"friends_count":1386,"statues_count":15383},"place":{"country":"United States","country_code":"United States","full_name":"Avondale, LA","id":"a84a422764eaf197","name":"Avondale","place_type":"city","bounding_box":rectangle("-90.221926,29.881335 -90.167855,29.930316")},"geo_tag":{"stateID":22,"stateName":"Louisiana","countyID":22051,"countyName":"Jefferson","cityID":2203810,"cityName":"Avondale"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929051481993216"),"text":"@Yummiieee__ Don't regret it though lol","in_reply_to_status":int64("769928950395068416"),"in_reply_to_user":int64("1400900870"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{1400900870}},"user":{"id":int64("341280524"),"name":"Master Debater \u2649","screen_name":"LameGuyTy","lang":"en","location":"CLEVELAND","create_at":date("2011-07-23"),"description":"In order to succeed your desire for success should be greater than your fear of failure. 3-7-14 But if you really wanna go hard.... U.S Army 13B","followers_count":1250,"friends_count":1116,"statues_count":33903},"place":{"country":"United States","country_code":"United States","full_name":"Lakewood, OH","id":"888482aa70a3bc61","name":"Lakewood","place_type":"city","bounding_box":rectangle("-81.8375,41.463245 -81.768603,41.49759")},"geo_tag":{"stateID":39,"stateName":"Ohio","countyID":39035,"countyName":"Cuyahoga","cityID":3941664,"cityName":"Lakewood"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929052228427776"),"text":"I havent been that high in a while","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("1317722118"),"name":"Erikson","screen_name":"__ErikV","lang":"en","location":"Des Moines, IA","create_at":date("2013-03-30"),"description":"ig : Evj420","followers_count":360,"friends_count":283,"statues_count":7010},"place":{"country":"United States","country_code":"United States","full_name":"Des Moines, IA","id":"1c67f9d9cbae7f69","name":"Des Moines","place_type":"city","bounding_box":rectangle("-93.709504,41.501409 -93.503235,41.651466")},"geo_tag":{"stateID":19,"stateName":"Iowa","countyID":19153,"countyName":"Polk","cityID":1921000,"cityName":"Des Moines"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929051028848640"),"text":"Lovely night celebrating @chargergirl80 birthday with @maggi09 - happy birthday Glynnda love you #friends \uD83D\uDE18\u2764\uFE0F https://t.co/ia6b44bMgR","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"friends"}},"user_mentions":{{37336740,19871655}},"user":{"id":int64("375300259"),"name":"Cassy Thorpe","screen_name":"cassythorpe","lang":"en","location":"Lewisham, South East London","create_at":date("2011-09-17"),"description":"I love london & my bajan roots","followers_count":168,"friends_count":161,"statues_count":1989},"place":{"country":"United States","country_code":"United States","full_name":"Paradise, NV","id":"8fa6d7a33b83ef26","name":"Paradise","place_type":"city","bounding_box":rectangle("-115.209254,35.984784 -115.061076,36.137145")},"geo_tag":{"stateID":32,"stateName":"Nevada","countyID":32003,"countyName":"Clark","cityID":3254600,"cityName":"Paradise"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929052350210048"),"text":"Sept. 10 cant get here any faster great birthday present to me @CMPunk ..good luck dude.im pulling for ya man!","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{177345928}},"user":{"id":int64("921422222"),"name":"chad hoover dimera","screen_name":"chadhooverdimer","lang":"en","location":"anywhere town usa","create_at":date("2012-11-02"),"description":"white dude","followers_count":81,"friends_count":99,"statues_count":529},"place":{"country":"United States","country_code":"United States","full_name":"Dundalk, MD","id":"808665307aa255fe","name":"Dundalk","place_type":"city","bounding_box":rectangle("-76.529818,39.222702 -76.447351,39.306282")},"geo_tag":{"stateID":24,"stateName":"Maryland","countyID":24005,"countyName":"Baltimore","cityID":2423975,"cityName":"Dundalk"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929052345999360"),"text":"on sum faded luv \uD83D\uDE0B","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("3925699045"),"name":"Brittaney.","screen_name":"_brittaneytheis","lang":"en","location":"Weatherford, OK","create_at":date("2015-10-17"),"description":"leave every bottle as empty as my promises","followers_count":504,"friends_count":402,"statues_count":9105},"place":{"country":"United States","country_code":"United States","full_name":"Weatherford, OK","id":"7da27eb58e1638f5","name":"Weatherford","place_type":"city","bounding_box":rectangle("-98.725251,35.517222 -98.626215,35.558904")},"geo_tag":{"stateID":40,"stateName":"Oklahoma","countyID":40039,"countyName":"Custer","cityID":4079450,"cityName":"Weatherford"}}
+{"create_at":datetime("2016-08-28T09:06:04.000"),"id":int64("769929050764771328"),"text":"https://t.co/MAJYOYm6wh","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user":{"id":int64("750822905077006336"),"name":"Lori smith","screen_name":"POstableproud","lang":"en","location":"Georgia, USA","create_at":date("2016-07-06"),"description":"Fan girl of #SSD #POstables and Downton Abbey","followers_count":71,"friends_count":126,"statues_count":4267},"place":{"country":"United States","country_code":"United States","full_name":"Trenton, GA","id":"01a31c587ddd0f28","name":"Trenton","place_type":"city","bounding_box":rectangle("-85.523271,34.85373 -85.492643,34.897411")},"geo_tag":{"stateID":13,"stateName":"Georgia","countyID":13083,"countyName":"Dade","cityID":1377372,"cityName":"Trenton"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929053000245249"),"text":"Muero \uD83D\uDC37 https://t.co/jx8mATZYnv","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"es","is_retweet":false,"user":{"id":int64("3249498035"),"name":"Dani Porras \u2741","screen_name":"daniporras6","lang":"en","location":"null","create_at":date("2015-05-12"),"description":"null","followers_count":150,"friends_count":245,"statues_count":5933},"place":{"country":"United States","country_code":"United States","full_name":"Plantation, FL","id":"7df9a00dcf914d5e","name":"Plantation","place_type":"city","bounding_box":rectangle("-80.330201,26.088262 -80.196833,26.160753")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12115,"countyName":"Sarasota","cityID":1257450,"cityName":"Plantation"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929052442337280"),"text":"I bet they all sing the lyrics to this song. https://t.co/TcBC1FG4nK","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("377261318"),"name":"Malik Moor\u00E8","screen_name":"mylifeasmalik","lang":"en","location":"texas","create_at":date("2011-09-21"),"description":"EWU' 20","followers_count":1176,"friends_count":391,"statues_count":32269},"place":{"country":"United States","country_code":"United States","full_name":"Moses Lake, WA","id":"513be78e6847eb1a","name":"Moses Lake","place_type":"city","bounding_box":rectangle("-119.363589,47.078794 -119.241434,47.162879")},"geo_tag":{"stateID":53,"stateName":"Washington","countyID":53025,"countyName":"Grant","cityID":5347245,"cityName":"Moses Lake"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929053130354688"),"text":"@BrittanytB i'm dying lmao","in_reply_to_status":int64("769918557752795136"),"in_reply_to_user":int64("51127889"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{51127889}},"user":{"id":int64("142462927"),"name":"alex","screen_name":"theSASHAfierce_","lang":"en","location":"Temple University","create_at":date("2010-05-10"),"description":"Tiramisu aficionado","followers_count":658,"friends_count":450,"statues_count":37752},"place":{"country":"United States","country_code":"United States","full_name":"Philadelphia, PA","id":"e4a0d228eb6be76b","name":"Philadelphia","place_type":"city","bounding_box":rectangle("-75.280284,39.871811 -74.955712,40.13792")},"geo_tag":{"stateID":42,"stateName":"Pennsylvania","countyID":42101,"countyName":"Philadelphia","cityID":4260000,"cityName":"Philadelphia"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929053381988352"),"text":"Another amazing trip, #NYC see you soon... Next stop #Amsterdam","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"NYC","Amsterdam"}},"user":{"id":int64("4318226487"),"name":"Sup Goyal","screen_name":"Sup_Goyal","lang":"en","location":"New York, NY","create_at":date("2015-11-29"),"description":"Instagram - @supgoyal","followers_count":196,"friends_count":254,"statues_count":77},"place":{"country":"United States","country_code":"United States","full_name":"Times Square","id":"07d9db04e7c85001","name":"Times Square","place_type":"poi","bounding_box":rectangle("-73.98626809999999,40.7564899 -73.986268,40.75649")},"coordinate":point("-73.986268,40.75649"),"geo_tag":{"stateID":36,"stateName":"New York","countyID":36061,"countyName":"New York","cityID":36061,"cityName":"Manhattan"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929053365166080"),"text":"Did you catch that??? https://t.co/6LmiS3qfMZ","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("86201399"),"name":"Johnny Olavarria","screen_name":"Olavarriajs","lang":"en","location":"null","create_at":date("2009-10-29"),"description":"United States Air Force Academy graduate; born Dom Rep & raised in NYC; true believer in what makes America GREAT; proud of our constitution; and NO free shit.","followers_count":558,"friends_count":990,"statues_count":4341},"place":{"country":"United States","country_code":"United States","full_name":"Fort Lauderdale, FL","id":"6c686af766d8429c","name":"Fort Lauderdale","place_type":"city","bounding_box":rectangle("-80.20811,26.080935 -80.090235,26.219801")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12011,"countyName":"Broward","cityID":1224000,"cityName":"Fort Lauderdale"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929053771960320"),"text":"The same people that fronted on you will say \"I always knew you could do it\" when you make it.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("90442557"),"name":"DAT TRILL MF\u264F","screen_name":"TheGr0ve","lang":"en","location":"Long Beach, Ca","create_at":date("2009-11-16"),"description":"Long Beach, CA 90806\nLet The Force Be With You\n#SmokingSectionPodcast Coming soon!!!\nInstagram: @Dtatelb","followers_count":262,"friends_count":158,"statues_count":19256},"place":{"country":"United States","country_code":"United States","full_name":"Los Angeles, CA","id":"3b77caf94bfc81fe","name":"Los Angeles","place_type":"city","bounding_box":rectangle("-118.668404,33.704538 -118.155409,34.337041")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":644000,"cityName":"Los Angeles"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929053952471044"),"text":"@nochiefs @mooshakins @Peetweefish @DaveinTexas Unrelated: I don't think Paddy was even born when the McDLT was introduced or discontinued.","in_reply_to_status":int64("769928376945512448"),"in_reply_to_user":int64("14078674"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{14078674,375194919,705544666469404674,14706964}},"user":{"id":int64("390223259"),"name":"Scott, As Seen on TV","screen_name":"DunsScottus","lang":"en","location":"Virginia / Washington, DC Area","create_at":date("2011-10-13"),"description":"Conservative/libertarian. Catholic. Previously a software engineer for more than a decade, now an attorney. Opinions are my own. \u039C\u039F\u039B\u03A9\u039D \u039B\u0391\u0392\u0395","followers_count":629,"friends_count":530,"statues_count":30605},"place":{"country":"United States","country_code":"United States","full_name":"Alexandria, VA","id":"d6819fe60643ebc1","name":"Alexandria","place_type":"city","bounding_box":rectangle("-77.144435,38.789907 -77.037304,38.844853")},"geo_tag":{"stateID":51,"stateName":"Virginia","countyID":51059,"countyName":"Fairfax","cityID":5101000,"cityName":"Alexandria"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929054182977536"),"text":"@BeckyDamissus yep in Reno now.Thanks again for sorting the tickets ...","in_reply_to_status":int64("769680785859227648"),"in_reply_to_user":int64("594144678"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{594144678}},"user":{"id":int64("79210652"),"name":"Sam Trickett","screen_name":"Samtrickett1","lang":"en","location":"null","create_at":date("2009-10-02"),"description":"Official Page of Sam Trickett - Professional Poker Player - http://Facebook.com/Sam.trickett - http://Instagram.com/samtrickett7","followers_count":57157,"friends_count":7219,"statues_count":3844},"place":{"country":"United States","country_code":"United States","full_name":"Reno, NV","id":"4b25aded08900fd8","name":"Reno","place_type":"city","bounding_box":rectangle("-119.953849,39.350749 -119.700515,39.674123")},"geo_tag":{"stateID":32,"stateName":"Nevada","countyID":32031,"countyName":"Washoe","cityID":3260600,"cityName":"Reno"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929054526943233"),"text":"Not ready for classes to start","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("61600111"),"name":"ash","screen_name":"Ashlyn58","lang":"en","location":"null","create_at":date("2009-07-30"),"description":"Harry Styles and pizza slices\u2764\uFE0F","followers_count":360,"friends_count":335,"statues_count":21224},"place":{"country":"United States","country_code":"United States","full_name":"Nacogdoches, TX","id":"ebf78e870cecf27c","name":"Nacogdoches","place_type":"city","bounding_box":rectangle("-94.704218,31.55485 -94.594405,31.673033")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48347,"countyName":"Nacogdoches","cityID":4850256,"cityName":"Nacogdoches"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929054677970944"),"text":"Do you want to be people IN the way of people OF the way?\n- @cmsloan00 \n#TWCKinston","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"TWCKinston"}},"user_mentions":{{234931359}},"user":{"id":int64("552335577"),"name":"Harley Gracen Smith","screen_name":"harley_gracen94","lang":"en","location":"Kinston, NC","create_at":date("2012-04-12"),"description":"[The heart of man plans his way, but the Lord establishes his steps. \u2022Proverbs 16:9\u2022] //Jesus and Coffee are the way to my heart//","followers_count":314,"friends_count":339,"statues_count":3315},"place":{"country":"United States","country_code":"United States","full_name":"Kinston, NC","id":"1427f884feb4cb70","name":"Kinston","place_type":"city","bounding_box":rectangle("-77.652885,35.227946 -77.543004,35.323017")},"geo_tag":{"stateID":37,"stateName":"North Carolina","countyID":37107,"countyName":"Lenoir","cityID":3735920,"cityName":"Kinston"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929054740844544"),"text":"things i like: $$\nthings i don't like: working til 12 last night and having to be back for a 7 hour shift at 1pm","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2415876928"),"name":"emilia grace","screen_name":"emmyyyyfischer","lang":"en","location":"thotbox pizza","create_at":date("2014-03-16"),"description":"going real slow down the highway of life with no regrets","followers_count":376,"friends_count":571,"statues_count":6117},"place":{"country":"United States","country_code":"United States","full_name":"Noblesville, IN","id":"013e3bc05a18abdc","name":"Noblesville","place_type":"city","bounding_box":rectangle("-86.090354,39.993436 -85.932944,40.10716")},"geo_tag":{"stateID":18,"stateName":"Indiana","countyID":18057,"countyName":"Hamilton","cityID":1854180,"cityName":"Noblesville"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929054539636736"),"text":"Saturday in the park \uD83C\uDDE8\uD83C\uDDF1\u26BE\uFE0F @ Globe Life Park in Arlington https://t.co/O3Q38hOiuD","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("18689312"),"name":"K\u2B50\uFE0FD0\u20E3M\u2B50\uFE0F","screen_name":"KADOMA","lang":"en","location":"3814 S. Shepherd Dr. HTX 77098","create_at":date("2009-01-06"),"description":"FOUNDER: #SneakerSummit\u00AE | CONTRIBUTOR: #TeamTrill #YGET | IG/Snap: Kadoma713 | 2016 DATES: #HTown @nrgparkfan Sun. 12/4, #TexasSkateJam @southsidehtx Sat. 11/5","followers_count":2279,"friends_count":1619,"statues_count":19667},"place":{"country":"United States","country_code":"United States","full_name":"Arlington, TX","id":"6e315e1f96e0450a","name":"Arlington","place_type":"city","bounding_box":rectangle("-97.233811,32.586565 -97.037464,32.817135")},"coordinate":point("-97.08253,32.7513099"),"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48439,"countyName":"Tarrant","cityID":4804000,"cityName":"Arlington"}}
+{"create_at":datetime("2016-08-28T09:06:05.000"),"id":int64("769929054925492228"),"text":"I'm deadass bipolar and I'm crazy so please, do ya self a favor and stay away from me!","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("3452631862"),"name":"Oceana\u2728\u2766","screen_name":"Raven_A_HD","lang":"en","location":"NYC // FL","create_at":date("2015-08-26"),"description":"Weird Is The New Normal","followers_count":41,"friends_count":83,"statues_count":673},"place":{"country":"United States","country_code":"United States","full_name":"The Acreage, FL","id":"0012a07f55190853","name":"The Acreage","place_type":"city","bounding_box":rectangle("-80.346402,26.692974 -80.197405,26.824966")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12099,"countyName":"Palm Beach","cityID":1271564,"cityName":"The Acreage"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929055474819074"),"text":"Absolutely agree, I was talking more about not admitting that you missed on Henry/ignoring it & not growing as scout https://t.co/0f6DIoU72x","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("407329611"),"name":"Blake Allen Murphy","screen_name":"blakemurphy7","lang":"en","location":"Phoenix, AZ","create_at":date("2011-11-07"),"description":"Writer for @revengeofbirds #BirdGang #DraftTwitter Former @AZRattlers Scouting Intern. Mac & Cheese w/ Ketchup is a hill I'll die on forever","followers_count":1365,"friends_count":2251,"statues_count":93250},"place":{"country":"United States","country_code":"United States","full_name":"Gilbert, AZ","id":"006b48995ede9bcc","name":"Gilbert","place_type":"city","bounding_box":rectangle("-111.842244,33.204608 -111.634889,33.385822")},"geo_tag":{"stateID":4,"stateName":"Arizona","countyID":4013,"countyName":"Maricopa","cityID":427400,"cityName":"Gilbert"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929056007577600"),"text":"@Mr__Soto pero porque \uD83D\uDE05","in_reply_to_status":int64("769928406091886593"),"in_reply_to_user":int64("54689831"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"es","is_retweet":false,"user_mentions":{{54689831}},"user":{"id":int64("545641253"),"name":"juliana santana","screen_name":"juuulzsantana","lang":"en","location":"Madrid, Spain","create_at":date("2012-04-04"),"description":"no me enganes","followers_count":3154,"friends_count":830,"statues_count":79117},"place":{"country":"United States","country_code":"United States","full_name":"Phoenix, AZ","id":"5c62ffb0f0f3479d","name":"Phoenix","place_type":"city","bounding_box":rectangle("-112.323914,33.29026 -111.925439,33.815465")},"geo_tag":{"stateID":4,"stateName":"Arizona","countyID":4013,"countyName":"Maricopa","cityID":455000,"cityName":"Phoenix"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929056217276422"),"text":"WE ONLY TIP CITIZENS \nWhy y'all start doing that now? I know you haven't been doing it on the past to people you considered immigrant.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("4686344436"),"name":"Kellie hill","screen_name":"Kelliehillark","lang":"en","location":"Arkansas, USA","create_at":date("2015-12-31"),"description":"Just a bitch for Human Rights and change of non violent crime charges, Marjuianna legalization & BTW Fuck Trump.","followers_count":20,"friends_count":55,"statues_count":101},"place":{"country":"United States","country_code":"United States","full_name":"Benton, AR","id":"e148c35b55163567","name":"Benton","place_type":"city","bounding_box":rectangle("-92.651513,34.53414 -92.514669,34.650234")},"geo_tag":{"stateID":5,"stateName":"Arkansas","countyID":5125,"countyName":"Saline","cityID":505290,"cityName":"Benton"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929056091459584"),"text":"Debating on keeping my beard.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("2177610329"),"name":"Here","screen_name":"WestSideMaj","lang":"en","location":"Cali \u2600\uFE0F","create_at":date("2013-11-11"),"description":"Get to the point, have something to say and don't waste nobody time\niHoop. \n\n- RIP Brianna. my light\u2764","followers_count":591,"friends_count":391,"statues_count":57799},"place":{"country":"United States","country_code":"United States","full_name":"Long Beach, CA","id":"01c060cf466c6ce3","name":"Long Beach","place_type":"city","bounding_box":rectangle("-118.250227,33.732905 -118.063194,33.885438")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6037,"countyName":"Los Angeles","cityID":643000,"cityName":"Long Beach"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929055873400832"),"text":"@brutal_bex just stream anything and I'll watch.","in_reply_to_status":int64("769926941998714880"),"in_reply_to_user":int64("30589032"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{30589032}},"user":{"id":int64("2216908268"),"name":"focus2335 @ DCC","screen_name":"MarkJGusek","lang":"en","location":"TCKT","create_at":date("2013-11-26"),"description":"Life is what you make of it. Proud to be a part of #TCKT PSNID focus2335","followers_count":297,"friends_count":574,"statues_count":4877},"place":{"country":"United States","country_code":"United States","full_name":"Palm Bay, FL","id":"9979d3480f2d1e45","name":"Palm Bay","place_type":"city","bounding_box":rectangle("-80.737408,27.910056 -80.566228,28.063838")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12009,"countyName":"Brevard","cityID":1254000,"cityName":"Palm Bay"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929056028614656"),"text":"If you really know me,then you know i love to dream","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("161862047"),"name":"FEARLESS \u274C","screen_name":"jovonda_maria","lang":"en","location":"null","create_at":date("2010-07-01"),"description":"keep your enemies close and watch your homies","followers_count":598,"friends_count":593,"statues_count":22788},"place":{"country":"United States","country_code":"United States","full_name":"Hampton, VA","id":"2f5f5ba43ec2f5a9","name":"Hampton","place_type":"city","bounding_box":rectangle("-76.451006,36.989568 -76.273546,37.112225")},"geo_tag":{"stateID":51,"stateName":"Virginia","countyID":51650,"countyName":"Hampton","cityID":5135000,"cityName":"Hampton"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929056364158976"),"text":"I wish I was sitting outside doing something so I could actually be enjoying it \u2600\uFE0F","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("822258355"),"name":"Emilyyyy","screen_name":"Marshx22","lang":"en","location":"null","create_at":date("2012-09-13"),"description":"null","followers_count":567,"friends_count":858,"statues_count":8996},"place":{"country":"United States","country_code":"United States","full_name":"Madison Heights, MI","id":"e67427d9b4126602","name":"Madison Heights","place_type":"city","bounding_box":rectangle("-83.126332,42.475983 -83.084518,42.534826")},"geo_tag":{"stateID":26,"stateName":"Michigan","countyID":26125,"countyName":"Oakland","cityID":2650560,"cityName":"Madison Heights"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929056330493952"),"text":"\u0647\u0644 \u064A\u062D\u0642 \u0644\u064A \u0625\u0646 \u0627\u0633\u0623\u0644: \u0647\u0644 \u0635\u0631\u0641\u062A \u0627\u0644\u062D\u0643\u0648\u0645\u0629 \u0627\u0644\u0633\u0639\u0648\u062F\u064A\u0629 \u0623\u0645\u0648\u0627\u0644\u0646\u0627 \u0641\u064A \u0627\u0633\u062A\u062B\u0645\u0627\u0631 \u0641\u0627\u0634\u0644 \u061F #\u0627\u0644\u062A\u062D\u0648\u0644_\u0627\u0644\u0648\u0637\u0646\u064A https://t.co/ppftAfmFtK","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"ar","is_retweet":false,"hashtags":{{"\u0627\u0644\u062A\u062D\u0648\u0644_\u0627\u0644\u0648\u0637\u0646\u064A"}},"user":{"id":int64("178459895"),"name":"Abdulaziz Adnan","screen_name":"Muqaddam88","lang":"en","location":"Michigan","create_at":date("2010-08-14"),"description":"\u0628\u0627\u062D\u062B \u062F\u0643\u062A\u0648\u0631\u0627\u0629 \u0641\u064A \u0645\u062C\u0627\u0644 Advertising + PR. \u0639\u062F\u0648 \u0644\u0644\u0645\u063A\u0627\u0644\u0637\u0627\u062A \u0648\u0627\u0644\u062A\u0644\u0641\u064A\u0642 \u0648\u0645\u062D\u0628 \u0644\u0623\u0635\u062D\u0627\u0628 \u0627\u0644\u062A\u0641\u0643\u064A\u0631 \u0627\u0644\u0645\u0639\u0642\u062F. Moto: \u0639\u062F\u0645 \u0641\u0647\u0645\u0643 \u0644\u0634\u064A\u0621 \u0644\u064A\u0633 \u062F\u0644\u064A\u0644\u0627 \u0639\u0644\u0649 \u0639\u062F\u0645 \u0648\u062C\u0648\u062F\u0647","followers_count":391,"friends_count":96,"statues_count":18375},"place":{"country":"United States","country_code":"United States","full_name":"Lansing, MI","id":"91eb113282d003a1","name":"Lansing","place_type":"city","bounding_box":rectangle("-84.63184,42.618566 -84.483958,42.805532")},"geo_tag":{"stateID":26,"stateName":"Michigan","countyID":26065,"countyName":"Ingham","cityID":2646000,"cityName":"Lansing"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929056720609280"),"text":"Championship game in an hour \u26BD\uFE0F","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("419300409"),"name":"Anton Jay","screen_name":"AntonJay_","lang":"en","location":"School of Architecture","create_at":date("2011-11-22"),"description":"\u03A9\u0394\u03A6 Portland State","followers_count":335,"friends_count":397,"statues_count":3908},"place":{"country":"United States","country_code":"United States","full_name":"Hillsboro, OR","id":"01bd241973160cac","name":"Hillsboro","place_type":"city","bounding_box":rectangle("-123.011705,45.488124 -122.859355,45.573434")},"geo_tag":{"stateID":41,"stateName":"Oregon","countyID":41067,"countyName":"Washington","cityID":4134100,"cityName":"Hillsboro"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929057840402432"),"text":"@ACE_caseyy I do it cause our moms would be my ass if I didnt \uD83D\uDE09 but thank you Casey \uD83D\uDE0A\u2764\uFE0F","in_reply_to_status":int64("769913049918472192"),"in_reply_to_user":int64("2460410515"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{2460410515}},"user":{"id":int64("289634970"),"name":"Jswag","screen_name":"Jacob_Wagner_22","lang":"en","location":"null","create_at":date("2011-04-28"),"description":"snappin necks and cashin checks","followers_count":676,"friends_count":648,"statues_count":2137},"place":{"country":"United States","country_code":"United States","full_name":"Sioux Falls, SD","id":"3605db70c687a01d","name":"Sioux Falls","place_type":"city","bounding_box":rectangle("-96.839502,43.465641 -96.622783,43.616388")},"geo_tag":{"stateID":46,"stateName":"South Dakota","countyID":46099,"countyName":"Minnehaha","cityID":4659020,"cityName":"Sioux Falls"}}
+{"create_at":datetime("2016-08-28T09:06:06.000"),"id":int64("769929057643429888"),"text":"anytime anyone comes to my house and bandit attacks them :-// https://t.co/OoqlP7HFlg","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user":{"id":int64("963565770"),"name":"abigail","screen_name":"abbyschoonover","lang":"en","location":"Above you","create_at":date("2012-11-21"),"description":"half blade and half silk | sc: abbyschoon","followers_count":1203,"friends_count":250,"statues_count":23417},"place":{"country":"United States","country_code":"United States","full_name":"Loves Park, IL","id":"9bfddbd45776b2bc","name":"Loves Park","place_type":"city","bounding_box":rectangle("-89.069204,42.293874 -88.964153,42.365885")},"geo_tag":{"stateID":17,"stateName":"Illinois","countyID":17201,"countyName":"Winnebago","cityID":1745031,"cityName":"Loves Park"}}
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.01.ddl.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.01.ddl.aql
new file mode 100644
index 0000000..190891f
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.01.ddl.aql
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ /*
+ * Description  : Reproduction scenario for ASTERIXDB-1636
+ * Expected Res : Success
+ * Date         : Oct 5th 2016
+ */
+
+drop dataverse twitter if exists;
+create dataverse twitter if not exists;
+use dataverse twitter;
+create type typeUser if not exists as open {
+    id: int64,
+    name: string,
+    screen_name : string,
+    lang : string,
+    location: string,
+    create_at: date,
+    description: string,
+    followers_count: int32,
+    friends_count: int32,
+    statues_count: int64
+}
+create type typePlace if not exists as open {
+    country : string,
+    country_code : string,
+    full_name : string,
+    id : string,
+    name : string,
+    place_type : string,
+    bounding_box : rectangle
+}
+create type typeGeoTag if not exists as open {
+    stateID: int32,
+    stateName: string,
+    countyID: int32,
+    countyName: string,
+    cityID: int32?,
+    cityName: string?
+}
+create type typeTweet if not exists as open {
+    create_at : datetime,
+    id: int64,
+    "text": string,
+    in_reply_to_status : int64,
+    in_reply_to_user : int64,
+    favorite_count : int64,
+    coordinate: point?,
+    retweet_count : int64,
+    lang : string,
+    is_retweet: boolean,
+    hashtags : {{ string }} ?,
+    user_mentions : {{ int64 }} ?,
+    user : typeUser,
+    place : typePlace?,
+    geo_tag: typeGeoTag
+}
+
+create dataset ds_tweet(typeTweet) if not exists primary key id with filter on create_at;
+create index text_idx if not exists on ds_tweet("text") type keyword;
+create feed MessageFeed using localfs(
+("path"="localhost://../../../../../src/test/resources/integrationts/restart/828.h1w.adm"),
+("format"="adm"),
+("type-name"="typeTweet"));
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.02.ddl.aql
similarity index 79%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.02.ddl.aql
index af2f691..58dea6b 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.02.ddl.aql
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+ /*
+ * Description  : Reproduction scenario for ASTERIXDB-1636
+ * Expected Res : Success
+ * Date         : Oct 5th 2016
+ */
+
+use dataverse twitter;
+connect feed MessageFeed to dataset ds_tweet;
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.03.script.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.03.script.aql
new file mode 100644
index 0000000..d489e74
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.03.script.aql
@@ -0,0 +1 @@
+touch.sh
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.04.ddl.aql
similarity index 65%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.04.ddl.aql
index af2f691..cf520ca 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.04.ddl.aql
@@ -16,4 +16,20 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+ /*
+ * Description  : Reproduction scenario for ASTERIXDB-1636
+ * Expected Res : Success
+ * Date         : Oct 5th 2016
+ */
+
+use dataverse twitter;
+drop feed TweetFeed if exists;
+create feed TweetFeed using localfs
+(
+    ("path"="localhost://../../../../../target/tweets.json"),
+    ("format"="adm"),
+    ("type-name"="typeTweet")
+);
+set wait-for-completion-feed "true";
+connect feed TweetFeed to dataset ds_tweet;
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.05.mgx.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.05.mgx.aql
new file mode 100644
index 0000000..d6e839a
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.05.mgx.aql
@@ -0,0 +1,2 @@
+stop -n asterix
+
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.06.mgx.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.06.mgx.aql
new file mode 100644
index 0000000..1d6d3bc
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.06.mgx.aql
@@ -0,0 +1,2 @@
+start -n asterix
+
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.07.script.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.07.script.aql
new file mode 100644
index 0000000..7503842
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.07.script.aql
@@ -0,0 +1 @@
+cat.sh
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.08.ddl.aql
similarity index 77%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.08.ddl.aql
index af2f691..860b8ed 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.08.ddl.aql
@@ -16,4 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+ /*
+ * Description  : Reproduction scenario for ASTERIXDB-1636
+ * Expected Res : Success
+ * Date         : Oct 5th 2016
+ */
+use dataverse twitter;
+set wait-for-completion-feed "false";
+connect feed TweetFeed to dataset ds_tweet;
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.09.sleep.aql
similarity index 85%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
copy to asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.09.sleep.aql
index af2f691..49ccbce 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-function/feed-with-external-function.4.sleep.aql
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.09.sleep.aql
@@ -16,4 +16,10 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-10000
\ No newline at end of file
+
+ /*
+ * Description  : Reproduction scenario for ASTERIXDB-1636
+ * Expected Res : Success
+ * Date         : Oct 5th 2016
+ */
+1000
\ No newline at end of file
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.10.script.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.10.script.aql
new file mode 100644
index 0000000..80bfb2c
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.10.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.11.script.aql b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.11.script.aql
new file mode 100644
index 0000000..d599fbe
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/queries/feed-restart/issue-1636/issue-1636.11.script.aql
@@ -0,0 +1 @@
+grep_log.sh
\ No newline at end of file
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/cat.sh b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/cat.sh
new file mode 100755
index 0000000..c111fde
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/cat.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+cat src/test/resources/integrationts/restart/tweets.json > target/tweets.json
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/grep_log.sh b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/grep_log.sh
new file mode 100755
index 0000000..b84732f
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/grep_log.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+if grep --quiet -m 1 'java.lang.ArrayIndexOutOfBoundsException' target/asterix-installer-*-binary-assembly/clusters/local/working_dir/logs/*.log ; then
+    echo "ERROR"
+fi
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/kill_cc_and_nc.sh b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/kill_cc_and_nc.sh
new file mode 100755
index 0000000..2582713
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/kill_cc_and_nc.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/touch.sh b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/touch.sh
new file mode 100755
index 0000000..4e0b75c
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/scripts/feed-restart/issue-1636/touch.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+if [ -f target/tweets.json ]; then
+    rm -f target/tweets.json
+fi
+touch target/tweets.json
\ No newline at end of file
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/testsuite.xml b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/testsuite.xml
new file mode 100644
index 0000000..44ae9b4
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/testsuite.xml
@@ -0,0 +1,28 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements.  See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership.  The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License.  You may obtain a copy of the License at
+ !
+ !   http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied.  See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
+  <test-group name="restart">
+      <test-case FilePath="feed-restart">
+        <compilation-unit name="issue-1636">
+          <output-dir compare="Text">issue-1636</output-dir>
+        </compilation-unit>
+      </test-case>
+  </test-group>
+</test-suite>
+
diff --git a/asterixdb/asterix-installer/src/test/resources/integrationts/restart/tweets.json b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/tweets.json
new file mode 100644
index 0000000..d9fca06
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/restart/tweets.json
@@ -0,0 +1,39 @@
+{"create_at":datetime("2016-09-23T12:25:34.000"),"id":int64("779401340485271552"),"text":"https://t.co/90HQQzJDie","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Richmond, VA","id":"00f751614d8ce37b","name":"Richmond","place_type":"city","bounding_box":rectangle("-77.601044,37.447046 -77.385297,37.61272")},"geo_tag":{"stateID":51,"stateName":"Virginia","countyID":51760,"countyName":"Richmond","cityID":5167000,"cityName":"Richmond"},"user":{"id":int64("732261848024809473"),"name":"KUnderwood","screen_name":"KUnderw63665020","lang":"en","location":"Virginia, USA","create_at":date("2016-05-16"),"description":"*Riding On The Trump Train!*\nNO LISTS!\nRETWEETS CIRCULATE INFO THAT MSM WILL NOT!RETWEET ALL INFO GET PPL INFORMED!KNOWLEDGE IS OUR WEAPON!","followers_count":2239,"friends_count":2637,"statues_count":37837}}
+{"create_at":datetime("2016-09-23T12:25:48.000"),"id":int64("779401398928584704"),"text":"I think they should extend #SuicidePreventionAwarenessMonth from Sept until after the election, and permanently if Trump wins.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"SuicidePreventionAwarenessMonth"}},"place":{"country":"United States","country_code":"United States","full_name":"Fort Worth, TX","id":"42e46bc3663a4b5f","name":"Fort Worth","place_type":"city","bounding_box":rectangle("-97.538285,32.569477 -97.033542,32.990456")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48439,"countyName":"Tarrant","cityID":4827000,"cityName":"Fort Worth"},"user":{"id":int64("25033519"),"name":"Lily Barreda","screen_name":"lbarreda","lang":"en","location":"Fort Worth, TX","create_at":date("2009-03-17"),"description":"Dog mom, brain injury survivor-ish, really bad at writing bios.","followers_count":2363,"friends_count":304,"statues_count":1438}}
+{"create_at":datetime("2016-09-23T12:25:57.000"),"id":int64("779401435486093312"),"text":"Trump called Cruz a liar & his wife ugly, so Cruz is endorsing Trump. It's the kindergarten equivalent of pulling hair of the girl you like.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Seattle, WA","id":"300bcc6e23a88361","name":"Seattle","place_type":"city","bounding_box":rectangle("-122.436232,47.495315 -122.224973,47.734319")},"geo_tag":{"stateID":53,"stateName":"Washington","countyID":53033,"countyName":"King","cityID":5363000,"cityName":"Seattle"},"user":{"id":int64("274147256"),"name":"JHunterJokes","screen_name":"jhuntercomedy","lang":"en","location":"seattle, washington","create_at":date("2011-03-29"),"description":"stand up. comedy writer. None of my tweets are based on fact. Except those about the #LADodgers #Redskins, my dogs, and beer.","followers_count":1072,"friends_count":1111,"statues_count":18889}}
+{"create_at":datetime("2016-09-23T12:26:14.000"),"id":int64("779401506772676608"),"text":"Eric Trump: My dad is the epitome of the American dream! He started with barely anything!\n\nMe: https://t.co/sdNNukaYRw","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Manhattan, NY","id":"01a9a39529b27f36","name":"Manhattan","place_type":"city","bounding_box":rectangle("-74.026675,40.683935 -73.910408,40.877483")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36061,"countyName":"New York","cityID":36061,"cityName":"Manhattan"},"user":{"id":int64("39267537"),"name":"Lucy Flawless","screen_name":"everythingsjake","lang":"en","location":"New York, NY","create_at":date("2009-05-11"),"description":"I could care.","followers_count":3283,"friends_count":417,"statues_count":161030}}
+{"create_at":datetime("2016-09-23T12:26:21.000"),"id":int64("779401534622736384"),"text":"Trump bunk... https://t.co/dNDg6SkmCg","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Brookdale, CA","id":"01289bf759f2ab45","name":"Brookdale","place_type":"city","bounding_box":rectangle("-122.122065,37.089913 -122.085464,37.113252")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6087,"countyName":"Santa Cruz","cityID":608478,"cityName":"Brookdale"},"user":{"id":int64("44959505"),"name":"nancy bowen","screen_name":"lavenderblue27","lang":"en","location":"Brookdale, CA","create_at":date("2009-06-05"),"description":"Fairness, human rights & laughter, I think for myself, avid Olbermann, John Fugelsang, Beatles, John Clark, Jack Reacher fan #UniteBlue","followers_count":21429,"friends_count":21797,"statues_count":291006}}
+{"create_at":datetime("2016-09-23T12:26:22.000"),"id":int64("779401539894915072"),"text":"@JkgaddisJulie @CelesteHerget @ejoy2270 @StatesPoll agreed! Trump creates jobs! Look at his resume","in_reply_to_status":int64("779352835573985280"),"in_reply_to_user":int64("1340907289"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{1340907289,930575720,705433853289082880,4547869279}},"place":{"country":"United States","country_code":"United States","full_name":"Sterling, IL","id":"6ba87c2e2d3960d0","name":"Sterling","place_type":"city","bounding_box":rectangle("-89.771235,41.772244 -89.640186,41.827767")},"geo_tag":{"stateID":17,"stateName":"Illinois","countyID":17195,"countyName":"Whiteside","cityID":1772546,"cityName":"Sterling"},"user":{"id":int64("2752932015"),"name":"Kelley MD\u2764\uFE0FTrump","screen_name":"guthrie_kelley","lang":"en","location":"United States","create_at":date("2014-08-27"),"description":"M.D. and MSW . full time doc @ Federal public health dept. I'm a Mainer. now in IL. land locked. Christian !! Jesus calling.....the book to own. :)","followers_count":1900,"friends_count":189,"statues_count":13912}}
+{"create_at":datetime("2016-09-23T12:26:23.000"),"id":int64("779401543728652288"),"text":"If equality to some means hating all men and blaming them for all their problems and 2 failed relationships. YOU are more RETARED than Trump","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Savage, MD","id":"013e2fa93b1bd0be","name":"Savage","place_type":"city","bounding_box":rectangle("-76.852175,39.120076 -76.776884,39.179931")},"geo_tag":{"stateID":24,"stateName":"Maryland","countyID":24027,"countyName":"Howard","cityID":2470475,"cityName":"Savage"},"user":{"id":int64("446473873"),"name":"Oscar Villalobos","screen_name":"mistafuntastic","lang":"en","location":"Beltsville, MD ","create_at":date("2011-12-25"),"description":"@UoPeople #Dreamer #DCUnited \u26AB\uFE0F\u26BD\uFE0F #STEM #MLS #Physics","followers_count":218,"friends_count":235,"statues_count":13741}}
+{"create_at":datetime("2016-09-23T12:26:24.000"),"id":int64("779401550976258048"),"text":"WHAT IS THIS NONSENSE @KimKardashian ALSO @Caitlyn_Jenner GURL BYE https://t.co/G3pi9wJZSX","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{25365536,3303293865}},"place":{"country":"United States","country_code":"United States","full_name":"Odessa, TX","id":"2c0346ba4b733e24","name":"Odessa","place_type":"city","bounding_box":rectangle("-102.434966,31.792563 -102.253208,31.955114")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48135,"countyName":"Ector","cityID":4853388,"cityName":"Odessa"},"user":{"id":int64("152033213"),"name":"ERNEST-O","screen_name":"emacys","lang":"en","location":"TEXAS \u272F","create_at":date("2010-06-04"),"description":"journalist / producer / currently rolling my eyes / aggressive @kimkardashian stan /","followers_count":602,"friends_count":1223,"statues_count":19777}}
+{"create_at":datetime("2016-09-23T12:26:27.000"),"id":int64("779401559415357440"),"text":"\"Look at me! Look at me! Hey, LOOK AT ME!\" https://t.co/Aog5ZWQeE1","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Manhattan, NY","id":"01a9a39529b27f36","name":"Manhattan","place_type":"city","bounding_box":rectangle("-74.026675,40.683935 -73.910408,40.877483")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36061,"countyName":"New York","cityID":36061,"cityName":"Manhattan"},"user":{"id":int64("995194771"),"name":"Steven Beck","screen_name":"becksnyc78","lang":"en","location":"New York","create_at":date("2012-12-07"),"description":"Florida born. NYC living. Wilhelmina Model. Runner. Photography. BUCS. FSU. MUFC.","followers_count":847,"friends_count":706,"statues_count":39316}}
+{"create_at":datetime("2016-09-23T12:26:31.000"),"id":int64("779401579803836416"),"text":"If they don't want to be considered enemies, let them come out and endorse DONAL J. TRUMP for president.\nDump the DOPE SMOKER.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Astatula, FL","id":"dbbd875df397a247","name":"Astatula","place_type":"city","bounding_box":rectangle("-81.747783,28.701654 -81.714374,28.727329")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12069,"countyName":"Lake","cityID":1202250,"cityName":"Astatula"},"user":{"id":int64("730763692007002112"),"name":"R A Sutton","screen_name":"rodger_sutton","lang":"en","location":"Central FLORIDA","create_at":date("2016-05-12"),"description":"United States Marine.\nFather,Grandfather,GREAT GRANDFATHER \nTrue Son of the Confederacy.\nAmerican Patriot, R.E.D.\nUS FREEDOM ARMY\nNRA Golden Eagle Member","followers_count":277,"friends_count":22,"statues_count":15286}}
+{"create_at":datetime("2016-09-23T12:26:34.000"),"id":int64("779401589417086976"),"text":"@MarkCubanFans @mcuban_NBA The 14 year old that owns an NBA team. Maybe he'll stick his tongue out at Trump.\u2026 https://t.co/VPYKYwngqj","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("614301557"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{614301557,716296544278851585}},"place":{"country":"United States","country_code":"United States","full_name":"Spokane, WA","id":"dc3747428fa88cab","name":"Spokane","place_type":"city","bounding_box":rectangle("-117.565226,47.5742 -117.303868,47.760676")},"geo_tag":{"stateID":53,"stateName":"Washington","countyID":53063,"countyName":"Spokane","cityID":5367000,"cityName":"Spokane"},"user":{"id":int64("3321633224"),"name":"DeplorableDBlack15NC","screen_name":"dblack15nc","lang":"en","location":"null","create_at":date("2015-08-20"),"description":"Catholic Gentleman. 4th Degree KoC. Retired USAF Senior NCO. DAV, VFW, VVA, AmLegion, Sons of Confederate Veterans, Unreconstructed Tar Heel. #MAGA","followers_count":1352,"friends_count":1474,"statues_count":23047}}
+{"create_at":datetime("2016-09-23T12:26:36.000"),"id":int64("779401599127093248"),"text":"My house is surrounded with trump supporters lol","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"North Royalton, OH","id":"4506ca1d65ca5b4d","name":"North Royalton","place_type":"city","bounding_box":rectangle("-81.785322,41.264708 -81.70169,41.350954")},"geo_tag":{"stateID":39,"stateName":"Ohio","countyID":39035,"countyName":"Cuyahoga","cityID":3957008,"cityName":"North Royalton"},"user":{"id":int64("120509237"),"name":"Mahmood A","screen_name":"yung_mood","lang":"en","location":"null","create_at":date("2010-03-06"),"description":"Yemeni! BAY AREA!!! living in CLE. IG mahmood_a snapchat mood1996","followers_count":1324,"friends_count":638,"statues_count":6930}}
+{"create_at":datetime("2016-09-23T12:26:51.000"),"id":int64("779401661273870336"),"text":"Why Are There Any Liberals Supporting Gary Johnson?\nhttps://t.co/KCOQqQrhaY","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Rancho Cucamonga, CA","id":"36ac79e68ace76e4","name":"Rancho Cucamonga","place_type":"city","bounding_box":rectangle("-117.637862,34.077184 -117.486443,34.166156")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6071,"countyName":"San Bernardino","cityID":659451,"cityName":"Rancho Cucamonga"},"user":{"id":int64("611001007"),"name":"#M","screen_name":"digitalbooger","lang":"en","location":"Rancho Cucamonga","create_at":date("2012-06-17"),"description":"Rams back! Dodgers Lakers Kings USC Duke Bsktbl REM Who Tull U2 Floyd! Liberal Pacifist/Warmonger Chico St. alum AbsurdistNerd 1st Sci Fi novel coming","followers_count":1339,"friends_count":1502,"statues_count":7000}}
+{"create_at":datetime("2016-09-23T12:26:52.000"),"id":int64("779401665518637056"),"text":"@maddow @tedcruz @MSNBC , you are shame to the people to USA if you endorse Trump. where are your balls??","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("16129920"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{16129920,23022687,2836421}},"place":{"country":"United States","country_code":"United States","full_name":"Delray Beach, FL","id":"b4a0e9082dcc8436","name":"Delray Beach","place_type":"city","bounding_box":rectangle("-80.13094,26.420813 -80.05542,26.491482")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12099,"countyName":"Palm Beach","cityID":1217100,"cityName":"Delray Beach"},"user":{"id":int64("199408437"),"name":"Nidia Gerlach","screen_name":"mamamia909","lang":"en","location":"Delray Beach, Fl. ","create_at":date("2010-10-06"),"description":"Nos encanta ser parte del progreso de nuestro pais. We love to be part of the progress of our Country.","followers_count":330,"friends_count":1491,"statues_count":1651}}
+{"create_at":datetime("2016-09-23T12:26:52.000"),"id":int64("779401667477250048"),"text":"#johnmccain #kellyayotte #ronjohnson #robportman #PatToomey #RichardBurr #markkirk #chuckgrassley \n #trump #gop\nhttps://t.co/YAVjNZ4NxD","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"hashtags":{{"johnmccain","kellyayotte","ronjohnson","robportman","PatToomey","RichardBurr","markkirk","chuckgrassley","trump","gop"}},"place":{"country":"United States","country_code":"United States","full_name":"San Francisco, CA","id":"5a110d312052166f","name":"San Francisco","place_type":"city","bounding_box":rectangle("-122.514926,37.708075 -122.357031,37.833238")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6075,"countyName":"San Francisco","cityID":667000,"cityName":"San Francisco"},"user":{"id":int64("216205407"),"name":"Ken Holsclaw","screen_name":"KenHolsclaw","lang":"en","location":"Earth","create_at":date("2010-11-15"),"description":"Basically good,kind, and smart. Hate bigots,false prophets, and mixing politics and religion. Killing women and children in a war should never be intentional.","followers_count":608,"friends_count":561,"statues_count":46347}}
+{"create_at":datetime("2016-09-23T12:26:55.000"),"id":int64("779401678567067648"),"text":"@MCuban will sit front row at debates. @HillaryClinton guest. Will he make #DonaldTrump squirm? #SiSePuede https://t.co/uG0HzbBULk","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("16228398"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"DonaldTrump","SiSePuede"}},"user_mentions":{{16228398,1339835893}},"place":{"country":"United States","country_code":"United States","full_name":"Dallas, TX","id":"18810aa5b43e76c7","name":"Dallas","place_type":"city","bounding_box":rectangle("-96.977527,32.620678 -96.54598,33.019039")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48113,"countyName":"Dallas","cityID":4819000,"cityName":"Dallas"},"user":{"id":int64("63619403"),"name":"Wise Latinas Linked","screen_name":"wiselatinaslink","lang":"en","location":"USA","create_at":date("2009-08-06"),"description":"#Latina #Influencers #activists big block #voters & #consumers, recognized by 2011 & 2013 @LATISM social media leaders | Latina group on #Facebook w/ 7K members","followers_count":8087,"friends_count":5416,"statues_count":19195}}
+{"create_at":datetime("2016-09-23T12:26:55.000"),"id":int64("779401679787610112"),"text":"I still think that Ted Cruz is gonna Rick Roll Trump just like he did at the convention. They're both ass hats like that.","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Phoenix, AZ","id":"5c62ffb0f0f3479d","name":"Phoenix","place_type":"city","bounding_box":rectangle("-112.323914,33.29026 -111.925439,33.815465")},"geo_tag":{"stateID":4,"stateName":"Arizona","countyID":4013,"countyName":"Maricopa","cityID":455000,"cityName":"Phoenix"},"user":{"id":int64("25761955"),"name":"Tony Hernandez","screen_name":"HernandezTony","lang":"en","location":"Phoenix, AZ","create_at":date("2009-03-21"),"description":"Author","followers_count":985,"friends_count":583,"statues_count":35384}}
+{"create_at":datetime("2016-09-23T12:27:00.000"),"id":int64("779401699291168768"),"text":"@tedcruz HOWS YOUR DAD TODAY!!! IS IT REALLY WORTH IT!!! \uD83D\uDC4E\uD83C\uDFFD\uD83D\uDC4E\uD83D\uDC4E\uD83C\uDFFC\uD83D\uDC4E\uD83C\uDFFF\uD83D\uDC4E\uD83C\uDFFE\uD83D\uDC4E\uD83C\uDFFB\uD83D\uDC36\u274C\uD83D\uDC36\u274C\uD83D\uDC36\u274C\uD83D\uDC36 https://t.co/6MxunVNs8M","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("23022687"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{23022687}},"place":{"country":"United States","country_code":"United States","full_name":"Shinnecock Hills, NY","id":"a34194954d3e5af3","name":"Shinnecock Hills","place_type":"city","bounding_box":rectangle("-72.484145,40.875807 -72.43394,40.897898")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36103,"countyName":"Suffolk","cityID":3667048,"cityName":"Shinnecock Hills"},"user":{"id":int64("766964096126320640"),"name":"Corey B.","screen_name":"Coreybez1","lang":"en","location":"Shinnecock Hills, NY","create_at":date("2016-08-20"),"description":"null","followers_count":792,"friends_count":1865,"statues_count":15868}}
+{"create_at":datetime("2016-09-23T12:27:03.000"),"id":int64("779401710523383808"),"text":"Hillary on Inner City Youth \n\"Bring to Heel\"\nhttps://t.co/KyV1g5EXYP\n\nTrump - \"Spend Billions\" on Inner City Youth \nhttps://t.co/PypeKtGTcJ","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"San Juan Capistrano, CA","id":"4aea239b6a146e77","name":"San Juan Capistrano","place_type":"city","bounding_box":rectangle("-117.686553,33.466555 -117.611591,33.54673")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6059,"countyName":"Orange","cityID":668028,"cityName":"San Juan Capistrano"},"user":{"id":int64("187059363"),"name":"Flavius Aetius","screen_name":"StupidBoomers","lang":"en","location":"The Titanic State or CA","create_at":date("2010-09-04"),"description":"The Worst Generation-Liberal Baby Boomers-are destroying USA w their selfish, greedy & unsustainable entitlement policies-Young Vet-Media Bias MUST STOP","followers_count":13932,"friends_count":15315,"statues_count":147185}}
+{"create_at":datetime("2016-09-23T12:27:08.000"),"id":int64("779401732208066561"),"text":"Reality tv stars stick together ? Imagine the episode Kim K will film at White House if Donald is president :-) https://t.co/YVKUnW3G7V","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Princeton, NJ","id":"c833fbabba6fe48e","name":"Princeton","place_type":"city","bounding_box":rectangle("-74.710297,40.32495 -74.619759,40.37435")},"geo_tag":{"stateID":34,"stateName":"New Jersey","countyID":34021,"countyName":"Mercer","cityID":3460900,"cityName":"Princeton"},"user":{"id":int64("305318105"),"name":"Steven Strauss","screen_name":"Steven_Strauss","lang":"en","location":"Cambridge and Princeton ","create_at":date("2011-05-25"),"description":"John L. Weinberg/Goldman Sachs & Co. Visiting Professor at Princeton's Woodrow Wilson School. OpEd contributor at LA Times, USA Today. Usual disclaimers apply","followers_count":43156,"friends_count":4007,"statues_count":84058}}
+{"create_at":datetime("2016-09-23T12:27:12.000"),"id":int64("779401752088940544"),"text":"@tedcruz must NOT endorse Trump! #NeverTrump #HillNo","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("23022687"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"NeverTrump","HillNo"}},"user_mentions":{{23022687}},"place":{"country":"United States","country_code":"United States","full_name":"Oakland Park, FL","id":"b230af2256484995","name":"Oakland Park","place_type":"city","bounding_box":rectangle("-80.196625,26.150489 -80.117027,26.209352")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12011,"countyName":"Broward","cityID":1250575,"cityName":"Oakland Park"},"user":{"id":int64("62919284"),"name":"Tony Gator","screen_name":"gatormiami","lang":"en","location":"MIAMI or thereabouts","create_at":date("2009-08-04"),"description":"Ultra Conservative, Great Gator Fan & Realtor!","followers_count":1724,"friends_count":2385,"statues_count":23132}}
+{"create_at":datetime("2016-09-23T12:27:19.000"),"id":int64("779401780375543808"),"text":"Kim Kardashian, wife to Kanye West from Chicago, mother of two biracial children.. Is voting Trump. Ha","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Bronx, NY","id":"002e24c6736f069d","name":"Bronx","place_type":"city","bounding_box":rectangle("-73.933612,40.785365 -73.765271,40.91533")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36005,"countyName":"Bronx","cityID":36005,"cityName":"Bronx"},"user":{"id":int64("169662144"),"name":"lani","screen_name":"kailaniskye","lang":"en","location":"Bronx, NY","create_at":date("2010-07-22"),"description":"issa knife.","followers_count":2854,"friends_count":301,"statues_count":40616}}
+{"create_at":datetime("2016-09-23T12:27:19.000"),"id":int64("779401780178321408"),"text":"@OMAROSA I have no intention of \"bowing\" to the bigoted, fascist Trump.. I will STAND, VOTE, FIGHT! https://t.co/jvmb1JC9pu #nevertrump","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("16799621"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"nevertrump"}},"user_mentions":{{16799621}},"place":{"country":"United States","country_code":"United States","full_name":"Glen Burnie, MD","id":"cad757363c1a85df","name":"Glen Burnie","place_type":"city","bounding_box":rectangle("-76.644717,39.131259 -76.563196,39.207912")},"geo_tag":{"stateID":24,"stateName":"Maryland","countyID":24003,"countyName":"Anne Arundel","cityID":2432650,"cityName":"Glen Burnie"},"user":{"id":int64("106288017"),"name":"Kelly Lyles","screen_name":"thekellylyles","lang":"en","location":"Washington, D.C. ","create_at":date("2010-01-18"),"description":"Hybrid Blue Dog Democrat. Tweets on #autos #equality #politics Tweets are my own opinion. RTs are not endorsements.","followers_count":192,"friends_count":790,"statues_count":4109}}
+{"create_at":datetime("2016-09-23T12:27:20.000"),"id":int64("779401782938206209"),"text":"https://t.co/fyLLenVync @Cernovich @LouDobbs @realDonaldTrump","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"user_mentions":{{358545917,26487169,25073877}},"place":{"country":"United States","country_code":"United States","full_name":"Muscle Shoals, AL","id":"74d460734c3eca69","name":"Muscle Shoals","place_type":"city","bounding_box":rectangle("-87.680639,34.705252 -87.617362,34.762527")},"geo_tag":{"stateID":1,"stateName":"Alabama","countyID":1033,"countyName":"Colbert","cityID":153016,"cityName":"Muscle Shoals"},"user":{"id":int64("4852434473"),"name":"GTolbert","screen_name":"GTolbert9","lang":"en","location":"Alabama, USA","create_at":date("2016-01-26"),"description":"null","followers_count":647,"friends_count":559,"statues_count":22532}}
+{"create_at":datetime("2016-09-23T12:27:40.000"),"id":int64("779401869084983296"),"text":"Cincinnati Enquirer spurns Donald Trump to endorse first Democrat in nearly 100 years https://t.co/juhelRdfQX","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Brooklyn, NY","id":"011add077f4d2da3","name":"Brooklyn","place_type":"city","bounding_box":rectangle("-74.041878,40.570842 -73.855673,40.739434")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36047,"countyName":"Kings","cityID":36047,"cityName":"Brooklyn"},"user":{"id":int64("308851637"),"name":"Ingrid E Baptista","screen_name":"Ingridebap","lang":"en","location":"New York","create_at":date("2011-05-31"),"description":"null","followers_count":250,"friends_count":938,"statues_count":15376}}
+{"create_at":datetime("2016-09-23T12:27:44.000"),"id":int64("779401883588833280"),"text":"Uh nope add *sarcasm* pls I don't know you personally and there are too many trump trolls bought by Russia https://t.co/2kStJCm43a","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Las Vegas, NV","id":"5c2b5e46ab891f07","name":"Las Vegas","place_type":"city","bounding_box":rectangle("-115.384091,36.129459 -115.062159,36.336371")},"geo_tag":{"stateID":32,"stateName":"Nevada","countyID":32003,"countyName":"Clark","cityID":3240000,"cityName":"Las Vegas"},"user":{"id":int64("118837027"),"name":"Tracy Downey","screen_name":"msgoddessrises","lang":"en","location":"Viva Las Vegas NV.","create_at":date("2010-03-01"),"description":"Film Student/Writer #DivineInterventionLuckyChronicles CenterR-Ind #Nolabels #NeverTrump #MS #11yrs Proud innovator with a keyboard. God is in control.","followers_count":1715,"friends_count":340,"statues_count":157425}}
+{"create_at":datetime("2016-09-23T12:27:45.000"),"id":int64("779401890333196288"),"text":"@OliverNorgrove @LennyPetsPuppys @senatorshoshana @Popehat \nConsidering the crazy ass shit #Trump pulls, Gary looks like a RhodesScholar.","in_reply_to_status":int64("779352824077508608"),"in_reply_to_user":int64("2233524090"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"hashtags":{{"Trump"}},"user_mentions":{{2233524090,714755712,38445467,18839937}},"place":{"country":"United States","country_code":"United States","full_name":"Eugene, OR","id":"01241e9666cacdd2","name":"Eugene","place_type":"city","bounding_box":rectangle("-123.208615,43.988281 -123.036188,44.142961")},"geo_tag":{"stateID":41,"stateName":"Oregon","countyID":41039,"countyName":"Lane","cityID":4123850,"cityName":"Eugene"},"user":{"id":int64("21906070"),"name":"Remember Benghazi!","screen_name":"Kegan05","lang":"en","location":"Northwest USA","create_at":date("2009-02-25"),"description":"Political Junkie, Republican, Recovered Reagan Democrat of 20+ years. Slick Willy was my motivation for dumping the CommieCrats. Never looked back! #TGDN","followers_count":11446,"friends_count":10924,"statues_count":249793}}
+{"create_at":datetime("2016-09-23T12:27:47.000"),"id":int64("779401896033390593"),"text":"To terrible human beings in one picture https://t.co/PhJJsXtcGa","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Auburn, AL","id":"deb349182b3f42bb","name":"Auburn","place_type":"city","bounding_box":rectangle("-85.569969,32.532449 -85.413112,32.662041")},"geo_tag":{"stateID":1,"stateName":"Alabama","countyID":1081,"countyName":"Lee","cityID":103076,"cityName":"Auburn"},"user":{"id":int64("35630380"),"name":"Ben Stoner","screen_name":"Ben_Stoner96","lang":"en","location":"Auburn, AL","create_at":date("2009-04-26"),"description":"AU","followers_count":571,"friends_count":530,"statues_count":14829}}
+{"create_at":datetime("2016-09-23T12:27:47.000"),"id":int64("779401897304289281"),"text":"@guypbenson Cruz is still \u201Cyoung\u201D and ambitious - its probably also not a coincidence this occurring while Trump talking about SCOTUS.","in_reply_to_status":int64("779394688637775873"),"in_reply_to_user":int64("16193222"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{16193222}},"place":{"country":"United States","country_code":"United States","full_name":"Boynton Beach, FL","id":"005e3661711a29a9","name":"Boynton Beach","place_type":"city","bounding_box":rectangle("-80.173447,26.476484 -80.05236,26.590488")},"geo_tag":{"stateID":12,"stateName":"Florida","countyID":12099,"countyName":"Palm Beach","cityID":1207875,"cityName":"Boynton Beach"},"user":{"id":int64("14641687"),"name":"Aaron Von Gauss","screen_name":"AVonGauss","lang":"en","location":"Boynton Beach, FL","create_at":date("2008-05-03"),"description":"Technology Consultant who strangely does not live in Silicon Valley, Alley nor Wonderland.","followers_count":134,"friends_count":107,"statues_count":8100}}
+{"create_at":datetime("2016-09-23T12:27:52.000"),"id":int64("779401916715376641"),"text":"That says a lot. https://t.co/zKvbTvtmAx","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Shady Shores, TX","id":"6f357cae6be7a645","name":"Shady Shores","place_type":"city","bounding_box":rectangle("-97.065657,33.137751 -97.013767,33.177785")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48121,"countyName":"Denton","cityID":4867100,"cityName":"Shady Shores"},"user":{"id":int64("351153807"),"name":"Nancy Bell","screen_name":"Flygirlfly625","lang":"en","location":"Shady Shores Texas","create_at":date("2011-08-08"),"description":"Occupational Therapist, HomeModification Consultant,NDT Certified. Ergonomic Specialist.Hobbies:Flying, Home Remodeling/Staging, and attempts to play the violin","followers_count":162,"friends_count":435,"statues_count":4024}}
+{"create_at":datetime("2016-09-23T12:27:53.000"),"id":int64("779401920461111297"),"text":"A vote is the ULTIMATE endorsement -After Bitter Primary Fight, Ted Cruz To Back Donald Trump https://t.co/4oFZzi9kGL","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Waverly, MI","id":"00534e655a405438","name":"Waverly","place_type":"city","bounding_box":rectangle("-86.117567,42.783159 -86.058053,42.856213")},"geo_tag":{"stateID":26,"stateName":"Michigan","countyID":26045,"countyName":"Eaton","cityID":2684800,"cityName":"Waverly"},"user":{"id":int64("728972859356250113"),"name":"Murphy for Congress","screen_name":"DMurphyCongress","lang":"en","location":"Grandville, MI","create_at":date("2016-05-07"),"description":"Voters have a choice! I'm Dennis and I hope to serve in Congress as a Dem from Michigan\u2019s 2nd District. Feel free to tweet questions & please vote in November!","followers_count":68,"friends_count":160,"statues_count":87}}
+{"create_at":datetime("2016-09-23T12:27:56.000"),"id":int64("779401933220184064"),"text":"He's not going to endorse. https://t.co/WEh2PwnWB6","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Rosenberg, TX","id":"00f2b6161655c5cc","name":"Rosenberg","place_type":"city","bounding_box":rectangle("-95.835024,29.482424 -95.699766,29.581688")},"geo_tag":{"stateID":48,"stateName":"Texas","countyID":48157,"countyName":"Fort Bend","cityID":4863284,"cityName":"Rosenberg"},"user":{"id":int64("1424259144"),"name":"Deborah Pearson","screen_name":"debpearsonTX","lang":"en","location":"In the Clouds protecting USA","create_at":date("2013-05-12"),"description":"Born and bred in Texas!! Conservative voter. God fearing, concerned about our REPUBLIC. God bless USA. GOD BLESS TEXAS! #NRA http://tedcruz.org","followers_count":2266,"friends_count":3262,"statues_count":18221}}
+{"create_at":datetime("2016-09-23T12:27:56.000"),"id":int64("779401936441315329"),"text":"Imagine thinking this https://t.co/2E79rFHCUo","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Brooklyn, NY","id":"011add077f4d2da3","name":"Brooklyn","place_type":"city","bounding_box":rectangle("-74.041878,40.570842 -73.855673,40.739434")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36047,"countyName":"Kings","cityID":36047,"cityName":"Brooklyn"},"user":{"id":int64("3099901028"),"name":"Brandy Jensen","screen_name":"BrandyLJensen","lang":"en","location":"Brooklyn, NY","create_at":date("2015-03-20"),"description":"behind every successful man is a woman pegging him","followers_count":8538,"friends_count":1446,"statues_count":32937}}
+{"create_at":datetime("2016-09-23T12:27:57.000"),"id":int64("779401938181971969"),"text":"I am a bit puzzled why the same people that harassed Marco about endorsing Trump are making excuses for Cruz now...","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Manlius, NY","id":"a91318fb3d0ef965","name":"Manlius","place_type":"city","bounding_box":rectangle("-76.026142,42.968277 -75.935566,43.093685")},"geo_tag":{"stateID":36,"stateName":"New York","countyID":36067,"countyName":"Onondaga","cityID":3645018,"cityName":"Manlius"},"user":{"id":int64("237089251"),"name":"Nick Lindquist","screen_name":"nick_lindquist","lang":"en","location":"Team Marco","create_at":date("2011-01-11"),"description":"LMC '19 \u2022 Marketing \u2022 Young Politico \u2022 @decainc Region 9 Officer/Alum \u2022 John Katko Intern \u2022 Contributor @RedMillennial \u2022 Font Aficionado \u2022 Dr. Pepper Addict","followers_count":934,"friends_count":783,"statues_count":13392}}
+{"create_at":datetime("2016-09-23T12:27:58.000"),"id":int64("779401941575237636"),"text":"They're rl idiots!!!! https://t.co/gdRfntHaXU","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Dale City, VA","id":"66cd2cdb819f4414","name":"Dale City","place_type":"city","bounding_box":rectangle("-77.443065,38.61099 -77.274813,38.68125")},"geo_tag":{"stateID":51,"stateName":"Virginia","countyID":51153,"countyName":"Prince William","cityID":5121088,"cityName":"Dale City"},"user":{"id":int64("3229093668"),"name":"Cheena Byrd","screen_name":"ChillCheena","lang":"en","location":"null","create_at":date("2015-05-28"),"description":"August 25\u2764\uFE0F #E4SooMs","followers_count":420,"friends_count":202,"statues_count":8596}}
+{"create_at":datetime("2016-09-23T12:28:00.000"),"id":int64("779401952438263808"),"text":"@KarenLeslieHall @speechboy71 It's a tossup. This is a dumbed down electorate, fed lies, rumors, rumors of lies daily. Trump is the product.","in_reply_to_status":int64("779371105081516032"),"in_reply_to_user":int64("1169239260"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{1169239260,113061485}},"place":{"country":"United States","country_code":"United States","full_name":"San Francisco, CA","id":"5a110d312052166f","name":"San Francisco","place_type":"city","bounding_box":rectangle("-122.514926,37.708075 -122.357031,37.833238")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6075,"countyName":"San Francisco","cityID":667000,"cityName":"San Francisco"},"user":{"id":int64("977619834"),"name":"Blind Willies","screen_name":"williesband","lang":"en","location":"San Francisco","create_at":date("2012-11-28"),"description":"rock. \r\n\r\nStreaming: http://blindwillies.bandcamp.com\r\n\r\nhttp://facebook.com/blindwillies\r\n\r\nContact: blindwillies@blindwillies.net","followers_count":1156,"friends_count":952,"statues_count":6176}}
+{"create_at":datetime("2016-09-23T12:28:09.000"),"id":int64("779401988949811200"),"text":"@AlanColmes You are constantly looking ways to bash Trump? Open your eyes Allen and see what HRC has to offer. Nada. You do us a disservice","in_reply_to_status":int64("779393945889411072"),"in_reply_to_user":int64("14924233"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{14924233}},"place":{"country":"United States","country_code":"United States","full_name":"Huntertown, IN","id":"017e230d04270300","name":"Huntertown","place_type":"city","bounding_box":rectangle("-85.186546,41.199635 -85.104621,41.244575")},"geo_tag":{"stateID":18,"stateName":"Indiana","countyID":18003,"countyName":"Allen","cityID":1835266,"cityName":"Huntertown"},"user":{"id":int64("321627407"),"name":"Jeanne Boutilier","screen_name":"mtnest11","lang":"en","location":"Indiana, USA","create_at":date("2011-06-21"),"description":"Happy follower of spouse, 6 states / 11 houses. Love being a Grams, best job ever! Lover of life, family, friends & all breathing things. (except spiders)","followers_count":180,"friends_count":297,"statues_count":4288}}
+{"create_at":datetime("2016-09-23T12:28:11.000"),"id":int64("779401997527031808"),"text":"This newspaper just endorsed its first Democrat for president in almost a century, because Trump https://t.co/Wuudp4zhf1 via @motherjones","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"en","is_retweet":false,"user_mentions":{{18510860}},"place":{"country":"United States","country_code":"United States","full_name":"Dublin, CA","id":"003253f0107acd32","name":"Dublin","place_type":"city","bounding_box":rectangle("-121.979522,37.697406 -121.849535,37.733656")},"geo_tag":{"stateID":6,"stateName":"California","countyID":6001,"countyName":"Alameda","cityID":620018,"cityName":"Dublin"},"user":{"id":int64("1850731"),"name":"Joyce Tompsett","screen_name":"joycetompsett","lang":"en","location":"Nerdistan CA","create_at":date("2007-03-22"),"description":"Girl Geek, Tech Evangelist, Dir. of Silicon Valley Exec Briefing Ctr at Dell Technologies, yogini, world traveler, living moment by moment w/open heart.","followers_count":902,"friends_count":1335,"statues_count":9773}}
+{"create_at":datetime("2016-09-23T12:28:12.000"),"id":int64("779401999859064832"),"text":"\uD83D\uDE04 https://t.co/pE1oMWDVjL","in_reply_to_status":int64("-1"),"in_reply_to_user":int64("-1"),"favorite_count":int64("0"),"retweet_count":int64("0"),"lang":"und","is_retweet":false,"place":{"country":"United States","country_code":"United States","full_name":"Summerlin South, NV","id":"0134e6167ff7f6ec","name":"Summerlin South","place_type":"city","bounding_box":rectangle("-115.355825,36.082837 -115.314862,36.159081")},"geo_tag":{"stateID":32,"stateName":"Nevada","countyID":32003,"countyName":"Clark","cityID":3270900,"cityName":"Summerlin South"},"user":{"id":int64("50230100"),"name":"Tracey","screen_name":"tsh219","lang":"en","location":"Las Vegas,NV ","create_at":date("2009-06-23"),"description":"Native New Yorker, Educator, Doctoral (Ed.D.) Candidate. Waiting For My White Privilege To Kick In. #TrumpPence2016 #MAGA #NeverHillary","followers_count":766,"friends_count":522,"statues_count":5386}}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
index 07aa473..767eacb 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/parser/ScopeChecker.java
@@ -199,13 +199,14 @@
         }
     }
 
-    protected String fixQuotes(String token) {
+    protected static String fixQuotes(String token) {
+        final String stripped = stripQuotes(token);
+        return stripped != null ? "'" + stripped + "'" : token;
+    }
+
+    protected static String stripQuotes(String token) {
         final int last = token.length() - 1;
-        if (token.charAt(0) == '"' && token.charAt(last) == '"') {
-            return "'" + token.substring(1, last) + "'";
-        } else {
-            return token;
-        }
+        return token.charAt(0) == '"' && token.charAt(last) == '"' ? token.substring(1, last) : null;
     }
 
     protected static String addEscapes(String str) {
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java
index 1f7e4b0..6e7349e 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/util/CommonFunctionMapUtil.java
@@ -48,6 +48,20 @@
         FUNCTION_NAME_MAP.put("smallint", "int16"); // smallint, internal: int16
         FUNCTION_NAME_MAP.put("integer", "int32"); // integer, internal: int32
         FUNCTION_NAME_MAP.put("bigint", "int64"); // bigint, internal: int64
+
+        // Type functions.
+        FUNCTION_NAME_MAP.put("isnull", "is-null"); // isnull, internal: is-null
+        FUNCTION_NAME_MAP.put("ismissing", "is-missing"); // ismissing, internal: is-missing
+        FUNCTION_NAME_MAP.put("isunknown", "is-unknown"); // isunknown, internal: is-unknown
+        FUNCTION_NAME_MAP.put("isboolean", "is-boolean"); // isboolean, internal: is-boolean
+        FUNCTION_NAME_MAP.put("isbool", "is-boolean"); // isbool, internal: is-boolean
+        FUNCTION_NAME_MAP.put("isnumber", "is-number"); // isnumber, internal: is-number
+        FUNCTION_NAME_MAP.put("isnum", "is-number"); // isnum, internal: is-number
+        FUNCTION_NAME_MAP.put("isstring", "is-string"); // isstring, internal: is-string
+        FUNCTION_NAME_MAP.put("isstr", "is-string"); // isstr, internal: is-string
+        FUNCTION_NAME_MAP.put("isarray", "is-array"); // isarray, internal: is-array
+        FUNCTION_NAME_MAP.put("isobject", "is-object"); // isobject, internal: is-object
+        FUNCTION_NAME_MAP.put("isobj", "is-object"); // isobj, internal: is-object
     }
 
     private CommonFunctionMapUtil() {
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
index 6c4bc5c..f113894 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
+++ b/asterixdb/asterix-lang-sqlpp/src/main/javacc/SQLPP.jj
@@ -298,7 +298,10 @@
                 message += fixQuotes(tokenImage[0]);
                 break;
             }
-            message += fixQuotes(tokenImage[tok.kind]) + " ";
+            final String fixedTokenImage = tokenImage[tok.kind];
+            if (! tok.image.equalsIgnoreCase(stripQuotes(fixedTokenImage))) {
+                message += fixQuotes(fixedTokenImage) + " ";
+            }
             message += quot + addEscapes(tok.image) + quot;
             tok = tok.next;
         }
@@ -2977,7 +2980,7 @@
       quantifiedList.add(pair);
     }
     )*
-     <SATISFIES> satisfiesExpr = Expression()
+     <SATISFIES> satisfiesExpr = Expression() (<END>)?
      {
        qc.setSatisfiesExpr(satisfiesExpr);
        qc.setQuantifiedList(quantifiedList);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/GarbageCollector.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/GarbageCollector.java
index d1efe11..8a3392a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/GarbageCollector.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/GarbageCollector.java
@@ -18,26 +18,47 @@
  */
 package org.apache.asterix.metadata;
 
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
 /**
  * Periodically recycle temporary datasets.
  *
  * @author yingyib
  */
 public class GarbageCollector implements Runnable {
+    private static final Logger LOGGER = Logger.getLogger(GarbageCollector.class.getName());
 
-    private static long CLEANUP_PERIOD = 3600 * 24;
+    private static final long CLEANUP_PERIOD = 3600L * 24;
 
-    @Override
-    public void run() {
-        try {
-            synchronized (this) {
-                this.wait(CLEANUP_PERIOD);
-            }
-            MetadataManager.INSTANCE.cleanupTempDatasets();
-        } catch (Exception e) {
-            // Prints the stack trace to log.
-            e.printStackTrace();
-        }
+    static {
+        // Starts the garbage collector thread which
+        // should always be running.
+        Thread gcThread = new Thread(new GarbageCollector(), "Metadata GC");
+        gcThread.setDaemon(true);
+        gcThread.start();
     }
 
+    @Override
+    @SuppressWarnings("squid:S2142") // rethrow or interrupt thread on InterruptedException
+    public void run() {
+        LOGGER.info("Starting Metadata GC");
+        while (true) {
+            try {
+                synchronized (this) {
+                    this.wait(CLEANUP_PERIOD);
+                }
+                MetadataManager.INSTANCE.cleanupTempDatasets();
+            } catch (InterruptedException e) {
+                break;
+            } catch (Exception e) {
+                LOGGER.log(Level.WARNING, "Exception cleaning temp datasets", e);
+            }
+        }
+        LOGGER.info("Exiting Metadata GC");
+    }
+
+    public static void ensure() {
+        // no need to do anything, <clinit> does the work
+    }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
index 70da097..6a324a1 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataManager.java
@@ -22,6 +22,7 @@
 import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
@@ -49,6 +50,7 @@
 import org.apache.asterix.metadata.entities.NodeGroup;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 /**
  * Provides access to Asterix metadata via remote methods to the metadata node.
@@ -83,71 +85,34 @@
  * with transaction ids of regular jobs or other metadata transactions.
  */
 public class MetadataManager implements IMetadataManager {
-    private static final int INITIAL_SLEEP_TIME = 64;
-    private static final int RETRY_MULTIPLIER = 5;
-    private static final int MAX_RETRY_COUNT = 10;
-
-    // Set in init().
-    public static MetadataManager INSTANCE;
     private final MetadataCache cache = new MetadataCache();
-    private final IAsterixStateProxy proxy;
-    private IMetadataNode metadataNode;
+    protected final IAsterixStateProxy proxy;
+    protected IMetadataNode metadataNode;
     private final ReadWriteLock metadataLatch;
-    private final AsterixMetadataProperties metadataProperties;
-    public boolean rebindMetadataNode = false;
+    protected boolean rebindMetadataNode = false;
 
-    public MetadataManager(IAsterixStateProxy proxy, AsterixMetadataProperties metadataProperties) {
-        if (proxy == null) {
-            throw new Error("Null proxy given to MetadataManager.");
+    // TODO(mblow): replace references to this (non-constant) field with a method; update the field name accordingly
+    public static IMetadataManager INSTANCE;
+
+    private MetadataManager(IAsterixStateProxy proxy, IMetadataNode metadataNode) {
+        this(proxy);
+        if (metadataNode == null) {
+            throw new IllegalArgumentException("Null metadataNode given to MetadataManager");
         }
-        this.proxy = proxy;
-        this.metadataProperties = metadataProperties;
-        this.metadataNode = null;
-        this.metadataLatch = new ReentrantReadWriteLock(true);
+        this.metadataNode = metadataNode;
     }
 
-    public MetadataManager(IAsterixStateProxy proxy, IMetadataNode metadataNode) {
-        if (metadataNode == null) {
-            throw new Error("Null metadataNode given to MetadataManager.");
+    private MetadataManager(IAsterixStateProxy proxy) {
+        if (proxy == null) {
+            throw new IllegalArgumentException("Null proxy given to MetadataManager");
         }
         this.proxy = proxy;
-        this.metadataProperties = null;
-        this.metadataNode = metadataNode;
         this.metadataLatch = new ReentrantReadWriteLock(true);
     }
 
     @Override
-    public void init() throws RemoteException, MetadataException {
-        // Could be synchronized on any object. Arbitrarily chose proxy.
-        synchronized (proxy) {
-            if (metadataNode != null && !rebindMetadataNode) {
-                return;
-            }
-            try {
-                int retry = 0;
-                int sleep = INITIAL_SLEEP_TIME;
-                while (retry++ < MAX_RETRY_COUNT) {
-                    metadataNode = proxy.getMetadataNode();
-                    if (metadataNode != null) {
-                        rebindMetadataNode = false;
-                        break;
-                    }
-                    Thread.sleep(sleep);
-                    sleep *= RETRY_MULTIPLIER;
-                }
-            } catch (InterruptedException e) {
-                throw new MetadataException(e);
-            }
-            if (metadataNode == null) {
-                throw new Error("Failed to get the MetadataNode.\n" + "The MetadataNode was configured to run on NC: "
-                        + metadataProperties.getMetadataNodeName());
-            }
-        }
-
-        // Starts the garbage collector thread which
-        // should always be running.
-        Thread garbageCollectorThread = new Thread(new GarbageCollector());
-        garbageCollectorThread.start();
+    public void init() throws HyracksDataException {
+        GarbageCollector.ensure();
     }
 
     @Override
@@ -243,7 +208,7 @@
     @Override
     public List<Dataset> getDataverseDatasets(MetadataTransactionContext ctx, String dataverseName)
             throws MetadataException {
-        List<Dataset> dataverseDatasets = new ArrayList<Dataset>();
+        List<Dataset> dataverseDatasets = new ArrayList<>();
         // add uncommitted temporary datasets
         for (Dataset dataset : ctx.getDataverseDatasets(dataverseName)) {
             if (dataset.getDatasetDetails().isTemp()) {
@@ -339,7 +304,7 @@
     @Override
     public List<Index> getDatasetIndexes(MetadataTransactionContext ctx, String dataverseName, String datasetName)
             throws MetadataException {
-        List<Index> datasetIndexes = new ArrayList<Index>();
+        List<Index> datasetIndexes = new ArrayList<>();
         Dataset dataset = findDataset(ctx, dataverseName, datasetName);
         if (dataset == null) {
             return datasetIndexes;
@@ -373,7 +338,7 @@
     public CompactionPolicy getCompactionPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
             throws MetadataException {
 
-        CompactionPolicy compactionPolicy = null;
+        CompactionPolicy compactionPolicy;
         try {
             compactionPolicy = metadataNode.getCompactionPolicy(ctx.getJobId(), dataverse, policyName);
         } catch (RemoteException e) {
@@ -434,7 +399,7 @@
             ARecordType aRecType = (ARecordType) datatype.getDatatype();
             return new Datatype(
                     datatype.getDataverseName(), datatype.getDatatypeName(), new ARecordType(aRecType.getTypeName(),
-                            aRecType.getFieldNames(), aRecType.getFieldTypes(), aRecType.isOpen()),
+                    aRecType.getFieldNames(), aRecType.getFieldTypes(), aRecType.isOpen()),
                     datatype.getIsAnonymous());
         }
         try {
@@ -710,7 +675,7 @@
     @Override
     public DatasourceAdapter getAdapter(MetadataTransactionContext ctx, String dataverseName, String name)
             throws MetadataException {
-        DatasourceAdapter adapter = null;
+        DatasourceAdapter adapter;
         try {
             adapter = metadataNode.getAdapter(ctx.getJobId(), dataverseName, name);
         } catch (RemoteException e) {
@@ -733,7 +698,7 @@
     @Override
     public List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName)
             throws MetadataException {
-        List<Library> dataverseLibaries = null;
+        List<Library> dataverseLibaries;
         try {
             // Assuming that the transaction can read its own writes on the
             // metadata node.
@@ -759,7 +724,7 @@
     @Override
     public Library getLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName)
             throws MetadataException, RemoteException {
-        Library library = null;
+        Library library;
         try {
             library = metadataNode.getLibrary(ctx.getJobId(), dataverseName, libraryName);
         } catch (RemoteException e) {
@@ -792,18 +757,18 @@
     public FeedPolicyEntity getFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName)
             throws MetadataException {
 
-        FeedPolicyEntity FeedPolicy = null;
+        FeedPolicyEntity feedPolicy;
         try {
-            FeedPolicy = metadataNode.getFeedPolicy(ctx.getJobId(), dataverse, policyName);
+            feedPolicy = metadataNode.getFeedPolicy(ctx.getJobId(), dataverse, policyName);
         } catch (RemoteException e) {
             throw new MetadataException(e);
         }
-        return FeedPolicy;
+        return feedPolicy;
     }
 
     @Override
     public Feed getFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException {
-        Feed feed = null;
+        Feed feed;
         try {
             feed = metadataNode.getFeed(ctx.getJobId(), dataverse, feedName);
         } catch (RemoteException e) {
@@ -814,7 +779,7 @@
 
     @Override
     public void dropFeed(MetadataTransactionContext ctx, String dataverse, String feedName) throws MetadataException {
-        Feed feed = null;
+        Feed feed;
         try {
             feed = metadataNode.getFeed(ctx.getJobId(), dataverse, feedName);
             metadataNode.dropFeed(ctx.getJobId(), dataverse, feedName);
@@ -834,6 +799,7 @@
         ctx.addFeed(feed);
     }
 
+    @Override
     public List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext mdTxnCtx, String dataverse)
             throws MetadataException {
         List<DatasourceAdapter> dataverseAdapters;
@@ -845,9 +811,10 @@
         return dataverseAdapters;
     }
 
+    @Override
     public void dropFeedPolicy(MetadataTransactionContext mdTxnCtx, String dataverseName, String policyName)
             throws MetadataException {
-        FeedPolicyEntity feedPolicy = null;
+        FeedPolicyEntity feedPolicy;
         try {
             feedPolicy = metadataNode.getFeedPolicy(mdTxnCtx.getJobId(), dataverseName, policyName);
             metadataNode.dropFeedPolicy(mdTxnCtx.getJobId(), dataverseName, policyName);
@@ -901,7 +868,7 @@
 
     @Override
     public ExternalFile getExternalFile(MetadataTransactionContext ctx, String dataverseName, String datasetName,
-            Integer fileNumber) throws MetadataException {
+                                        Integer fileNumber) throws MetadataException {
         ExternalFile file;
         try {
             file = metadataNode.getExternalFile(ctx.getJobId(), dataverseName, datasetName, fileNumber);
@@ -939,7 +906,7 @@
         cache.cleanupTempDatasets();
     }
 
-    private Dataset findDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName) {
+    public Dataset findDataset(MetadataTransactionContext ctx, String dataverseName, String datasetName) {
         Dataset dataset = ctx.getDataset(dataverseName, datasetName);
         if (dataset == null) {
             dataset = cache.getDataset(dataverseName, datasetName);
@@ -969,7 +936,8 @@
 
     @Override
     public <T extends IExtensionMetadataEntity> List<T> getEntities(MetadataTransactionContext mdTxnCtx,
-            IExtensionMetadataSearchKey searchKey) throws MetadataException {
+                                                                    IExtensionMetadataSearchKey searchKey)
+            throws MetadataException {
         try {
             return metadataNode.getEntities(mdTxnCtx.getJobId(), searchKey);
         } catch (RemoteException e) {
@@ -977,7 +945,49 @@
         }
     }
 
-    public static synchronized void instantiate(MetadataManager metadataManager) {
-        MetadataManager.INSTANCE = metadataManager;
+    @Override
+    public void rebindMetadataNode() {
+        rebindMetadataNode = true;
+    }
+
+    public static void initialize(IAsterixStateProxy proxy, AsterixMetadataProperties metadataProperties) {
+        INSTANCE = new CCMetadataManagerImpl(proxy, metadataProperties);
+    }
+
+    public static void initialize(IAsterixStateProxy proxy, MetadataNode metadataNode) {
+        INSTANCE = new MetadataManager(proxy, metadataNode);
+    }
+
+    private static class CCMetadataManagerImpl extends MetadataManager {
+        private final AsterixMetadataProperties metadataProperties;
+
+        public CCMetadataManagerImpl(IAsterixStateProxy proxy, AsterixMetadataProperties metadataProperties) {
+            super(proxy);
+            this.metadataProperties = metadataProperties;
+        }
+
+        @Override
+        public synchronized void init() throws HyracksDataException {
+            if (metadataNode != null && !rebindMetadataNode) {
+                return;
+            }
+            try {
+                metadataNode = proxy.waitForMetadataNode(metadataProperties.getRegistrationTimeoutSecs(),
+                        TimeUnit.SECONDS);
+                if (metadataNode != null) {
+                    rebindMetadataNode = false;
+                } else {
+                    throw new HyracksDataException("The MetadataNode failed to bind before the configured timeout ("
+                            + metadataProperties.getRegistrationTimeoutSecs() + " seconds); the MetadataNode was " +
+                            "configured to run on NC: " + metadataProperties.getMetadataNodeName());
+                }
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+                throw new HyracksDataException(e);
+            } catch (RemoteException e) {
+                throw new HyracksDataException(e);
+            }
+            super.init();
+        }
     }
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IAsterixStateProxy.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IAsterixStateProxy.java
index 7717a79..c94e159 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IAsterixStateProxy.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IAsterixStateProxy.java
@@ -22,12 +22,13 @@
 import java.io.Serializable;
 import java.rmi.Remote;
 import java.rmi.RemoteException;
+import java.util.concurrent.TimeUnit;
 
 /**
  * Interface for setting/getting distributed state of Asterix.
  */
 public interface IAsterixStateProxy extends Remote, Serializable {
-    public void setMetadataNode(IMetadataNode metadataNode) throws RemoteException;
+    void setMetadataNode(IMetadataNode metadataNode) throws RemoteException;
 
-    public IMetadataNode getMetadataNode() throws RemoteException;
+    IMetadataNode waitForMetadataNode(long waitFor, TimeUnit timeUnit) throws RemoteException, InterruptedException;
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
index 0acc027..feb4db0 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IMetadataManager.java
@@ -24,6 +24,7 @@
 
 import org.apache.asterix.common.exceptions.ACIDException;
 import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.common.metadata.IMetadataBootstrap;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.metadata.MetadataException;
 import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -39,7 +40,6 @@
 import org.apache.asterix.metadata.entities.Library;
 import org.apache.asterix.metadata.entities.Node;
 import org.apache.asterix.metadata.entities.NodeGroup;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
 
 /**
  * A metadata manager provides user access to Asterix metadata (e.g., types,
@@ -53,16 +53,7 @@
  * finer levels is the responsibility of the metadata node, not the metadata
  * manager or its user.
  */
-public interface IMetadataManager {
-
-    /**
-     * Initializes the metadata manager, e.g., finds the remote metadata node.
-     *
-     * @throws RemoteException
-     *             If an error occurred while contacting the proxy for finding
-     *             the metadata node.
-     */
-    void init() throws RemoteException, MetadataException;
+public interface IMetadataManager extends IMetadataBootstrap {
 
     /**
      * Begins a transaction on the metadata node.
@@ -256,7 +247,8 @@
      *            Name of the datavers holding the given dataset.
      * @param datasetName
      *            Name of the dataset holding the index.
-     * @indexName Name of the index to retrieve.
+     * @param indexName
+     *            Name of the index to retrieve.
      * @return An Index instance.
      * @throws MetadataException
      *             For example, if the index does not exist.
@@ -273,7 +265,8 @@
      *            Name of the datavers holding the given dataset.
      * @param datasetName
      *            Name of the dataset holding the index.
-     * @indexName Name of the index to retrieve.
+     * @param indexName
+     *            Name of the index to retrieve.
      * @throws MetadataException
      *             For example, if the index does not exist.
      */
@@ -406,7 +399,7 @@
     /**
      * @param mdTxnCtx
      *            MetadataTransactionContext of an active metadata transaction.
-     * @param function
+     * @param adapter
      *            An instance of type Adapter that represents the adapter being
      *            added
      * @throws MetadataException
@@ -418,7 +411,7 @@
      *            MetadataTransactionContext of an active metadata transaction.
      * @param dataverseName
      *            the dataverse associated with the adapter being searched
-     * @param Name
+     * @param name
      *            name of the adapter
      * @return
      * @throws MetadataException
@@ -438,6 +431,18 @@
     void dropAdapter(MetadataTransactionContext ctx, String dataverseName, String name) throws MetadataException;
 
     /**
+     *
+     * @param ctx
+     *            MetadataTransactionContext of an active metadata transaction.
+     * @param dataverseName
+     *            the dataverse whose associated adapters are being requested
+     * @return
+     * @throws MetadataException
+     */
+    List<DatasourceAdapter> getDataverseAdapters(MetadataTransactionContext ctx, String dataverseName)
+            throws MetadataException;
+
+    /**
      * @param ctx
      * @param policy
      * @throws MetadataException
@@ -497,6 +502,14 @@
      * @param ctx
      * @param dataverse
      * @param policyName
+     * @throws MetadataException
+     */
+    void dropFeedPolicy(MetadataTransactionContext ctx, String dataverse, String policyName) throws MetadataException;
+
+    /**
+     * @param ctx
+     * @param dataverse
+     * @param policyName
      * @return
      * @throws MetadataException
      */
@@ -526,7 +539,7 @@
      * @param libraryName
      *            Name of library to be deleted. MetadataException for example,
      *            if the library does not exists.
-     * @throws RemoteException
+     * @throws MetadataException
      */
     void dropLibrary(MetadataTransactionContext ctx, String dataverseName, String libraryName) throws MetadataException;
 
@@ -540,7 +553,6 @@
      *            Library to be added
      * @throws MetadataException
      *             for example, if the library is already added.
-     * @throws RemoteException
      */
     void addLibrary(MetadataTransactionContext ctx, Library library) throws MetadataException;
 
@@ -567,7 +579,6 @@
      *            dataverse asociated with the library that is to be retrieved.
      * @return Library
      * @throws MetadataException
-     * @throws RemoteException
      */
     List<Library> getDataverseLibraries(MetadataTransactionContext ctx, String dataverseName) throws MetadataException;
 
@@ -671,9 +682,13 @@
      * @param searchKey
      * @return
      * @throws MetadataException
-     * @throws HyracksDataException
      */
     <T extends IExtensionMetadataEntity> List<T> getEntities(MetadataTransactionContext mdTxnCtx,
-            IExtensionMetadataSearchKey searchKey) throws MetadataException, HyracksDataException;
+            IExtensionMetadataSearchKey searchKey) throws MetadataException;
 
+    /**
+     * Indicates that the metadata node has left or rejoined the cluster and that the MetadataManager should
+     * rebind it.
+     */
+    void rebindMetadataNode();
 }
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java
index 2f881be..da6bb54 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/AsterixStateProxy.java
@@ -21,6 +21,7 @@
 
 import java.rmi.RemoteException;
 import java.rmi.server.UnicastRemoteObject;
+import java.util.concurrent.TimeUnit;
 import java.util.logging.Logger;
 
 import org.apache.asterix.metadata.api.IAsterixStateProxy;
@@ -36,8 +37,8 @@
     private IMetadataNode metadataNode;
     private static final IAsterixStateProxy cc = new AsterixStateProxy();
 
-    public static IAsterixStateProxy registerRemoteObject() throws RemoteException {
-        IAsterixStateProxy stub = (IAsterixStateProxy) UnicastRemoteObject.exportObject(cc, 0);
+    public static IAsterixStateProxy registerRemoteObject(int metadataCallbackPort) throws RemoteException {
+        IAsterixStateProxy stub = (IAsterixStateProxy) UnicastRemoteObject.exportObject(cc, metadataCallbackPort);
         LOGGER.info("Asterix Distributed State Proxy Bound");
         return stub;
     }
@@ -48,12 +49,21 @@
     }
 
     @Override
-    public void setMetadataNode(IMetadataNode metadataNode) throws RemoteException {
+    public synchronized void setMetadataNode(IMetadataNode metadataNode) {
         this.metadataNode = metadataNode;
+        notifyAll();
     }
 
     @Override
-    public IMetadataNode getMetadataNode() throws RemoteException {
-        return this.metadataNode;
+    public IMetadataNode waitForMetadataNode(long waitFor, TimeUnit timeUnit) throws InterruptedException {
+        synchronized (this) {
+            long timeToWait = TimeUnit.MILLISECONDS.convert(waitFor, timeUnit);
+            while (metadataNode == null && timeToWait > 0) {
+                long startTime = System.currentTimeMillis();
+                wait(timeToWait);
+                timeToWait -= System.currentTimeMillis() - startTime;
+            }
+            return metadataNode;
+        }
     }
 }
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
index ca7c0b0..d247350 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
@@ -172,14 +172,6 @@
         addNullOrMissingField(fid, value.getByteArray(), value.getStartOffset());
     }
 
-    public void addField(int fid, byte[] value) {
-        closedPartOffsets[fid] = closedPartOutputStream.size();
-        // We assume the tag is not included (closed field)
-        closedPartOutputStream.write(value, 0, value.length);
-        numberOfClosedFields++;
-        addNullOrMissingField(fid, value, 0);
-    }
-
     private void addNullOrMissingField(int fid, byte[] data, int offset) {
         if (containsOptionalField) {
             byte nullByte = (byte) (1 << (7 - 2 * (fid % 4)));
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
index 55c9d13..fb5d0c7 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
@@ -214,8 +214,7 @@
 
     @SuppressWarnings("unchecked")
     public static void serializeSimpleSchemalessRecord(List<Pair<String, String>> record, DataOutput dataOutput,
-            boolean writeTypeTag)
-            throws HyracksDataException {
+            boolean writeTypeTag) throws HyracksDataException {
         ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
                 .getSerializerDeserializer(BuiltinType.ASTRING);
         RecordBuilder confRecordBuilder = new RecordBuilder();
@@ -268,67 +267,39 @@
 
     public static final int getFieldOffsetById(byte[] serRecord, int offset, int fieldId, int nullBitmapSize,
             boolean isOpen) {
-        byte nullTestCode = (byte) (1 << (7 - 2 * (fieldId % 4)));
-        byte missingTestCode = (byte) (1 << (7 - 2 * (fieldId % 4) - 1));
+        final byte nullTestCode = (byte) (1 << (7 - 2 * (fieldId % 4)));
+        final byte missingTestCode = (byte) (1 << (7 - 2 * (fieldId % 4) - 1));
+
+        // early exit if the value is not a record
+        if (serRecord[offset] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
+            return -1;
+        }
+
+        //advance to isExpanded or numberOfSchemaFields
+        int pointer = offset + 5;
+
         if (isOpen) {
-            if (serRecord[0 + offset] == ATypeTag.RECORD.serialize()) {
-                // 5 is the index of the byte that determines whether the record
-                // is expanded or not, i.e. it has an open part.
-                if (serRecord[5 + offset] == 1) { // true
-                    if (nullBitmapSize > 0) {
-                        // 14 = tag (1) + record Size (4) + isExpanded (1) +
-                        // offset of openPart (4) + number of closed fields (4)
-                        int pos = 14 + offset + fieldId / 4;
-                        if ((serRecord[pos] & nullTestCode) == 0) {
-                            // the field value is null
-                            return 0;
-                        }
-                        if ((serRecord[pos] & missingTestCode) == 0) {
-                            // the field value is missing
-                            return -1;
-                        }
-                    }
-                    return offset + AInt32SerializerDeserializer.getInt(serRecord,
-                            14 + offset + nullBitmapSize + (4 * fieldId));
-                } else {
-                    if (nullBitmapSize > 0) {
-                        // 9 = tag (1) + record Size (4) + isExpanded (1) +
-                        // number of closed fields (4)
-                        int pos = 10 + offset + fieldId / 4;
-                        if ((serRecord[pos] & nullTestCode) == 0) {
-                            // the field value is null
-                            return 0;
-                        }
-                        if ((serRecord[pos] & missingTestCode) == 0) {
-                            // the field value is missing
-                            return -1;
-                        }
-                    }
-                    return offset + AInt32SerializerDeserializer.getInt(serRecord,
-                            10 + offset + nullBitmapSize + (4 * fieldId));
-                }
-            } else {
+            final boolean isExpanded = serRecord[pointer] == 1;
+            //if isExpanded, advance to numberOfSchemaFields
+            pointer += 1 + (isExpanded ? 4 : 0);
+        }
+
+        //advance to nullBitmap
+        pointer += 4;
+
+        if (nullBitmapSize > 0) {
+            final int pos = pointer + fieldId / 4;
+            if ((serRecord[pos] & nullTestCode) == 0) {
+                // the field value is null
+                return 0;
+            }
+            if ((serRecord[pos] & missingTestCode) == 0) {
+                // the field value is missing
                 return -1;
             }
-        } else {
-            if (serRecord[offset] != ATypeTag.SERIALIZED_RECORD_TYPE_TAG) {
-                return Integer.MIN_VALUE;
-            }
-            if (nullBitmapSize > 0) {
-                // 9 = tag (1) + record Size (4) + number of closed fields
-                // (4)
-                int pos = 9 + offset + fieldId / 4;
-                if ((serRecord[pos] & nullTestCode) == 0) {
-                    // the field value is null
-                    return 0;
-                }
-                if ((serRecord[pos] & missingTestCode) == 0) {
-                    // the field value is missing
-                    return -1;
-                }
-            }
-            return offset + AInt32SerializerDeserializer.getInt(serRecord, 9 + offset + nullBitmapSize + (4 * fieldId));
         }
+
+        return offset + AInt32SerializerDeserializer.getInt(serRecord, pointer + nullBitmapSize + (4 * fieldId));
     }
 
     public static final int getFieldOffsetByName(byte[] serRecord, int start, int len, byte[] fieldName, int nstart)
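
The rewritten getFieldOffsetById collapses the three hard-coded header sizes of the removed branches (9, 10 and 14) into a single pointer that is advanced past each header component. A small sketch of the same arithmetic, assuming the layout implied by the removed comments: type tag (1 byte), record length (4), optional isExpanded flag (1), optional open-part offset (4), number of closed fields (4), then the null/missing bitmap followed by the 4-byte closed-field offsets:

    // Hypothetical helper reproducing the header arithmetic now used by getFieldOffsetById.
    final class RecordHeaderMath {

        private RecordHeaderMath() {
        }

        /** Position of the 4-byte slot holding the relative offset of closed field fieldId. */
        static int fieldOffsetSlotPosition(int recordStart, boolean recordTypeIsOpen, boolean isExpanded,
                int nullBitmapSize, int fieldId) {
            int pointer = recordStart + 1 + 4;   // skip the type tag and the record length
            if (recordTypeIsOpen) {
                pointer += 1;                    // skip the isExpanded flag
                if (isExpanded) {
                    pointer += 4;                // skip the open-part offset
                }
            }
            pointer += 4;                        // skip the number of closed fields
            // pointer now addresses the null/missing bitmap; the field offset array follows it
            return pointer + nullBitmapSize + 4 * fieldId;
        }
    }

Relative to the record start this reproduces the old constants: 9 for a closed record, 10 for an open but not expanded record, and 14 for an expanded one.
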
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java
index 6e983f1..4de9456 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -55,6 +55,7 @@
 import org.apache.asterix.om.typecomputer.impl.AnyTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.BooleanFunctionTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.BooleanOnlyTypeComputer;
+import org.apache.asterix.om.typecomputer.impl.BooleanOrMissingTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.CastTypeComputer;
 import org.apache.asterix.om.typecomputer.impl.ClosedRecordConstructorResultType;
 import org.apache.asterix.om.typecomputer.impl.CollectionMemberResultType;
@@ -765,6 +766,16 @@
     public static final FunctionIdentifier IS_NULL = AlgebricksBuiltinFunctions.IS_NULL;
     public static final FunctionIdentifier IS_UNKOWN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "is-unknown", 1);
+    public static final FunctionIdentifier IS_BOOLEAN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+            "is-boolean", 1);
+    public static final FunctionIdentifier IS_NUMBER = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-number",
+            1);
+    public static final FunctionIdentifier IS_STRING = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-string",
+            1);
+    public static final FunctionIdentifier IS_ARRAY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-array",
+            1);
+    public static final FunctionIdentifier IS_OBJECT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-object",
+            1);
 
     public static final FunctionIdentifier IS_SYSTEM_NULL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
             "is-system-null", 1);
@@ -795,10 +806,15 @@
 
         // first, take care of Algebricks builtin functions
         addFunction(IS_MISSING, BooleanOnlyTypeComputer.INSTANCE, true);
-        addFunction(IS_NULL, BooleanOnlyTypeComputer.INSTANCE, true);
         addFunction(IS_UNKOWN, BooleanOnlyTypeComputer.INSTANCE, true);
-        addFunction(IS_SYSTEM_NULL, BooleanOnlyTypeComputer.INSTANCE, true);
-        addFunction(NOT, BooleanFunctionTypeComputer.INSTANCE, true);
+        addFunction(IS_NULL, BooleanOrMissingTypeComputer.INSTANCE, true);
+        addFunction(IS_SYSTEM_NULL, ABooleanTypeComputer.INSTANCE, true);
+        addFunction(IS_BOOLEAN, ABooleanTypeComputer.INSTANCE, true);
+        addFunction(IS_NUMBER, ABooleanTypeComputer.INSTANCE, true);
+        addFunction(IS_STRING, ABooleanTypeComputer.INSTANCE, true);
+        addFunction(IS_ARRAY, ABooleanTypeComputer.INSTANCE, true);
+        addFunction(IS_OBJECT, ABooleanTypeComputer.INSTANCE, true);
+        addFunction(NOT, ABooleanTypeComputer.INSTANCE, true);
 
         addPrivateFunction(EQ, BooleanFunctionTypeComputer.INSTANCE, true);
         addPrivateFunction(LE, BooleanFunctionTypeComputer.INSTANCE, true);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
index 68fff2f..d3c18a9 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
@@ -75,17 +75,17 @@
 
     private final int numberOfSchemaFields;
     private final int[] fieldOffsets;
-    private final IVisitablePointable nullReference = AFlatValuePointable.FACTORY.create(null);
-    private final IVisitablePointable missingReference = AFlatValuePointable.FACTORY.create(null);
+    private final IVisitablePointable nullReference = PointableAllocator.allocateUnrestableEmpty();
+    private final IVisitablePointable missingReference = PointableAllocator.allocateUnrestableEmpty();
 
     private int closedPartTypeInfoSize = 0;
-    private int offsetArrayOffset;
     private ATypeTag typeTag;
 
     /**
      * private constructor, to prevent constructing it arbitrarily
      *
      * @param inputType
+     *            inputType should not be null. Use FULLY_OPEN_RECORD_TYPE instead.
      */
     public ARecordVisitablePointable(ARecordType inputType) {
         this.inputRecType = inputType;
@@ -165,27 +165,26 @@
 
         boolean isExpanded = false;
         int openPartOffset = 0;
-        int s = start;
-        int recordOffset = s;
-        if (inputRecType == null) {
-            openPartOffset = s + AInt32SerializerDeserializer.getInt(b, s + 6);
-            s += 8;
-            isExpanded = true;
-        } else {
-            if (inputRecType.isOpen()) {
-                isExpanded = b[s + 5] == 1 ? true : false;
-                if (isExpanded) {
-                    openPartOffset = s + AInt32SerializerDeserializer.getInt(b, s + 6);
-                    s += 10;
-                } else {
-                    s += 6;
-                }
-            } else {
-                s += 5;
+        int recordOffset = start;
+        int offsetArrayOffset;
+
+        //advance to either isExpanded or numberOfSchemaFields
+        int s = start + 5;
+        //inputRecType will never be null.
+        if (inputRecType.isOpen()) {
+            isExpanded = b[s] == 1;
+            //advance either to openPartOffset or numberOfSchemaFields
+            s += 1;
+            if (isExpanded) {
+                openPartOffset = start + AInt32SerializerDeserializer.getInt(b, s);
+                //advance to numberOfSchemaFields
+                s += 4;
             }
         }
+
         try {
             if (numberOfSchemaFields > 0) {
+                //advance to nullBitMap if hasOptionalFields, or fieldOffsets
                 s += 4;
                 int nullBitMapOffset = 0;
                 boolean hasOptionalFields = NonTaggedFormatUtil.hasOptionalField(inputRecType);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java
index 0f66a57..c88b95c 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java
@@ -63,6 +63,16 @@
     }
 
     /**
+     * This method should ONLY be used for long-lasting IVisitablePointable instances.
+     *
+     * @return
+     *         a generic IVisitablePointable.
+     */
+    public static IVisitablePointable allocateUnrestableEmpty() {
+        return AFlatValuePointable.FACTORY.create(null);
+    }
+
+    /**
      * allocate closed part value pointable
      *
      * @param type
@@ -98,8 +108,8 @@
                 if (listItemType.isDerivedType())
                     return allocateFieldValue(listItemType, b, offset + 1);
                 else
-                    return listValueAllocator.allocate(unorederedListTypeAllocator.allocate(TypeTagUtil
-                            .getBuiltinTypeByTag(listItemType)));
+                    return listValueAllocator.allocate(
+                            unorederedListTypeAllocator.allocate(TypeTagUtil.getBuiltinTypeByTag(listItemType)));
             }
         } else if (typeTag.equals(ATypeTag.ORDEREDLIST)) {
             ATypeTag listItemType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[offset]);
@@ -109,8 +119,8 @@
                 if (listItemType.isDerivedType())
                     return allocateFieldValue(listItemType, b, offset + 1);
                 else
-                    return listValueAllocator.allocate(orederedListTypeAllocator.allocate(TypeTagUtil
-                            .getBuiltinTypeByTag(listItemType)));
+                    return listValueAllocator.allocate(
+                            orederedListTypeAllocator.allocate(TypeTagUtil.getBuiltinTypeByTag(listItemType)));
             }
         } else
             return flatValueAllocator.allocate(null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java
index 00c4075..e57fa06 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java
@@ -110,8 +110,8 @@
             arg.first.set(accessor);
             return null;
         }
-        ATypeTag inputTypeTag =
-                EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(accessor.getByteArray()[accessor.getStartOffset()]);
+        ATypeTag inputTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
+                .deserialize(accessor.getByteArray()[accessor.getStartOffset()]);
         if (!needPromote(inputTypeTag, reqTypeTag)) {
             arg.first.set(accessor);
         } else {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java
index 5dee990..7115827 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java
@@ -43,11 +43,9 @@
  * ACastVisitor.
  */
 class AListCaster {
-    // pointable allocator
-    private final PointableAllocator allocator = new PointableAllocator();
 
     // for storing the cast result
-    private final IVisitablePointable itemTempReference = allocator.allocateEmpty();
+    private final IVisitablePointable itemTempReference = PointableAllocator.allocateUnrestableEmpty();
     private final Triple<IVisitablePointable, IAType, Boolean> itemVisitorArg = new Triple<>(itemTempReference, null,
             null);
 
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
index 0293fcf..7e1fe46 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
@@ -71,18 +71,18 @@
     private final DataOutputStream dos = new DataOutputStream(bos);
 
     private final RecordBuilder recBuilder = new RecordBuilder();
-    private final IVisitablePointable nullTypeTag = allocator.allocateEmpty();
-    private final IVisitablePointable missingTypeTag = allocator.allocateEmpty();
+    private final IVisitablePointable nullTypeTag = PointableAllocator.allocateUnrestableEmpty();
+    private final IVisitablePointable missingTypeTag = PointableAllocator.allocateUnrestableEmpty();
 
-    private final IBinaryComparator fieldNameComparator =
-            PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
+    private final IBinaryComparator fieldNameComparator = PointableBinaryComparatorFactory
+            .of(UTF8StringPointable.FACTORY).createBinaryComparator();
 
     private final ByteArrayAccessibleOutputStream outputBos = new ByteArrayAccessibleOutputStream();
     private final DataOutputStream outputDos = new DataOutputStream(outputBos);
 
-    private final IVisitablePointable fieldTempReference = allocator.allocateEmpty();
-    private final Triple<IVisitablePointable, IAType, Boolean> nestedVisitorArg =
-            new Triple<>(fieldTempReference, null, null);
+    private final IVisitablePointable fieldTempReference = PointableAllocator.allocateUnrestableEmpty();
+    private final Triple<IVisitablePointable, IAType, Boolean> nestedVisitorArg = new Triple<>(fieldTempReference, null,
+            null);
 
     private int numInputFields = 0;
 
@@ -155,6 +155,7 @@
     private void loadRequiredType(ARecordType reqType) throws IOException {
         reqFieldNames.clear();
         reqFieldTypeTags.clear();
+        allocator.reset();
 
         cachedReqType = reqType;
         int numSchemaFields = reqType.getFieldTypes().length;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/BooleanOrMissingTypeComputer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/BooleanOrMissingTypeComputer.java
new file mode 100644
index 0000000..982971a
--- /dev/null
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/BooleanOrMissingTypeComputer.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.om.typecomputer.impl;
+
+import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
+import org.apache.asterix.om.types.AUnionType;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import org.apache.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+public class BooleanOrMissingTypeComputer implements IResultTypeComputer {
+
+    public static final BooleanOrMissingTypeComputer INSTANCE = new BooleanOrMissingTypeComputer();
+
+    private BooleanOrMissingTypeComputer() {
+    }
+
+    @Override
+    public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+        return AUnionType.createMissableType(BuiltinType.ABOOLEAN);
+    }
+
+}
diff --git a/asterixdb/asterix-om/src/test/java/org/apache/asterix/om/typecomputer/TypeComputerTest.java b/asterixdb/asterix-om/src/test/java/org/apache/asterix/om/typecomputer/TypeComputerTest.java
index 760fd18..f48aa7c 100644
--- a/asterixdb/asterix-om/src/test/java/org/apache/asterix/om/typecomputer/TypeComputerTest.java
+++ b/asterixdb/asterix-om/src/test/java/org/apache/asterix/om/typecomputer/TypeComputerTest.java
@@ -61,6 +61,7 @@
         exceptionalTypeComputers.add("AMissingTypeComputer");
         exceptionalTypeComputers.add("NullableDoubleTypeComputer");
         exceptionalTypeComputers.add("RecordMergeTypeComputer");
+        exceptionalTypeComputers.add("BooleanOrMissingTypeComputer");
 
         // Tests all usual type computers.
         Reflections reflections = new Reflections("org.apache.asterix.om.typecomputer", new SubTypesScanner(false));
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
index 53958b4..9ba0e65 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePolygonDescriptor.java
@@ -118,9 +118,10 @@
                                 out.writeByte(ATypeTag.SERIALIZED_POLYGON_TYPE_TAG);
                                 out.writeShort(listAccessor.size() / 2);
 
+                                final int skipTypeTag = listAccessor.itemsAreSelfDescribing() ? 1 : 0;
                                 for (int i = 0; i < listAccessor.size() / 2; i++) {
-                                    int firstDoubleOffset = listAccessor.getItemOffset(i * 2);
-                                    int secondDobuleOffset = listAccessor.getItemOffset((i * 2) + 1);
+                                    int firstDoubleOffset = listAccessor.getItemOffset(i * 2) + skipTypeTag;
+                                    int secondDobuleOffset = listAccessor.getItemOffset((i * 2) + 1) + skipTypeTag;
 
                                     APointSerializerDeserializer.serialize(
                                             ADoubleSerializerDeserializer.getDouble(listBytes, firstDoubleOffset),
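
The skipTypeTag correction handles lists whose items are self-describing: each item then carries a leading 1-byte type tag, so the 8-byte double payload starts one byte after the item offset. A minimal sketch of that offset handling, assuming big-endian doubles as written by DataOutput (the helper name is illustrative):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    // Illustrative reader: skips the per-item type tag only when list items are self-describing.
    final class ListDoubleReader {

        private ListDoubleReader() {
        }

        static double readItemAsDouble(byte[] listBytes, int itemOffset, boolean itemsAreSelfDescribing) {
            int payloadOffset = itemOffset + (itemsAreSelfDescribing ? 1 : 0); // skip the tag byte if present
            return ByteBuffer.wrap(listBytes, payloadOffset, Double.BYTES)
                    .order(ByteOrder.BIG_ENDIAN).getDouble();
        }
    }
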
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsArrayDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsArrayDescriptor.java
new file mode 100644
index 0000000..c8cb313
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsArrayDescriptor.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.evaluators.functions;
+
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptor;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import org.apache.asterix.runtime.evaluators.common.AbstractTypeCheckEvaluator;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class IsArrayDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IsArrayDescriptor();
+        }
+    };
+
+    @Override
+    public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args)
+            throws AlgebricksException {
+        return new IScalarEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws AlgebricksException {
+                final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
+                return new AbstractTypeCheckEvaluator(eval) {
+
+                    @Override
+                    protected Value isMatch(byte typeTag) {
+                        return typeTag == ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG ? Value.TRUE : Value.FALSE;
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return AsterixBuiltinFunctions.IS_ARRAY;
+    }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsBooleanDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsBooleanDescriptor.java
new file mode 100644
index 0000000..89318bd
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsBooleanDescriptor.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.evaluators.functions;
+
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptor;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import org.apache.asterix.runtime.evaluators.common.AbstractTypeCheckEvaluator;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class IsBooleanDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IsBooleanDescriptor();
+        }
+    };
+
+    @Override
+    public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args)
+            throws AlgebricksException {
+        return new IScalarEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws AlgebricksException {
+                final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
+                return new AbstractTypeCheckEvaluator(eval) {
+
+                    @Override
+                    protected Value isMatch(byte typeTag) {
+                        return typeTag == ATypeTag.SERIALIZED_BOOLEAN_TYPE_TAG ? Value.TRUE : Value.FALSE;
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return AsterixBuiltinFunctions.IS_BOOLEAN;
+    }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsNumberDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsNumberDescriptor.java
new file mode 100644
index 0000000..418d778
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsNumberDescriptor.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.evaluators.functions;
+
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptor;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import org.apache.asterix.runtime.evaluators.common.AbstractTypeCheckEvaluator;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class IsNumberDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IsNumberDescriptor();
+        }
+    };
+
+    @Override
+    public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args)
+            throws AlgebricksException {
+        return new IScalarEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws AlgebricksException {
+                final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
+                return new AbstractTypeCheckEvaluator(eval) {
+
+                    @Override
+                    protected Value isMatch(byte typeTag) {
+                        return typeTag == ATypeTag.SERIALIZED_INT8_TYPE_TAG
+                                || typeTag == ATypeTag.SERIALIZED_INT16_TYPE_TAG
+                                || typeTag == ATypeTag.SERIALIZED_INT32_TYPE_TAG
+                                || typeTag == ATypeTag.SERIALIZED_INT64_TYPE_TAG
+                                || typeTag == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG
+                                || typeTag == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG ? Value.TRUE : Value.FALSE;
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return AsterixBuiltinFunctions.IS_NUMBER;
+    }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsObjectDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsObjectDescriptor.java
new file mode 100644
index 0000000..5a3f2d3
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsObjectDescriptor.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.evaluators.functions;
+
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptor;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import org.apache.asterix.runtime.evaluators.common.AbstractTypeCheckEvaluator;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class IsObjectDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IsObjectDescriptor();
+        }
+    };
+
+    @Override
+    public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args)
+            throws AlgebricksException {
+        return new IScalarEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws AlgebricksException {
+                final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
+                return new AbstractTypeCheckEvaluator(eval) {
+
+                    @Override
+                    protected Value isMatch(byte typeTag) {
+                        return typeTag == ATypeTag.SERIALIZED_RECORD_TYPE_TAG ? Value.TRUE : Value.FALSE;
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return AsterixBuiltinFunctions.IS_OBJECT;
+    }
+
+}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsStringDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsStringDescriptor.java
new file mode 100644
index 0000000..44c5a6c
--- /dev/null
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/IsStringDescriptor.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.runtime.evaluators.functions;
+
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.asterix.om.functions.IFunctionDescriptor;
+import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import org.apache.asterix.runtime.evaluators.common.AbstractTypeCheckEvaluator;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class IsStringDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+    private static final long serialVersionUID = 1L;
+    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+        @Override
+        public IFunctionDescriptor createFunctionDescriptor() {
+            return new IsStringDescriptor();
+        }
+    };
+
+    @Override
+    public IScalarEvaluatorFactory createEvaluatorFactory(final IScalarEvaluatorFactory[] args)
+            throws AlgebricksException {
+        return new IScalarEvaluatorFactory() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws AlgebricksException {
+                final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
+                return new AbstractTypeCheckEvaluator(eval) {
+
+                    @Override
+                    protected Value isMatch(byte typeTag) {
+                        return typeTag == ATypeTag.SERIALIZED_STRING_TYPE_TAG ? Value.TRUE : Value.FALSE;
+                    }
+                };
+            }
+        };
+    }
+
+    @Override
+    public FunctionIdentifier getIdentifier() {
+        return AsterixBuiltinFunctions.IS_STRING;
+    }
+
+}
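
The five new is-* descriptors differ only in the predicate they apply to the serialized type tag; argument evaluation, result writing and null/missing handling are inherited from AbstractTypeCheckEvaluator. A simplified template-method sketch of that division of labour, using stand-in classes rather than the actual runtime interfaces:

    // Stand-ins illustrating the split used by the new descriptors: the base class inspects the
    // leading tag byte of a serialized value, subclasses only decide what counts as a match.
    abstract class TypeTagCheck {

        protected abstract boolean isMatch(byte typeTag);

        final boolean check(byte[] serializedValue, int startOffset) {
            return isMatch(serializedValue[startOffset]);
        }
    }

    // Mirrors e.g. IsStringDescriptor: a match is exactly one tag, supplied by the caller.
    final class SingleTagCheck extends TypeTagCheck {
        private final byte expectedTag;

        SingleTagCheck(byte expectedTag) {
            this.expectedTag = expectedTag;
        }

        @Override
        protected boolean isMatch(byte typeTag) {
            return typeTag == expectedTag;
        }
    }

IsNumberDescriptor is the only one that accepts a set of tags (the integer, float and double tags) rather than a single value.
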
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackRequestMessage.java
index 5cb1a6a..dba3bc7 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackRequestMessage.java
@@ -63,7 +63,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         NodeControllerService ncs = (NodeControllerService) cs;
         IAsterixAppRuntimeContext appContext =
                 (IAsterixAppRuntimeContext) ncs.getApplicationContext().getApplicationObject();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackResponseMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackResponseMessage.java
index 4ae73ea..cb56c39 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackResponseMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/CompleteFailbackResponseMessage.java
@@ -48,7 +48,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         ClusterStateManager.INSTANCE.processCompleteFailbackResponse(this);
     }
 }
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackRequestMessage.java
index c112366..7283b89 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackRequestMessage.java
@@ -72,7 +72,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         NodeControllerService ncs = (NodeControllerService) cs;
         IAsterixAppRuntimeContext appContext =
                 (IAsterixAppRuntimeContext) ncs.getApplicationContext().getApplicationObject();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackResponseMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackResponseMessage.java
index db89f7c..d87cd23 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackResponseMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/PreparePartitionsFailbackResponseMessage.java
@@ -39,7 +39,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         ClusterStateManager.INSTANCE.processPreparePartitionsFailbackResponse(this);
     }
 
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
index fc55968..7776543 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
@@ -54,7 +54,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         NodeControllerService ncs = (NodeControllerService) cs;
         IAsterixAppRuntimeContext appContext =
                 (IAsterixAppRuntimeContext) ncs.getApplicationContext().getApplicationObject();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
index f9f6233..c1319e0 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
@@ -48,7 +48,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         IAsterixResourceIdManager resourceIdManager =
                 AsterixAppContextInfo.INSTANCE.getResourceIdManager();
         resourceIdManager.report(src, maxResourceId);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
index 203104e..a1290df 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
@@ -27,7 +27,7 @@
     private static final long serialVersionUID = 1L;
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         ReportMaxResourceIdMessage.send((NodeControllerService) cs);
     }
 
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
index afe2427..a2c8d74 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
@@ -38,7 +38,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         try {
             ICCMessageBroker broker =
                     (ICCMessageBroker) AsterixAppContextInfo.INSTANCE.getCCApplicationContext().getMessageBroker();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeRequestMessage.java
index e877f52..7264c88 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeRequestMessage.java
@@ -35,7 +35,7 @@
     private static final Logger LOGGER = Logger.getLogger(TakeoverMetadataNodeRequestMessage.class.getName());
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         NodeControllerService ncs = (NodeControllerService) cs;
         IAsterixAppRuntimeContext appContext =
                 (IAsterixAppRuntimeContext) ncs.getApplicationContext().getApplicationObject();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeResponseMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeResponseMessage.java
index 2466e2b..d3c3502 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeResponseMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverMetadataNodeResponseMessage.java
@@ -37,7 +37,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         ClusterStateManager.INSTANCE.processMetadataNodeTakeoverResponse(this);
     }
 
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsRequestMessage.java
index e024eed..e78f159 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsRequestMessage.java
@@ -74,7 +74,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         NodeControllerService ncs = (NodeControllerService) cs;
         IAsterixAppRuntimeContext appContext =
                 (IAsterixAppRuntimeContext) ncs.getApplicationContext().getApplicationObject();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsResponseMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsResponseMessage.java
index a4a5226..3adc8e9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsResponseMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/TakeoverPartitionsResponseMessage.java
@@ -49,7 +49,7 @@
     }
 
     @Override
-    public void handle(IControllerService cs) throws HyracksDataException {
+    public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
         ClusterStateManager.INSTANCE.processPartitionTakeoverResponse(this);
     }
 
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/AsterixAppContextInfo.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/AsterixAppContextInfo.java
index 471d3d3..592f9df 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/AsterixAppContextInfo.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/AsterixAppContextInfo.java
@@ -19,6 +19,7 @@
 package org.apache.asterix.runtime.util;
 
 import java.io.IOException;
+import java.util.function.Supplier;
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.cluster.IGlobalRecoveryMaanger;
@@ -37,8 +38,8 @@
 import org.apache.asterix.common.dataflow.IAsterixApplicationContextInfo;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.metadata.IMetadataBootstrap;
 import org.apache.asterix.common.transactions.IAsterixResourceIdManager;
-import org.apache.hyracks.api.application.IApplicationConfig;
 import org.apache.hyracks.api.application.ICCApplicationContext;
 import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
@@ -66,6 +67,7 @@
     private AsterixReplicationProperties replicationProperties;
     private AsterixExtensionProperties extensionProperties;
     private MessagingProperties messagingProperties;
+    private Supplier<IMetadataBootstrap> metadataBootstrapSupplier;
     private IHyracksClientConnection hcc;
     private Object extensionManager;
     private volatile boolean initialized = false;
@@ -74,8 +76,10 @@
     }
 
     public static synchronized void initialize(ICCApplicationContext ccAppCtx, IHyracksClientConnection hcc,
-            IGlobalRecoveryMaanger globalRecoveryMaanger, ILibraryManager libraryManager,
-            IAsterixResourceIdManager resourceIdManager)
+                                               IGlobalRecoveryMaanger globalRecoveryMaanger,
+                                               ILibraryManager libraryManager,
+                                               IAsterixResourceIdManager resourceIdManager,
+                                               Supplier<IMetadataBootstrap> metadataBootstrapSupplier)
             throws AsterixException, IOException {
         if (INSTANCE.initialized) {
             throw new AsterixException(AsterixAppContextInfo.class.getSimpleName() + " has been initialized already");
@@ -88,14 +92,7 @@
         INSTANCE.resourceIdManager = resourceIdManager;
         // Determine whether to use old-style asterix-configuration.xml or new-style configuration.
         // QQQ strip this out eventually
-        AsterixPropertiesAccessor propertiesAccessor;
-        IApplicationConfig cfg = ccAppCtx.getAppConfig();
-        // QQQ this is NOT a good way to determine whether the config is valid
-        if (cfg.getString("cc", "cluster.address") != null) {
-            propertiesAccessor = new AsterixPropertiesAccessor(cfg);
-        } else {
-            propertiesAccessor = new AsterixPropertiesAccessor();
-        }
+        AsterixPropertiesAccessor propertiesAccessor = AsterixPropertiesAccessor.getInstance(ccAppCtx.getAppConfig());
         INSTANCE.compilerProperties = new AsterixCompilerProperties(propertiesAccessor);
         INSTANCE.externalProperties = new AsterixExternalProperties(propertiesAccessor);
         INSTANCE.metadataProperties = new AsterixMetadataProperties(propertiesAccessor);
@@ -107,6 +104,8 @@
         INSTANCE.hcc = hcc;
         INSTANCE.buildProperties = new AsterixBuildProperties(propertiesAccessor);
         INSTANCE.messagingProperties = new MessagingProperties(propertiesAccessor);
+        INSTANCE.metadataBootstrapSupplier = metadataBootstrapSupplier;
+
         Logger.getLogger("org.apache.asterix").setLevel(INSTANCE.externalProperties.getLogLevel());
         Logger.getLogger("org.apache.hyracks").setLevel(INSTANCE.externalProperties.getLogLevel());
     }
@@ -204,4 +203,8 @@
     public IAsterixResourceIdManager getResourceIdManager() {
         return resourceIdManager;
     }
+
+    public IMetadataBootstrap getMetadataBootstrap() {
+        return metadataBootstrapSupplier.get();
+    }
 }
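
Taking a Supplier<IMetadataBootstrap> rather than the bootstrap object itself lets AsterixAppContextInfo be initialized before the metadata layer exists; the supplier is dereferenced only in getMetadataBootstrap(), which ClusterStateManager calls once the rest of the cluster is ready (next file). A tiny sketch of that deferred wiring, with hypothetical names:

    import java.util.function.Supplier;

    // Hypothetical illustration of supplier-based deferred wiring.
    final class LazyBootstrapContext {
        private final Supplier<Runnable> bootstrapSupplier;

        LazyBootstrapContext(Supplier<Runnable> bootstrapSupplier) {
            this.bootstrapSupplier = bootstrapSupplier; // nothing is resolved at construction time
        }

        void onClusterReady() {
            bootstrapSupplier.get().run(); // the bootstrap is looked up only when it is actually needed
        }
    }
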
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/ClusterStateManager.java
index bc15788..942abe3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/util/ClusterStateManager.java
@@ -51,6 +51,8 @@
 import org.apache.asterix.runtime.message.TakeoverPartitionsResponseMessage;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.exceptions.HyracksException;
 import org.json.JSONException;
 import org.json.JSONObject;
 
@@ -109,7 +111,7 @@
         }
     }
 
-    public synchronized void removeNCConfiguration(String nodeId) {
+    public synchronized void removeNCConfiguration(String nodeId) throws HyracksException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Removing configuration parameters for node id " + nodeId);
         }
@@ -139,7 +141,8 @@
         }
     }
 
-    public synchronized void addNCConfiguration(String nodeId, Map<String, String> configuration) {
+    public synchronized void addNCConfiguration(String nodeId, Map<String, String> configuration)
+            throws HyracksException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Registering configuration parameters for node id " + nodeId);
         }
@@ -167,7 +170,7 @@
         updateNodePartitions(nodeId, true);
     }
 
-    private synchronized void updateNodePartitions(String nodeId, boolean added) {
+    private synchronized void updateNodePartitions(String nodeId, boolean added) throws HyracksDataException {
         ClusterPartition[] nodePartitions = node2PartitionsMap.get(nodeId);
         // if this isn't a storage node, it will not have cluster partitions
         if (nodePartitions != null) {
@@ -183,7 +186,7 @@
         }
     }
 
-    private synchronized void updateClusterState() {
+    private synchronized void updateClusterState() throws HyracksDataException {
         for (ClusterPartition p : clusterPartitions.values()) {
             if (!p.isActive()) {
                 state = ClusterState.UNUSABLE;
@@ -191,11 +194,14 @@
                 return;
             }
         }
-        //if all storage partitions are active as well as the metadata node, then the cluster is active
+        // if all storage partitions are active as well as the metadata node, then the cluster is active
         if (metadataNodeActive) {
+            state = ClusterState.PENDING;
+            LOGGER.info("Cluster is now " + state);
+            AsterixAppContextInfo.INSTANCE.getMetadataBootstrap().init();
             state = ClusterState.ACTIVE;
-            LOGGER.info("Cluster is now ACTIVE");
-            //start global recovery
+            LOGGER.info("Cluster is now " + state);
+            // start global recovery
             AsterixAppContextInfo.INSTANCE.getGlobalRecoveryManager().startGlobalRecovery();
             if (autoFailover && !pendingProcessingFailbackPlans.isEmpty()) {
                 processPendingFailbackPlans();
@@ -412,19 +418,21 @@
         }
     }
 
-    public synchronized void processPartitionTakeoverResponse(TakeoverPartitionsResponseMessage reponse) {
-        for (Integer partitonId : reponse.getPartitions()) {
+    public synchronized void processPartitionTakeoverResponse(TakeoverPartitionsResponseMessage response)
+            throws HyracksDataException {
+        for (Integer partitonId : response.getPartitions()) {
             ClusterPartition partition = clusterPartitions.get(partitonId);
             partition.setActive(true);
-            partition.setActiveNodeId(reponse.getNodeId());
+            partition.setActiveNodeId(response.getNodeId());
         }
-        pendingTakeoverRequests.remove(reponse.getRequestId());
+        pendingTakeoverRequests.remove(response.getRequestId());
         resetClusterPartitionConstraint();
         updateClusterState();
     }
 
-    public synchronized void processMetadataNodeTakeoverResponse(TakeoverMetadataNodeResponseMessage reponse) {
-        currentMetadataNode = reponse.getNodeId();
+    public synchronized void processMetadataNodeTakeoverResponse(TakeoverMetadataNodeResponseMessage response)
+            throws HyracksDataException {
+        currentMetadataNode = response.getNodeId();
         metadataNodeActive = true;
         LOGGER.info("Current metadata node: " + currentMetadataNode);
         updateClusterState();
@@ -556,7 +564,8 @@
         }
     }
 
-    public synchronized void processCompleteFailbackResponse(CompleteFailbackResponseMessage reponse) {
+    public synchronized void processCompleteFailbackResponse(CompleteFailbackResponseMessage response)
+            throws HyracksDataException {
         /**
          * the failback plan completed successfully:
          * Remove all references to it.
@@ -564,7 +573,7 @@
          * Notify its replicas to reconnect to it.
          * Set the failing back node partitions as active.
          */
-        NodeFailbackPlan plan = planId2FailbackPlanMap.remove(reponse.getPlanId());
+        NodeFailbackPlan plan = planId2FailbackPlanMap.remove(response.getPlanId());
         String nodeId = plan.getNodeId();
         failedNodes.remove(nodeId);
         //notify impacted replicas they can reconnect to this node
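
The change above alters the activation flow: once all storage partitions and the metadata node are up, the cluster first becomes PENDING, the metadata bootstrap is initialized, and only then does the state move to ACTIVE and global recovery start. The following is a simplified, self-contained sketch of that flow; the enum and interfaces are stand-ins, not the actual AsterixDB types.

import java.util.List;

public class ClusterActivationSketch {
    enum ClusterState { UNUSABLE, PENDING, ACTIVE }

    interface MetadataBootstrap { void init() throws Exception; }
    interface GlobalRecovery { void startGlobalRecovery(); }

    private ClusterState state = ClusterState.UNUSABLE;

    public ClusterState activate(List<Boolean> partitionsActive, boolean metadataNodeActive,
            MetadataBootstrap bootstrap, GlobalRecovery recovery) throws Exception {
        if (partitionsActive.contains(Boolean.FALSE)) {
            state = ClusterState.UNUSABLE;   // some storage partition is still down
            return state;
        }
        if (metadataNodeActive) {
            state = ClusterState.PENDING;    // everything is up, metadata not yet usable
            bootstrap.init();                // initialize metadata before declaring the cluster ACTIVE
            state = ClusterState.ACTIVE;
            recovery.startGlobalRecovery();  // global recovery only starts once ACTIVE
        }
        return state;
    }
}
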
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index 58b0393..919cb16 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -244,5 +244,12 @@
       <type>zip</type>
       <classifier>assembly</classifier>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop.version}</version>
+      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-server/src/main/samples/local/conf/cc.conf b/asterixdb/asterix-server/src/main/samples/local/conf/cc.conf
index 5ef1cbf..b5f05d3 100644
--- a/asterixdb/asterix-server/src/main/samples/local/conf/cc.conf
+++ b/asterixdb/asterix-server/src/main/samples/local/conf/cc.conf
@@ -17,5 +17,5 @@
 [cc]
 cluster.address = 127.0.0.1
 
-[asterix]
+[app]
 log.level=INFO
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
index e3a561d..77effb6 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
@@ -155,7 +155,7 @@
         }
     }
 
-    @Parameters
+    @Parameters(name = "NCServiceExecutionTest {index}: {0}")
     public static Collection<Object[]> tests() throws Exception {
         Collection<Object[]> testArgs = new ArrayList<Object[]>();
         TestCaseContext.Builder b = new TestCaseContext.Builder();
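
The @Parameters(name = ...) template above makes each generated test case carry its index ({index}) and first argument ({0}) in test reports, which is much easier to read than the default numeric labels. A small, self-contained JUnit 4 example of the same naming mechanism:

import java.util.Arrays;
import java.util.Collection;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class NamingExampleTest {

    @Parameters(name = "NamingExampleTest {index}: {0}")
    public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] { { "first-case", 1 }, { "second-case", 2 } });
    }

    private final String label;
    private final int value;

    public NamingExampleTest(String label, int value) {
        this.label = label;
        this.value = value;
    }

    @Test
    public void valueIsPositive() {
        // Reported as "NamingExampleTest 0: first-case" and "NamingExampleTest 1: second-case".
        Assert.assertTrue(label, value > 0);
    }
}
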
diff --git a/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf b/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf
index c4c76e6..c69873c 100644
--- a/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf
+++ b/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf
@@ -20,6 +20,6 @@
 cluster.address = 127.0.0.1
 app.class=org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint
 
-[asterix]
+[app]
 storage.memorycomponent.globalbudget = 1073741824
 
diff --git a/asterixdb/asterix-yarn/pom.xml b/asterixdb/asterix-yarn/pom.xml
index a9a4a36..79b9822 100644
--- a/asterixdb/asterix-yarn/pom.xml
+++ b/asterixdb/asterix-yarn/pom.xml
@@ -232,5 +232,12 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop.version}</version>
+      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git a/asterixdb/asterix-yarn/src/main/resources/base-asterix-configuration.xml b/asterixdb/asterix-yarn/src/main/resources/base-asterix-configuration.xml
index 788cb76..a1b369e 100644
--- a/asterixdb/asterix-yarn/src/main/resources/base-asterix-configuration.xml
+++ b/asterixdb/asterix-yarn/src/main/resources/base-asterix-configuration.xml
@@ -76,7 +76,10 @@
     <name>storage.memorycomponent.numpages</name>
     <value>256</value>
     <description>The number of pages to allocate for a memory component.
-      (Default = 256)
+      This budget is shared by all the memory components of the primary
+      index and all its secondary indexes across all I/O devices on a node.
+      Note: in-memory components usually have a fill factor of 75%, since
+      the pages are 75% full and the remaining 25% is left unused. (Default = 256)
     </description>
   </property>
 
@@ -99,9 +102,11 @@
   <property>
     <name>storage.memorycomponent.globalbudget</name>
     <value>512MB</value>
-    <description>The total size of memory in bytes that the sum of all
-      open memory
-      components cannot exceed. (Default = "536870192" // 512MB)
+    <description>The total size of memory in bytes that the sum of all open memory
+      components cannot exceed. Think of this as the buffer cache for all memory
+      components of all indexes on a node. When this budget is fully used, a victim
+      dataset is chosen, then evicted and closed to make room for another dataset.
+      (Default = 512MB)
     </description>
   </property>
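
The description above explains the shared nature of storage.memorycomponent.globalbudget: all open memory components draw from one budget, and when it is exhausted a victim dataset is evicted and closed. A purely illustrative sketch of that accounting follows; the class, method names, and the insertion-order victim policy are hypothetical, not AsterixDB's implementation.

import java.util.LinkedHashMap;
import java.util.Map;

public class MemoryComponentBudgetSketch {
    private final long globalBudget;   // e.g. 512MB shared by all open memory components on a node
    private long used;
    // Insertion order doubles as a trivial "oldest open dataset first" victim policy.
    private final Map<String, Long> openDatasets = new LinkedHashMap<>();

    public MemoryComponentBudgetSketch(long globalBudget) {
        this.globalBudget = globalBudget;
    }

    public synchronized void open(String dataset, long memoryNeeded) {
        while (used + memoryNeeded > globalBudget && !openDatasets.isEmpty()) {
            // Budget exhausted: evict and close a victim dataset to make room.
            String victim = openDatasets.keySet().iterator().next();
            used -= openDatasets.remove(victim);
        }
        openDatasets.put(dataset, memoryNeeded);
        used += memoryNeeded;
    }
}
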
 
diff --git a/asterixdb/asterix-yarn/src/main/resources/configs/base-asterix-configuration.xml b/asterixdb/asterix-yarn/src/main/resources/configs/base-asterix-configuration.xml
index 788cb76..a1b369e 100644
--- a/asterixdb/asterix-yarn/src/main/resources/configs/base-asterix-configuration.xml
+++ b/asterixdb/asterix-yarn/src/main/resources/configs/base-asterix-configuration.xml
@@ -76,7 +76,10 @@
     <name>storage.memorycomponent.numpages</name>
     <value>256</value>
     <description>The number of pages to allocate for a memory component.
-      (Default = 256)
+      This budget is shared by all the memory components of the primary
+      index and all its secondary indexes across all I/O devices on a node.
+      Note: in-memory components usually have a fill factor of 75%, since
+      the pages are 75% full and the remaining 25% is left unused. (Default = 256)
     </description>
   </property>
 
@@ -99,9 +102,11 @@
   <property>
     <name>storage.memorycomponent.globalbudget</name>
     <value>512MB</value>
-    <description>The total size of memory in bytes that the sum of all
-      open memory
-      components cannot exceed. (Default = "536870192" // 512MB)
+    <description>The total size of memory in bytes that the sum of all open memory
+      components cannot exceed. Think of this as the buffer cache for all memory
+      components of all indexes on a node. When this budget is fully used, a victim
+      dataset is chosen, then evicted and closed to make room for another dataset.
+      (Default = 512MB)
     </description>
   </property>
 
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 6cbe20a..161a625 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -66,8 +66,8 @@
     <algebricks.version>0.2.18-SNAPSHOT</algebricks.version>
     <hyracks.version>0.2.18-SNAPSHOT</hyracks.version>
     <hadoop.version>2.2.0</hadoop.version>
-    <junit.version>4.11</junit.version>
-    <commons.io.version>2.4</commons.io.version>
+    <junit.version>4.12</junit.version>
+    <commons.io.version>2.5</commons.io.version>
     <servlet.api.version>3.1.0</servlet.api.version>
     <json.version>20090211</json.version>
     <jacoco.version>0.7.6.201602180812</jacoco.version>
@@ -658,6 +658,11 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hyracks</groupId>
+        <artifactId>algebricks-rewriter</artifactId>
+        <version>${algebricks.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hyracks</groupId>
         <artifactId>hyracks-api</artifactId>
         <version>${hyracks.version}</version>
       </dependency>
diff --git a/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml b/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
index 13654f4..75684f3 100644
--- a/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-compiler/pom.xml
@@ -44,13 +44,28 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>algebricks-rewriter</artifactId>
+      <artifactId>algebricks-core</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>algebricks-core</artifactId>
+      <artifactId>algebricks-common</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-data</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-core/pom.xml b/hyracks-fullstack/algebricks/algebricks-core/pom.xml
index 498a656..cd4b374 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-core/pom.xml
@@ -44,16 +44,6 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-storage-am-btree</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-storage-am-rtree</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
     </dependency>
@@ -67,5 +57,35 @@
       <artifactId>algebricks-common</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.5</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-data</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
index 07c35a3..c7f9aa1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
@@ -38,7 +38,6 @@
     MATERIALIZE,
     NESTEDTUPLESOURCE,
     ORDER,
-    PARTITIONINGSPLIT,
     PROJECT,
     RANGE_FORWARD,
     REPLICATE,
@@ -46,6 +45,7 @@
     SCRIPT,
     SELECT,
     SINK,
+    SPLIT,
     SUBPLAN,
     TOKENIZE,
     UNIONALL,
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
index 25a47ee..949f6c8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
@@ -47,7 +47,6 @@
     NESTED_LOOP,
     NESTED_TUPLE_SOURCE,
     ONE_TO_ONE_EXCHANGE,
-    PARTITIONINGSPLIT,
     PRE_CLUSTERED_GROUP_BY,
     PRE_SORTED_DISTINCT_BY,
     RANDOM_PARTITION_EXCHANGE,
@@ -55,6 +54,7 @@
     RANGE_FORWARD,
     RANGE_PARTITION_EXCHANGE,
     RANGE_PARTITION_MERGE_EXCHANGE,
+    REPLICATE,
     RTREE_SEARCH,
     RUNNING_AGGREGATE,
     SINGLE_PARTITION_INVERTED_INDEX_SEARCH,
@@ -62,7 +62,7 @@
     SINK_WRITE,
     SORT_GROUP_BY,
     SORT_MERGE_EXCHANGE,
-    REPLICATE,
+    SPLIT,
     STABLE_SORT,
     STATS,
     STREAM_LIMIT,
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractReplicateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractReplicateOperator.java
new file mode 100644
index 0000000..f883687
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractReplicateOperator.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.algebricks.core.algebra.operators.logical;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import org.apache.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
+import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
+import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+
+/**
+ * Abstract base class for the two replication-related operators: replicate and split.
+ * The replicate operator propagates all frames to all output branches;
+ * that is, each tuple is propagated to every output branch.
+ * The split operator propagates each tuple in a frame to exactly one output branch.
+ */
+public abstract class AbstractReplicateOperator extends AbstractLogicalOperator {
+
+    private int outputArity;
+    protected boolean[] outputMaterializationFlags;
+    private List<Mutable<ILogicalOperator>> outputs;
+
+    public AbstractReplicateOperator(int outputArity) {
+        this.outputArity = outputArity;
+        this.outputMaterializationFlags = new boolean[outputArity];
+        this.outputs = new ArrayList<>();
+    }
+
+    public AbstractReplicateOperator(int outputArity, boolean[] outputMaterializationFlags) {
+        this.outputArity = outputArity;
+        this.outputMaterializationFlags = outputMaterializationFlags;
+        this.outputs = new ArrayList<>();
+    }
+
+    @Override
+    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform)
+            throws AlgebricksException {
+        return false;
+    }
+
+    @Override
+    public VariablePropagationPolicy getVariablePropagationPolicy() {
+        return VariablePropagationPolicy.ALL;
+    }
+
+    @Override
+    public boolean isMap() {
+        return true;
+    }
+
+    @Override
+    public void recomputeSchema() {
+        schema = new ArrayList<LogicalVariable>(inputs.get(0).getValue().getSchema());
+    }
+
+    public void substituteVar(LogicalVariable v1, LogicalVariable v2) {
+        // do nothing
+    }
+
+    public int getOutputArity() {
+        return outputArity;
+    }
+
+    public void setOutputMaterializationFlags(boolean[] outputMaterializationFlags) {
+        this.outputMaterializationFlags = outputMaterializationFlags;
+    }
+
+    public boolean[] getOutputMaterializationFlags() {
+        return outputMaterializationFlags;
+    }
+
+    public List<Mutable<ILogicalOperator>> getOutputs() {
+        return outputs;
+    }
+
+    @Override
+    public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
+        return createPropagatingAllInputsTypeEnvironment(ctx);
+    }
+
+}
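
The class comment above distinguishes replicate (every tuple to every branch) from split (each tuple to exactly one branch, selected at runtime). The toy, self-contained sketch below only models branches as lists to make that contrast concrete; it is not the Algebricks runtime.

import java.util.ArrayList;
import java.util.List;
import java.util.function.ToIntFunction;

public class ReplicateVsSplitSketch {

    // Replicate: every input tuple goes to every output branch.
    static <T> List<List<T>> replicate(List<T> input, int outputArity) {
        List<List<T>> branches = new ArrayList<>();
        for (int i = 0; i < outputArity; i++) {
            branches.add(new ArrayList<>(input));
        }
        return branches;
    }

    // Split: each tuple goes to exactly one branch, chosen by an integer-valued expression.
    static <T> List<List<T>> split(List<T> input, int outputArity, ToIntFunction<T> branchingExpression) {
        List<List<T>> branches = new ArrayList<>();
        for (int i = 0; i < outputArity; i++) {
            branches.add(new ArrayList<>());
        }
        for (T tuple : input) {
            branches.get(branchingExpression.applyAsInt(tuple)).add(tuple);
        }
        return branches;
    }
}
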
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/PartitioningSplitOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/PartitioningSplitOperator.java
deleted file mode 100644
index 1c5f324..0000000
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/PartitioningSplitOperator.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.algebricks.core.algebra.operators.logical;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import org.apache.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
-import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
-import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
-import org.apache.hyracks.algebricks.runtime.operators.std.PartitioningSplitOperatorDescriptor;
-
-/**
- * Partitions it's input based on a given list of expressions.
- * Each expression is assumed to return true/false,
- * and there is exactly one output branch per expression (optionally, plus one default branch).
- * For each input tuple, the expressions are evaluated one-by-one,
- * and the tuple is written to first output branch whose corresponding
- * expression evaluates to true.
- * If all expressions evaluate to false, then
- * the tuple is written to the default output branch, if any was given.
- * If no output branch was given, then such tuples are simply dropped.
- * Given N expressions there may be N or N+1 output branches because the default output branch may be separate from the regular output branches.
- */
-public class PartitioningSplitOperator extends AbstractLogicalOperator {
-
-    private final List<Mutable<ILogicalExpression>> expressions;
-    private final int defaultBranchIndex;
-
-    public PartitioningSplitOperator(List<Mutable<ILogicalExpression>> expressions, int defaultBranchIndex) throws AlgebricksException {
-        this.expressions = expressions;
-        this.defaultBranchIndex = defaultBranchIndex;
-        // Check that the default output branch index is in [0, N], where N is the number of expressions.
-        if (defaultBranchIndex != PartitioningSplitOperatorDescriptor.NO_DEFAULT_BRANCH
-                && defaultBranchIndex > expressions.size()) {
-            throw new AlgebricksException("Default branch index out of bounds. Number of exprs given: "
-                    + expressions.size() + ". The maximum default branch index may therefore be: " + expressions.size());
-        }
-    }
-
-    public List<Mutable<ILogicalExpression>> getExpressions() {
-        return expressions;
-    }
-
-    public int getDefaultBranchIndex() {
-        return defaultBranchIndex;
-    }
-
-    public int getNumOutputBranches() {
-        return (defaultBranchIndex == expressions.size()) ? expressions.size() + 1 : expressions.size();
-    }
-
-    @Override
-    public LogicalOperatorTag getOperatorTag() {
-        return LogicalOperatorTag.PARTITIONINGSPLIT;
-    }
-
-    @Override
-    public void recomputeSchema() {
-        schema = new ArrayList<LogicalVariable>();
-        schema.addAll(inputs.get(0).getValue().getSchema());
-    }
-
-    @Override
-    public VariablePropagationPolicy getVariablePropagationPolicy() {
-        return VariablePropagationPolicy.ALL;
-    }
-
-    @Override
-    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform visitor) throws AlgebricksException {
-        boolean b = false;
-        for (int i = 0; i < expressions.size(); i++) {
-            if (visitor.transform(expressions.get(i))) {
-                b = true;
-            }
-        }
-        return b;
-    }
-
-    @Override
-    public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
-        return visitor.visitPartitioningSplitOperator(this, arg);
-    }
-
-    @Override
-    public boolean isMap() {
-        return false;
-    }
-
-    @Override
-    public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
-        return createPropagatingAllInputsTypeEnvironment(ctx);
-    }
-
-}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java
index 834107c..2d2fd0f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ReplicateOperator.java
@@ -18,36 +18,18 @@
  */
 package org.apache.hyracks.algebricks.core.algebra.operators.logical;
 
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import org.apache.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
-import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
-import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
 import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
 
-public class ReplicateOperator extends AbstractLogicalOperator {
-
-    private int outputArity;
-    private boolean[] outputMaterializationFlags;
-    private List<Mutable<ILogicalOperator>> outputs;
+public class ReplicateOperator extends AbstractReplicateOperator {
 
     public ReplicateOperator(int outputArity) {
-        this.outputArity = outputArity;
-        this.outputMaterializationFlags = new boolean[outputArity];
-        this.outputs = new ArrayList<>();
+        super(outputArity);
     }
 
     public ReplicateOperator(int outputArity, boolean[] outputMaterializationFlags) {
-        this.outputArity = outputArity;
-        this.outputMaterializationFlags = outputMaterializationFlags;
-        this.outputs = new ArrayList<>();
+        super(outputArity, outputMaterializationFlags);
     }
 
     @Override
@@ -60,52 +42,6 @@
         return visitor.visitReplicateOperator(this, arg);
     }
 
-    @Override
-    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform)
-            throws AlgebricksException {
-        return false;
-    }
-
-    @Override
-    public VariablePropagationPolicy getVariablePropagationPolicy() {
-        return VariablePropagationPolicy.ALL;
-    }
-
-    @Override
-    public boolean isMap() {
-        return true;
-    }
-
-    @Override
-    public void recomputeSchema() {
-        schema = new ArrayList<LogicalVariable>(inputs.get(0).getValue().getSchema());
-    }
-
-    public void substituteVar(LogicalVariable v1, LogicalVariable v2) {
-        // do nothing
-    }
-
-    public int getOutputArity() {
-        return outputArity;
-    }
-
-    public void setOutputMaterializationFlags(boolean[] outputMaterializationFlags) {
-        this.outputMaterializationFlags = outputMaterializationFlags;
-    }
-
-    public boolean[] getOutputMaterializationFlags() {
-        return outputMaterializationFlags;
-    }
-
-    public List<Mutable<ILogicalOperator>> getOutputs() {
-        return outputs;
-    }
-
-    @Override
-    public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
-        return createPropagatingAllInputsTypeEnvironment(ctx);
-    }
-
     public boolean isBlocker() {
         for (boolean requiresMaterialization : outputMaterializationFlags) {
             if (requiresMaterialization) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/SplitOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/SplitOperator.java
new file mode 100644
index 0000000..a996673
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/SplitOperator.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hyracks.algebricks.core.algebra.operators.logical;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
+import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
+
+/**
+ * The split operator receives an expression that is evaluated to an integer value at runtime.
+ * Based on that value, each tuple is propagated to the corresponding output branch (e.g., 0 = first output, ...).
+ * Thus, unlike the replicate operator, which unconditionally propagates every tuple to all outputs,
+ * the split operator performs a conditional propagation.
+ */
+public class SplitOperator extends AbstractReplicateOperator {
+
+    // Expression that keeps the output branch information for each tuple
+    private final Mutable<ILogicalExpression> branchingExpression;
+
+    public SplitOperator(int outputArity, Mutable<ILogicalExpression> branchingExpression) {
+        super(outputArity);
+        this.branchingExpression = branchingExpression;
+    }
+
+    @Override
+    public LogicalOperatorTag getOperatorTag() {
+        return LogicalOperatorTag.SPLIT;
+    }
+
+    @Override
+    public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
+        return visitor.visitSplitOperator(this, arg);
+    }
+
+    public Mutable<ILogicalExpression> getBranchingExpression() {
+        return branchingExpression;
+    }
+
+    @Override
+    public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform visitor) throws AlgebricksException {
+        return visitor.transform(branchingExpression);
+    }
+
+    @Override
+    public void substituteVar(LogicalVariable v1, LogicalVariable v2) {
+        getBranchingExpression().getValue().substituteVar(v1, v2);
+    }
+
+}
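
As the new class shows, a SplitOperator is built from an output arity and a Mutable reference to the branching expression. The hedged sketch below illustrates how a rewrite rule might construct one; the helper class, the two-way arity, and the choice of a plain variable reference as the branching expression are illustrative assumptions, not code from this change.

import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;

public class SplitOperatorUsageSketch {
    public static SplitOperator twoWaySplitOn(LogicalVariable branchVariable) {
        // The variable is expected to evaluate to 0 or 1 per tuple; tuples are routed accordingly.
        Mutable<ILogicalExpression> branchingExpression =
                new MutableObject<>(new VariableReferenceExpression(branchVariable));
        return new SplitOperator(2, branchingExpression);
    }
}
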
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/CardinalityInferenceVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/CardinalityInferenceVisitor.java
index d7e8864..1ab0606 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/CardinalityInferenceVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/CardinalityInferenceVisitor.java
@@ -48,7 +48,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -56,6 +55,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -176,12 +176,12 @@
     }
 
     @Override
-    public Long visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
+    public Long visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
         return op.getInputs().get(0).getValue().accept(this, arg);
     }
 
     @Override
-    public Long visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+    public Long visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
         return op.getInputs().get(0).getValue().accept(this, arg);
     }
 
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
index af875f2..2bb43c9 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
@@ -30,7 +30,6 @@
 
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 import org.apache.hyracks.algebricks.common.utils.ListSet;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.core.algebra.base.EquivalenceClass;
@@ -67,7 +66,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -75,6 +73,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
index 737b246..7c114c8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -55,7 +55,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -63,6 +62,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -278,18 +278,6 @@
     }
 
     @Override
-    public Boolean visitPartitioningSplitOperator(PartitioningSplitOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
-        if (aop.getOperatorTag() != LogicalOperatorTag.PARTITIONINGSPLIT) {
-            return Boolean.FALSE;
-        }
-        PartitioningSplitOperator partitionOpArg = (PartitioningSplitOperator) copyAndSubstituteVar(op, arg);
-        boolean isomorphic = compareExpressions(op.getExpressions(), partitionOpArg.getExpressions());
-        return isomorphic;
-    }
-
-    @Override
     public Boolean visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg) throws AlgebricksException {
         AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
         if (aop.getOperatorTag() != LogicalOperatorTag.REPLICATE) {
@@ -299,6 +287,17 @@
     }
 
     @Override
+    public Boolean visitSplitOperator(SplitOperator op, ILogicalOperator arg) throws AlgebricksException {
+        AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
+        if (aop.getOperatorTag() != LogicalOperatorTag.SPLIT) {
+            return Boolean.FALSE;
+        }
+        SplitOperator sOpArg = (SplitOperator) copyAndSubstituteVar(op, arg);
+        boolean isomorphic = op.getBranchingExpression().getValue().equals(sOpArg.getBranchingExpression().getValue());
+        return isomorphic;
+    }
+
+    @Override
     public Boolean visitRangeForwardOperator(RangeForwardOperator op, ILogicalOperator arg) throws AlgebricksException {
         AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
         if (aop.getOperatorTag() != LogicalOperatorTag.RANGE_FORWARD) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
index 87e4f71..7f099cb 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
@@ -22,8 +22,8 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.Map.Entry;
+import java.util.Set;
 
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.commons.lang3.mutable.MutableObject;
@@ -57,7 +57,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -65,6 +64,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -166,14 +166,13 @@
     }
 
     @Override
-    public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
+    public Void visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg) throws AlgebricksException {
         mapVariablesStandard(op, arg);
         return null;
     }
 
     @Override
-    public Void visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg) throws AlgebricksException {
+    public Void visitSplitOperator(SplitOperator op, ILogicalOperator arg) throws AlgebricksException {
         mapVariablesStandard(op, arg);
         return null;
     }
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
index 26f8637..d68f8d7 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
@@ -52,19 +52,19 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
 import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
 import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
@@ -385,14 +385,6 @@
     }
 
     @Override
-    public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, ILogicalOperator arg)
-            throws AlgebricksException {
-        PartitioningSplitOperator opCopy = new PartitioningSplitOperator(
-                exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()), op.getDefaultBranchIndex());
-        return opCopy;
-    }
-
-    @Override
     public ILogicalOperator visitProjectOperator(ProjectOperator op, ILogicalOperator arg) throws AlgebricksException {
         ProjectOperator opCopy = new ProjectOperator(deepCopyVariableList(op.getVariables()));
         deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
@@ -419,6 +411,12 @@
         return opCopy;
     }
 
+    public ILogicalOperator visitSplitOperator(SplitOperator op, ILogicalOperator arg) throws AlgebricksException {
+        SplitOperator opCopy = new SplitOperator(op.getOutputArity(), op.getBranchingExpression());
+        deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
+        return opCopy;
+    }
+
     @Override
     public ILogicalOperator visitMaterializeOperator(MaterializeOperator op, ILogicalOperator arg)
             throws AlgebricksException {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
index 58b2cd7..f7113c0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
@@ -55,6 +55,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -200,6 +201,11 @@
     }
 
     @Override
+    public Void visitSplitOperator(SplitOperator op, IOptimizationContext arg) throws AlgebricksException {
+        return null;
+    }
+
+    @Override
     public Void visitMaterializeOperator(MaterializeOperator op, IOptimizationContext arg) throws AlgebricksException {
         // TODO Auto-generated method stub
         return null;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
index 067a4bc..578dab0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
@@ -53,7 +53,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -61,6 +61,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -68,7 +69,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
 import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
 
@@ -169,16 +169,13 @@
     }
 
     @Override
-    public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg)
-            throws AlgebricksException {
-        ArrayList<Mutable<ILogicalExpression>> newExpressions = new ArrayList<>();
-        deepCopyExpressionRefs(newExpressions, op.getExpressions());
-        return new PartitioningSplitOperator(newExpressions, op.getDefaultBranchIndex());
+    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+        return new ReplicateOperator(op.getOutputArity());
     }
 
     @Override
-    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
-        return new ReplicateOperator(op.getOutputArity());
+    public ILogicalOperator visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
+        return new SplitOperator(op.getOutputArity(), op.getBranchingExpression());
     }
 
     @Override
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/PrimaryKeyVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/PrimaryKeyVariablesVisitor.java
index f3d2990..ec74b4e 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/PrimaryKeyVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/PrimaryKeyVariablesVisitor.java
@@ -49,7 +49,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -57,6 +56,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -175,13 +175,12 @@
     }
 
     @Override
-    public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, IOptimizationContext ctx)
-            throws AlgebricksException {
+    public Void visitReplicateOperator(ReplicateOperator op, IOptimizationContext ctx) throws AlgebricksException {
         return null;
     }
 
     @Override
-    public Void visitReplicateOperator(ReplicateOperator op, IOptimizationContext ctx) throws AlgebricksException {
+    public Void visitSplitOperator(SplitOperator op, IOptimizationContext arg) throws AlgebricksException {
         return null;
     }
 
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
index 1a2c754..a5adfa4 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
@@ -48,12 +48,11 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -61,6 +60,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -157,11 +157,6 @@
     }
 
     @Override
-    public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
-        return null;
-    }
-
-    @Override
     public Void visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
         return null;
     }
@@ -261,6 +256,11 @@
     }
 
     @Override
+    public Void visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
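+        // A split does not produce any new variables.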
+        return null;
+    }
+
+    @Override
     public Void visitMaterializeOperator(MaterializeOperator op, Void arg) throws AlgebricksException {
         return null;
     }
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
index 93d878c..e1343d3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
@@ -46,12 +46,11 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -59,6 +58,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -180,12 +180,6 @@
     }
 
     @Override
-    public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) throws AlgebricksException {
-        standardLayout(op);
-        return null;
-    }
-
-    @Override
     public Void visitProjectOperator(ProjectOperator op, Void arg) throws AlgebricksException {
         schemaVariables.addAll(op.getVariables());
         return null;
@@ -295,6 +289,12 @@
     }
 
     @Override
+    public Void visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
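+        // A split exposes the standard schema layout of its input.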
+        standardLayout(op);
+        return null;
+    }
+
+    @Override
     public Void visitMaterializeOperator(MaterializeOperator op, Void arg) throws AlgebricksException {
         standardLayout(op);
         return null;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
index 3623221..5d6a7b3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
@@ -46,13 +46,12 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -60,6 +59,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -415,6 +415,13 @@
     }
 
     @Override
+    public Void visitSplitOperator(SplitOperator op, Pair<LogicalVariable, LogicalVariable> arg)
+            throws AlgebricksException {
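+        // Delegate variable substitution to the split operator (its only expression is the branching expression).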
+        op.substituteVar(arg.first, arg.second);
+        return null;
+    }
+
+    @Override
     public Void visitRangeForwardOperator(RangeForwardOperator op, Pair<LogicalVariable, LogicalVariable> arg)
             throws AlgebricksException {
         return null;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
index c0ca32e..e6a8032 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
@@ -52,7 +52,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -60,6 +59,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -233,14 +233,6 @@
     }
 
     @Override
-    public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg) {
-        for (Mutable<ILogicalExpression> e : op.getExpressions()) {
-            e.getValue().getUsedVariables(usedVariables);
-        }
-        return null;
-    }
-
-    @Override
     public Void visitProjectOperator(ProjectOperator op, Void arg) {
         List<LogicalVariable> parameterVariables = op.getVariables();
         for (LogicalVariable v : parameterVariables) {
@@ -437,9 +429,12 @@
 
     @Override
     public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
-        for (Mutable<ILogicalOperator> outputOp : op.getOutputs()) {
-            VariableUtilities.getUsedVariables(outputOp.getValue(), usedVariables);
-        }
+        return null;
+    }
+
+    @Override
+    public Void visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
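+        // Only the branching expression contributes used variables for a split.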
+        op.getBranchingExpression().getValue().getUsedVariables(usedVariables);
         return null;
     }
 
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractReplicatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractReplicatePOperator.java
new file mode 100644
index 0000000..1d13163
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractReplicatePOperator.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.algebricks.core.algebra.operators.physical;
+
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractReplicateOperator;
+import org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
+import org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
+import org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
+
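+/**
+ * Base physical operator shared by replicate and split. Both run as full (non-micro) operators,
+ * deliver the same physical properties as their input, and mark an output with dependency label 1
+ * only when that output has to be materialized.
+ */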
+public abstract class AbstractReplicatePOperator extends AbstractPhysicalOperator {
+
+    @Override
+    public boolean isMicroOperator() {
+        return false;
+    }
+
+    @Override
+    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
+            IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
+        return emptyUnaryRequirements();
+    }
+
+    @Override
+    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
+        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+        deliveredProperties = (StructuralPropertiesVector) op2.getDeliveredPhysicalProperties().clone();
+    }
+
+    @Override
+    public Pair<int[], int[]> getInputOutputDependencyLabels(ILogicalOperator op) {
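+        // The single input carries dependency label 0.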
+        int[] inputDependencyLabels = new int[] { 0 };
+        AbstractReplicateOperator rop = (AbstractReplicateOperator) op;
+        int[] outputDependencyLabels = new int[rop.getOutputArity()];
+        // Change the labels of outputs that require materialization to 1.
+        boolean[] outputMaterializationFlags = rop.getOutputMaterializationFlags();
+        for (int i = 0; i < rop.getOutputArity(); i++) {
+            if (outputMaterializationFlags[i]) {
+                outputDependencyLabels[i] = 1;
+            }
+        }
+        return new Pair<>(inputDependencyLabels, outputDependencyLabels);
+    }
+
+    @Override
+    public boolean expensiveThanMaterialization() {
+        return false;
+    }
+}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
index 6501aeb..74739da 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
@@ -19,24 +19,19 @@
 package org.apache.hyracks.algebricks.core.algebra.operators.physical;
 
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
 import org.apache.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import org.apache.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import org.apache.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import org.apache.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
 import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenContext;
 import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
-import org.apache.hyracks.dataflow.std.misc.SplitOperatorDescriptor;
+import org.apache.hyracks.dataflow.std.misc.ReplicateOperatorDescriptor;
 
-public class ReplicatePOperator extends AbstractPhysicalOperator {
+public class ReplicatePOperator extends AbstractReplicatePOperator {
 
     @Override
     public PhysicalOperatorTag getOperatorTag() {
@@ -44,55 +39,25 @@
     }
 
     @Override
-    public boolean isMicroOperator() {
-        return false;
-    }
-
-    @Override
-    public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
-            IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
-        return emptyUnaryRequirements();
-    }
-
-    @Override
-    public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
-        AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
-        deliveredProperties = (StructuralPropertiesVector) op2.getDeliveredPhysicalProperties().clone();
-    }
-
-    @Override
     public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
             IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
             throws AlgebricksException {
         IOperatorDescriptorRegistry spec = builder.getJobSpec();
-        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
+                propagatedSchema, context);
 
         ReplicateOperator rop = (ReplicateOperator) op;
         int outputArity = rop.getOutputArity();
         boolean[] outputMaterializationFlags = rop.getOutputMaterializationFlags();
 
-        SplitOperatorDescriptor splitOpDesc = new SplitOperatorDescriptor(spec, recDescriptor, outputArity, outputMaterializationFlags);
+        ReplicateOperatorDescriptor splitOpDesc = new ReplicateOperatorDescriptor(spec, recDescriptor, outputArity,
+                outputMaterializationFlags);
         contributeOpDesc(builder, (AbstractLogicalOperator) op, splitOpDesc);
         ILogicalOperator src = op.getInputs().get(0).getValue();
         builder.contributeGraphEdge(src, 0, op, 0);
     }
 
     @Override
-    public Pair<int[], int[]> getInputOutputDependencyLabels(ILogicalOperator op) {
-        int[] inputDependencyLabels = new int[] { 0 };
-        ReplicateOperator rop = (ReplicateOperator) op;
-        int[] outputDependencyLabels = new int[rop.getOutputArity()];
-        // change the labels of outputs that requires materialization to 1
-        boolean[] outputMaterializationFlags = rop.getOutputMaterializationFlags();
-        for (int i = 0; i < rop.getOutputArity(); i++) {
-            if (outputMaterializationFlags[i]) {
-                outputDependencyLabels[i] = 1;
-            }
-        }
-        return new Pair<>(inputDependencyLabels, outputDependencyLabels);
-    }
-
-    @Override
     public boolean expensiveThanMaterialization() {
         return false;
     }
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
new file mode 100644
index 0000000..3b8aaab
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.algebricks.core.algebra.operators.physical;
+
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
+import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import org.apache.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
+import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
+import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenContext;
+import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
+import org.apache.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.algebricks.runtime.operators.std.SplitOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
+
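+/**
+ * Physical operator for the logical split operator: it generates a SplitOperatorDescriptor that
+ * evaluates the branching expression for each tuple and routes the tuple to the selected output.
+ */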
+public class SplitPOperator extends AbstractReplicatePOperator {
+
+    @Override
+    public PhysicalOperatorTag getOperatorTag() {
+        return PhysicalOperatorTag.SPLIT;
+    }
+
+    @Override
+    public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
+            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
+            throws AlgebricksException {
+        SplitOperator sop = (SplitOperator) op;
+        int outputArity = sop.getOutputArity();
+
+        IOperatorDescriptorRegistry spec = builder.getJobSpec();
+        RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
+                propagatedSchema, context);
+
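+        // Compile the branching expression into a scalar evaluator; at run time its integer result
+        // (read through the integer inspector below) selects the output branch for each tuple.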
+        IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
+        IScalarEvaluatorFactory branchingExprEvalFactory = expressionRuntimeProvider.createEvaluatorFactory(
+                sop.getBranchingExpression().getValue(), context.getTypeEnvironment(op), inputSchemas, context);
+
+        IBinaryIntegerInspectorFactory intInspectorFactory = context.getBinaryIntegerInspectorFactory();
+
+        SplitOperatorDescriptor splitOpDesc = new SplitOperatorDescriptor(spec, recDescriptor, outputArity,
+                branchingExprEvalFactory, intInspectorFactory);
+
+        contributeOpDesc(builder, (AbstractLogicalOperator) op, splitOpDesc);
+        ILogicalOperator src = op.getInputs().get(0).getValue();
+        builder.contributeGraphEdge(src, 0, op, 0);
+    }
+}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
index eb1780c..566f954 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
@@ -41,6 +41,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator.Kind;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
@@ -49,7 +50,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -57,6 +57,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -64,7 +65,6 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator.Kind;
 import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
 import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
 
@@ -237,15 +237,6 @@
     }
 
     @Override
-    public Void visitPartitioningSplitOperator(PartitioningSplitOperator op, Integer indent)
-            throws AlgebricksException {
-        addIndent(indent).append("partitioning-split (");
-        pprintExprList(op.getExpressions(), indent);
-        buffer.append(")");
-        return null;
-    }
-
-    @Override
     public Void visitSubplanOperator(SubplanOperator op, Integer indent) throws AlgebricksException {
         addIndent(indent).append("subplan {");
         printNestedPlans(op, indent);
@@ -366,6 +357,14 @@
     @Override
     public Void visitRangeForwardOperator(RangeForwardOperator op, Integer indent) throws AlgebricksException {
         addIndent(indent).append("range forward ");
+        return null;
+    }
+
+    @Override
+    public Void visitSplitOperator(SplitOperator op, Integer indent) throws AlgebricksException {
+        Mutable<ILogicalExpression> branchingExpression = op.getBranchingExpression();
+        addIndent(indent).append("split " + branchingExpression.getValue().accept(exprVisitor, indent));
         return null;
     }
 
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
index 0548ab3..85332ca 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
@@ -34,12 +34,11 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
@@ -47,6 +46,7 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -83,12 +83,12 @@
 
     public R visitProjectOperator(ProjectOperator op, T arg) throws AlgebricksException;
 
-    public R visitPartitioningSplitOperator(PartitioningSplitOperator op, T arg) throws AlgebricksException;
-
     public R visitReplicateOperator(ReplicateOperator op, T arg) throws AlgebricksException;
 
     public R visitRangeForwardOperator(RangeForwardOperator op, T arg) throws AlgebricksException;
 
+    public R visitSplitOperator(SplitOperator op, T arg) throws AlgebricksException;
+
     public R visitMaterializeOperator(MaterializeOperator op, T arg) throws AlgebricksException;
 
     public R visitScriptOperator(ScriptOperator op, T arg) throws AlgebricksException;
diff --git a/hyracks-fullstack/algebricks/algebricks-data/pom.xml b/hyracks-fullstack/algebricks/algebricks-data/pom.xml
index 8502577..b35694b 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-data/pom.xml
@@ -54,8 +54,18 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-util</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-common</artifactId>
       <version>0.2.18-SNAPSHOT</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/pom.xml b/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/pom.xml
index a1dfa39..4f576cb 100644
--- a/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-examples/piglet-example/pom.xml
@@ -122,5 +122,50 @@
       <artifactId>hyracks-util</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-rewriter</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-runtime</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-data</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml b/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
index e42f3f0..c507911 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/pom.xml
@@ -47,5 +47,20 @@
       <artifactId>algebricks-core</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
index eb72db0..2f0913b 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
@@ -25,7 +25,6 @@
 import java.util.Map;
 
 import org.apache.commons.lang3.mutable.Mutable;
-
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 import org.apache.hyracks.algebricks.common.utils.Pair;
 import org.apache.hyracks.algebricks.common.utils.Triple;
@@ -85,11 +84,6 @@
         return modified;
     }
 
-    @Override
-    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
-        return false;
-    }
-
     private void updateEquivalenceClassMap(LogicalVariable lhs, LogicalVariable rhs) {
         List<LogicalVariable> equivalentVars = equivalentVarsMap.get(rhs);
         if (equivalentVars == null) {
@@ -105,10 +99,11 @@
     private boolean removeRedundantVariables(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
             throws AlgebricksException {
         AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+        LogicalOperatorTag opTag = op.getOperatorTag();
         boolean modified = false;
 
         // Update equivalence class map.
-        if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+        if (opTag == LogicalOperatorTag.ASSIGN) {
             AssignOperator assignOp = (AssignOperator) op;
             int numVars = assignOp.getVariables().size();
             for (int i = 0; i < numVars; i++) {
@@ -125,7 +120,7 @@
         }
 
         // Replace variable references with their first representative.
-        if (op.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+        if (opTag == LogicalOperatorTag.PROJECT) {
             // The project operator does not use expressions, so we need to replace it's variables manually.
             if (replaceProjectVars((ProjectOperator) op)) {
                 modified = true;
@@ -154,7 +149,7 @@
         }
 
         // Deal with re-mapping of variables in group by.
-        if (op.getOperatorTag() == LogicalOperatorTag.GROUP) {
+        if (opTag == LogicalOperatorTag.GROUP) {
             if (handleGroupByVarRemapping((GroupByOperator) op)) {
                 modified = true;
             }
@@ -164,6 +159,12 @@
             context.computeAndSetTypeEnvironmentForOperator(op);
             context.addToDontApplySet(this, op);
         }
+
+        // Clears the equivalent variable map if the current operator is the root operator
+        // in the query plan.
+        if (opTag == LogicalOperatorTag.DISTRIBUTE_RESULT || opTag == LogicalOperatorTag.SINK) {
+            equivalentVarsMap.clear();
+        }
         return modified;
     }
 
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
index 55831f0..5bf4e68 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
@@ -383,7 +383,7 @@
             // We may have visited this operator before if there are multiple
             // paths in the plan.
             if (accumulatedUsedVarFromRootMap.containsKey(opRef)) {
-                accumulatedUsedVarFromRootMap.get(opRef).addAll(usedVarsSetInThisOp);
+                accumulatedUsedVarFromRootMap.get(opRef).addAll(accumulatedUsedVarFromRootSet);
             } else {
                 accumulatedUsedVarFromRootMap.put(opRef, new HashSet<LogicalVariable>(accumulatedUsedVarFromRootSet));
             }
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
index ce86332..29e96f6 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
@@ -48,13 +48,13 @@
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.PartitioningSplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RangeForwardOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
@@ -175,13 +175,12 @@
     }
 
     @Override
-    public ILogicalOperator visitPartitioningSplitOperator(PartitioningSplitOperator op, Void arg)
-            throws AlgebricksException {
+    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
         return visit(op);
     }
 
     @Override
-    public ILogicalOperator visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
+    public ILogicalOperator visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
         return visit(op);
     }
 
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml b/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
index 38c86c5..17dd1ea 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
@@ -44,16 +44,6 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-storage-am-btree</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-storage-am-rtree</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
     </dependency>
@@ -67,5 +57,25 @@
       <artifactId>algebricks-data</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20090211</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/PartitioningSplitOperatorDescriptor.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/PartitioningSplitOperatorDescriptor.java
deleted file mode 100644
index 2d5c929..0000000
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/PartitioningSplitOperatorDescriptor.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.algebricks.runtime.operators.std;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.data.IBinaryBooleanInspector;
-import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
-import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import org.apache.hyracks.api.comm.IFrame;
-import org.apache.hyracks.api.comm.IFrameWriter;
-import org.apache.hyracks.api.comm.VSizeFrame;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
-import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
-import org.apache.hyracks.data.std.api.IPointable;
-import org.apache.hyracks.data.std.primitive.VoidPointable;
-import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
-import org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
-
-public class PartitioningSplitOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-    private static final long serialVersionUID = 1L;
-    public static int NO_DEFAULT_BRANCH = -1;
-
-    private final IScalarEvaluatorFactory[] evalFactories;
-    private final IBinaryBooleanInspector boolInspector;
-    private final int defaultBranchIndex;
-
-    public PartitioningSplitOperatorDescriptor(IOperatorDescriptorRegistry spec,
-            IScalarEvaluatorFactory[] evalFactories, IBinaryBooleanInspector boolInspector, int defaultBranchIndex,
-            RecordDescriptor rDesc) {
-        super(spec, 1, (defaultBranchIndex == evalFactories.length) ? evalFactories.length + 1 : evalFactories.length);
-        for (int i = 0; i < evalFactories.length; i++) {
-            recordDescriptors[i] = rDesc;
-        }
-        this.evalFactories = evalFactories;
-        this.boolInspector = boolInspector;
-        this.defaultBranchIndex = defaultBranchIndex;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
-                    throws HyracksDataException {
-        return new AbstractUnaryInputOperatorNodePushable() {
-            private final IFrameWriter[] writers = new IFrameWriter[outputArity];
-            private final boolean[] isOpen = new boolean[outputArity];
-            private final IFrame[] writeBuffers = new IFrame[outputArity];
-            private final IScalarEvaluator[] evals = new IScalarEvaluator[outputArity];
-            private final IPointable evalPointable = new VoidPointable();
-            private final RecordDescriptor inOutRecDesc = recordDescProvider.getInputRecordDescriptor(getActivityId(),
-                    0);
-            private final FrameTupleAccessor accessor = new FrameTupleAccessor(inOutRecDesc);
-            private final FrameTupleReference frameTuple = new FrameTupleReference();
-
-            private final FrameTupleAppender tupleAppender = new FrameTupleAppender();
-            private final ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(inOutRecDesc.getFieldCount());
-            private final DataOutput tupleDos = tupleBuilder.getDataOutput();
-
-            @Override
-            public void close() throws HyracksDataException {
-                HyracksDataException hde = null;
-                for (int i = 0; i < outputArity; i++) {
-                    if (isOpen[i]) {
-                        try {
-                            tupleAppender.reset(writeBuffers[i], false);
-                            tupleAppender.write(writers[i], false);
-                        } catch (Throwable th) {
-                            if (hde == null) {
-                                hde = new HyracksDataException();
-                            }
-                            hde.addSuppressed(th);
-                        } finally {
-                            try {
-                                writers[i].close();
-                            } catch (Throwable th) {
-                                if (hde == null) {
-                                    hde = new HyracksDataException();
-                                }
-                                hde.addSuppressed(th);
-                            }
-                        }
-                    }
-                }
-                if (hde != null) {
-                    throw hde;
-                }
-            }
-
-            @Override
-            public void flush() throws HyracksDataException {
-                for (int i = 0; i < outputArity; i++) {
-                    tupleAppender.reset(writeBuffers[i], false);
-                    tupleAppender.flush(writers[i]);
-                }
-            }
-
-            @Override
-            public void fail() throws HyracksDataException {
-                HyracksDataException hde = null;
-                for (int i = 0; i < outputArity; i++) {
-                    if (isOpen[i]) {
-                        try {
-                            writers[i].fail();
-                        } catch (Throwable th) {
-                            if (hde == null) {
-                                hde = new HyracksDataException();
-                            }
-                            hde.addSuppressed(th);
-                        }
-                    }
-                }
-                if (hde != null) {
-                    throw hde;
-                }
-            }
-
-            @Override
-            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                accessor.reset(buffer);
-                int tupleCount = accessor.getTupleCount();
-                for (int i = 0; i < tupleCount; i++) {
-                    frameTuple.reset(accessor, i);
-                    boolean found = false;
-                    for (int j = 0; j < evals.length; j++) {
-                        try {
-                            evals[j].evaluate(frameTuple, evalPointable);
-                        } catch (AlgebricksException e) {
-                            throw new HyracksDataException(e);
-                        }
-                        found = boolInspector.getBooleanValue(evalPointable.getByteArray(),
-                                evalPointable.getStartOffset(), evalPointable.getLength());
-                        if (found) {
-                            copyAndAppendTuple(j);
-                            break;
-                        }
-                    }
-                    // Optionally write to default output branch.
-                    if (!found && defaultBranchIndex != NO_DEFAULT_BRANCH) {
-                        copyAndAppendTuple(defaultBranchIndex);
-                    }
-                }
-            }
-
-            private void copyAndAppendTuple(int outputIndex) throws HyracksDataException {
-                // Copy tuple into tuple builder.
-                try {
-                    tupleBuilder.reset();
-                    for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-                        tupleDos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
-                                frameTuple.getFieldLength(i));
-                        tupleBuilder.addFieldEndOffset();
-                    }
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-                tupleAppender.reset(writeBuffers[outputIndex], false);
-                FrameUtils.appendToWriter(writers[outputIndex], tupleAppender, tupleBuilder.getFieldEndOffsets(),
-                        tupleBuilder.getByteArray(), 0, tupleBuilder.getSize());
-            }
-
-            @Override
-            public void open() throws HyracksDataException {
-                for (int i = 0; i < writers.length; i++) {
-                    isOpen[i] = true;
-                    writers[i].open();
-                }
-                // Create write buffers.
-                for (int i = 0; i < outputArity; i++) {
-                    writeBuffers[i] = new VSizeFrame(ctx);
-                    // Make sure to clear all buffers, since we are reusing the tupleAppender.
-                    tupleAppender.reset(writeBuffers[i], true);
-                }
-                // Create evaluators for partitioning.
-                try {
-                    for (int i = 0; i < evalFactories.length; i++) {
-                        evals[i] = evalFactories[i].createScalarEvaluator(ctx);
-                    }
-                } catch (AlgebricksException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-
-            @Override
-            public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-                writers[index] = writer;
-            }
-        };
-    }
-}
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/SplitOperatorDescriptor.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/SplitOperatorDescriptor.java
new file mode 100644
index 0000000..2215a96
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/std/SplitOperatorDescriptor.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.algebricks.runtime.operators.std;
+
+import java.nio.ByteBuffer;
+
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.data.IBinaryIntegerInspector;
+import org.apache.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
+import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
+import org.apache.hyracks.api.comm.IFrameWriter;
+import org.apache.hyracks.api.comm.VSizeFrame;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.dataflow.ActivityId;
+import org.apache.hyracks.api.dataflow.IActivityGraphBuilder;
+import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
+import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
+import org.apache.hyracks.data.std.api.IPointable;
+import org.apache.hyracks.data.std.primitive.VoidPointable;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
+import org.apache.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import org.apache.hyracks.dataflow.std.base.AbstractReplicateOperatorDescriptor;
+import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
+
+/**
+ * Unlike the Replicate operator, the Split operator propagates each tuple in a frame to exactly one output branch.
+ */
+public class SplitOperatorDescriptor extends AbstractReplicateOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    private IScalarEvaluatorFactory branchingExprEvalFactory;
+    private IBinaryIntegerInspectorFactory intInspectorFactory;
+
+    public SplitOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc, int outputArity,
+            IScalarEvaluatorFactory branchingExprEvalFactory, IBinaryIntegerInspectorFactory intInspectorFactory) {
+        super(spec, rDesc, outputArity);
+        this.branchingExprEvalFactory = branchingExprEvalFactory;
+        this.intInspectorFactory = intInspectorFactory;
+    }
+
+    @Override
+    public void contributeActivities(IActivityGraphBuilder builder) {
+        SplitterMaterializerActivityNode sma = new SplitterMaterializerActivityNode(
+                new ActivityId(odId, SPLITTER_MATERIALIZER_ACTIVITY_ID));
+        builder.addActivity(this, sma);
+        builder.addSourceEdge(0, sma, 0);
+        for (int i = 0; i < outputArity; i++) {
+            builder.addTargetEdge(i, sma, i);
+        }
+    }
+
+    // The difference between SplitterMaterializerActivityNode and ReplicatorMaterializerActivityNode is that
+    // SplitterMaterializerActivityNode propagates each tuple to one output branch only.
+    private final class SplitterMaterializerActivityNode extends ReplicatorMaterializerActivityNode {
+        private static final long serialVersionUID = 1L;
+
+        public SplitterMaterializerActivityNode(ActivityId id) {
+            super(id);
+        }
+
+        @Override
+        public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
+                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+                throws HyracksDataException {
+            final IFrameWriter[] writers = new IFrameWriter[numberOfNonMaterializedOutputs];
+            final boolean[] isOpen = new boolean[numberOfNonMaterializedOutputs];
+            final IPointable p = VoidPointable.FACTORY.createPointable();
+            // Accessor to iterate over each tuple in an incoming frame
+            final FrameTupleAccessor accessor = new FrameTupleAccessor(recordDescriptors[0]);
+            final FrameTupleAppender[] appenders = new FrameTupleAppender[numberOfNonMaterializedOutputs];
+            final FrameTupleReference tRef = new FrameTupleReference();
+            final IBinaryIntegerInspector intInspector = intInspectorFactory.createBinaryIntegerInspector(ctx);
+            final IScalarEvaluator eval;
+            try {
+                eval = branchingExprEvalFactory.createScalarEvaluator(ctx);
+            } catch (AlgebricksException ae) {
+                throw new HyracksDataException(ae);
+            }
+            for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
+                appenders[i] = new FrameTupleAppender(new VSizeFrame(ctx), true);
+            }
+
+            return new AbstractUnaryInputOperatorNodePushable() {
+                @Override
+                public void open() throws HyracksDataException {
+                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
+                        isOpen[i] = true;
+                        writers[i].open();
+                    }
+                }
+
+                @Override
+                public void nextFrame(ByteBuffer bufferAccessor) throws HyracksDataException {
+                    // Tuple based access
+                    accessor.reset(bufferAccessor);
+                    int tupleCount = accessor.getTupleCount();
+                    // The output branch number, starting from 0.
+                    int outputBranch;
+
+                    for (int i = 0; i < tupleCount; i++) {
+                        // Get the output branch number from the field in the given tuple.
+                        tRef.reset(accessor, i);
+                        try {
+                            eval.evaluate(tRef, p);
+                        } catch (AlgebricksException ae) {
+                            throw new HyracksDataException(ae);
+                        }
+                        outputBranch = intInspector.getIntegerValue(p.getByteArray(), p.getStartOffset(),
+                                p.getLength());
+
+                        // Add this tuple to the correct output frame.
+                        FrameUtils.appendToWriter(writers[outputBranch], appenders[outputBranch], accessor, i);
+                    }
+                }
+
+                @Override
+                public void close() throws HyracksDataException {
+                    HyracksDataException hde = null;
+                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
+                        if (isOpen[i]) {
+                            try {
+                                appenders[i].write(writers[i], true);
+                                writers[i].close();
+                            } catch (Throwable th) {
+                                if (hde == null) {
+                                    hde = new HyracksDataException(th);
+                                } else {
+                                    hde.addSuppressed(th);
+                                }
+                            }
+                        }
+                    }
+                    if (hde != null) {
+                        throw hde;
+                    }
+                }
+
+                @Override
+                public void fail() throws HyracksDataException {
+                    HyracksDataException hde = null;
+                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
+                        if (isOpen[i]) {
+                            try {
+                                writers[i].fail();
+                            } catch (Throwable th) {
+                                if (hde == null) {
+                                    hde = new HyracksDataException(th);
+                                } else {
+                                    hde.addSuppressed(th);
+                                }
+                            }
+                        }
+                    }
+                    if (hde != null) {
+                        throw hde;
+                    }
+                }
+
+                @Override
+                public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+                    writers[index] = writer;
+                }
+            };
+        }
+    }
+}
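
A minimal sketch of how the new SplitOperatorDescriptor above can be wired into a job, mirroring the scanSplitWrite test added later in this patch. It assumes a source operator whose output record carries the integer branch index in field 0; imports and the surrounding scan/write operators are elided, and the helper name wireSplit is invented for illustration only.

    static void wireSplit(JobSpecification spec, IOperatorDescriptor sourceOp, RecordDescriptor sourceDesc,
            IOperatorDescriptor[] sinkOps) {
        SplitOperatorDescriptor splitOp = new SplitOperatorDescriptor(spec, sourceDesc, sinkOps.length,
                new TupleFieldEvaluatorFactory(0),     // evaluate field 0 of each tuple ...
                BinaryIntegerInspectorImpl.FACTORY);   // ... and interpret it as the output branch number
        spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, splitOp, 0);
        for (int i = 0; i < sinkOps.length; i++) {
            spec.connect(new OneToOneConnectorDescriptor(spec), splitOp, i, sinkOps[i], 0);
        }
    }
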
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-0.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-0.tbl
new file mode 100644
index 0000000..0ea8a88
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-0.tbl
@@ -0,0 +1,4 @@
+0,first branch1
+0,first branch2
+0,first branch3
+0,first branch4
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-1.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-1.tbl
new file mode 100644
index 0000000..53588ef
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1-split-1.tbl
@@ -0,0 +1,3 @@
+1,second branch1
+1,second branch2
+1,second branch3
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1.tbl b/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1.tbl
new file mode 100644
index 0000000..ceb859a
--- /dev/null
+++ b/hyracks-fullstack/algebricks/algebricks-tests/data/simple/int-string-part1.tbl
@@ -0,0 +1,7 @@
+0|first branch1
+1|second branch1
+0|first branch2
+1|second branch2
+0|first branch3
+1|second branch3
+0|first branch4
\ No newline at end of file
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
index b8dc650..8ad3e7b 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
@@ -110,11 +110,6 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>algebricks-compiler</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-cc</artifactId>
       <version>${project.version}</version>
     </dependency>
@@ -128,5 +123,50 @@
       <artifactId>hyracks-data-std</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-runtime</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-data</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>algebricks-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20090211</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
index 020cffe..1276518 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
@@ -50,6 +50,7 @@
 import org.apache.hyracks.algebricks.runtime.operators.std.PrinterRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.operators.std.RunningAggregateRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.operators.std.SplitOperatorDescriptor;
 import org.apache.hyracks.algebricks.runtime.operators.std.StreamLimitRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory;
 import org.apache.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory;
@@ -83,7 +84,7 @@
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.dataflow.std.file.LineFileWriteOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.group.preclustered.PreclusteredGroupOperatorDescriptor;
-import org.apache.hyracks.dataflow.std.misc.SplitOperatorDescriptor;
+import org.apache.hyracks.dataflow.std.misc.ReplicateOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.sort.InMemorySortOperatorDescriptor;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -570,7 +571,7 @@
     }
 
     @Test
-    public void scanSplitWrite() throws Exception {
+    public void scanReplicateWrite() throws Exception {
         final int outputArity = 2;
 
         JobSpecification spec = new JobSpecification(FRAME_SIZE);
@@ -596,7 +597,69 @@
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanOp,
                 new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
 
-        SplitOperatorDescriptor splitOp = new SplitOperatorDescriptor(spec, stringRec, outputArity);
+        ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, stringRec, outputArity);
+
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, replicateOp,
+                new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
+
+        IOperatorDescriptor[] outputOp = new IOperatorDescriptor[outputFile.length];
+        for (int i = 0; i < outputArity; i++) {
+            outputOp[i] = new LineFileWriteOperatorDescriptor(spec, new FileSplit[] {
+                    new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, new FileReference(outputFile[i])) });
+            PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i],
+                    new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
+        }
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanOp, 0, replicateOp, 0);
+        for (int i = 0; i < outputArity; i++) {
+            spec.connect(new OneToOneConnectorDescriptor(spec), replicateOp, i, outputOp[i], 0);
+        }
+
+        for (int i = 0; i < outputArity; i++) {
+            spec.addRoot(outputOp[i]);
+        }
+        AlgebricksHyracksIntegrationUtil.runJob(spec);
+
+        for (int i = 0; i < outputArity; i++) {
+            compareFiles(inputFileName, outputFile[i].getAbsolutePath());
+        }
+    }
+
+    @Test
+    public void scanSplitWrite() throws Exception {
+        final int outputArity = 2;
+
+        JobSpecification spec = new JobSpecification(FRAME_SIZE);
+
+        String[] inputFileName = { "data/simple/int-string-part1.tbl", "data/simple/int-string-part1-split-0.tbl",
+                "data/simple/int-string-part1-split-1.tbl" };
+        File[] inputFiles = new File[inputFileName.length];
+        for (int i = 0; i < inputFileName.length; i++) {
+            inputFiles[i] = new File(inputFileName[i]);
+        }
+        File[] outputFile = new File[outputArity];
+        for (int i = 0; i < outputArity; i++) {
+            outputFile[i] = File.createTempFile("splitop", null);
+        }
+
+        FileSplit[] inputSplits = new FileSplit[] {
+                new FileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, new FileReference(inputFiles[0])) };
+        IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(inputSplits);
+
+        RecordDescriptor scannerDesc = new RecordDescriptor(
+                new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+                        new UTF8StringSerializerDeserializer() });
+
+        IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
+                UTF8StringParserFactory.INSTANCE };
+
+        FileScanOperatorDescriptor intScanner = new FileScanOperatorDescriptor(spec, intSplitProvider,
+                new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
+
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, intScanner, DEFAULT_NODES);
+
+        SplitOperatorDescriptor splitOp = new SplitOperatorDescriptor(spec, scannerDesc, outputArity,
+                new TupleFieldEvaluatorFactory(0), BinaryIntegerInspectorImpl.FACTORY);
 
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, splitOp,
                 new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
@@ -609,7 +672,7 @@
                     new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
         }
 
-        spec.connect(new OneToOneConnectorDescriptor(spec), scanOp, 0, splitOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), intScanner, 0, splitOp, 0);
         for (int i = 0; i < outputArity; i++) {
             spec.connect(new OneToOneConnectorDescriptor(spec), splitOp, i, outputOp[i], 0);
         }
@@ -620,7 +683,7 @@
         AlgebricksHyracksIntegrationUtil.runJob(spec);
 
         for (int i = 0; i < outputArity; i++) {
-            compareFiles(inputFileName, outputFile[i].getAbsolutePath());
+            compareFiles(inputFileName[i + 1], outputFile[i].getAbsolutePath());
         }
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-api/pom.xml b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
index acb0f1d..14789d9 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
@@ -72,13 +72,6 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>args4j</groupId>
-      <artifactId>args4j</artifactId>
-      <version>2.0.12</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-ipc</artifactId>
       <version>${project.version}</version>
@@ -89,6 +82,11 @@
       <version>3.1</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpcore</artifactId>
+      <version>4.4.4</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-util</artifactId>
       <version>${project.version}</version>
@@ -99,17 +97,5 @@
       <version>2.0.2-beta</version>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-api-mockito</artifactId>
-      <version>1.6.2</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-module-junit4</artifactId>
-      <version>1.6.2</version>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java
index 733382b..a9bef18 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java
@@ -21,8 +21,11 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.hyracks.api.exceptions.HyracksException;
+
 /**
- * A listener interface for providing notification call backs to events such as a Node Controller joining/leaving the cluster.
+ * A listener interface for providing notification call backs to events such as a Node Controller joining/leaving the
+ * cluster.
  */
 public interface IClusterLifecycleListener {
 
@@ -35,15 +38,15 @@
     /**
      * @param nodeId
      *            A unique identifier of a Node Controller
-     * @param ncConfig
+     * @param ncConfiguration
      *            A map containing the set of configuration parameters that were used to start the Node Controller
      */
-    public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration);
+    public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) throws HyracksException;
 
     /**
      * @param deadNodeIds
      *            A set of Node Controller Ids that have left the cluster. The set is not cumulative.
      */
-    public void notifyNodeFailure(Set<String> deadNodeIds);
+    public void notifyNodeFailure(Set<String> deadNodeIds) throws HyracksException;
 
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
index 4ee8303..2ac392b 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivityOperatorNodePushable.java
@@ -20,13 +20,13 @@
 package org.apache.hyracks.api.rewriter.runtime;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Queue;
-import java.util.concurrent.Callable;
+import java.util.Map.Entry;
 import java.util.concurrent.Future;
 
 import org.apache.commons.lang3.tuple.Pair;
@@ -56,6 +56,7 @@
     private final int partition;
     private final int nPartitions;
     private int inputArity = 0;
+    private boolean[] startedInitialization;
 
     public SuperActivityOperatorNodePushable(SuperActivity parent, Map<ActivityId, IActivity> startActivities,
             IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
@@ -79,11 +80,14 @@
 
     @Override
     public void initialize() throws HyracksDataException {
-        // Initializes all OperatorNodePushables in parallel.
-        runInParallel(op -> op.initialize());
+        // Initializes all OperatorNodePushables in parallel, tracking which ones started for deinitialize().
+        runInParallel((op, index) -> {
+            startedInitialization[index] = true;
+            op.initialize();
+        });
     }
 
-    public void init() throws HyracksDataException {
+    private void init() throws HyracksDataException {
         Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
         Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
         List<IConnectorDescriptor> outputConnectors = null;
@@ -150,12 +154,19 @@
                 }
             }
         }
+
+        // Initializes the startedInitialization flags to false.
+        startedInitialization = new boolean[operatorNodePushablesBFSOrder.size()];
+        Arrays.fill(startedInitialization, false);
     }
 
     @Override
     public void deinitialize() throws HyracksDataException {
-        // De-initialize all OperatorNodePushables in parallel.
-        runInParallel(op -> op.deinitialize());
+        runInParallel((op, index) -> {
+            if (startedInitialization[index]) {
+                op.deinitialize();
+            }
+        });
     }
 
     @Override
@@ -191,20 +202,19 @@
     }
 
     interface OperatorNodePushableAction {
-        public void runAction(IOperatorNodePushable op) throws HyracksDataException;
+        void runAction(IOperatorNodePushable op, int opIndex) throws HyracksDataException;
     }
 
     private void runInParallel(OperatorNodePushableAction opAction) throws HyracksDataException {
-        List<Future<Void>> initializationTasks = new ArrayList<Future<Void>>();
+        List<Future<Void>> initializationTasks = new ArrayList<>();
         try {
+            int index = 0;
             // Run one action for all OperatorNodePushables in parallel through a thread pool.
             for (final IOperatorNodePushable op : operatorNodePushablesBFSOrder) {
-                initializationTasks.add(ctx.getExecutorService().submit(new Callable<Void>() {
-                    @Override
-                    public Void call() throws Exception {
-                        opAction.runAction(op);
-                        return null;
-                    }
+                final int opIndex = index++;
+                initializationTasks.add(ctx.getExecutorService().submit(() -> {
+                    opAction.runAction(op, opIndex);
+                    return null;
                 }));
             }
             // Waits until all parallel actions finish.
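
The startedInitialization bookkeeping above follows a general pattern: record which parallel tasks actually began, and only tear those down afterwards. A self-contained sketch of that pattern, with invented names (not the Hyracks classes):

    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Future;

    final class StartTrackingExample {
        // Run all steps in parallel; on the way out, clean up only the steps that actually started.
        static void runAll(List<Runnable> steps, ExecutorService pool) throws Exception {
            boolean[] started = new boolean[steps.size()];
            Future<?>[] futures = new Future<?>[steps.size()];
            for (int i = 0; i < steps.size(); i++) {
                final int idx = i;
                futures[idx] = pool.submit(() -> {
                    started[idx] = true;          // flag is set before the step runs, as initialize() does above
                    steps.get(idx).run();
                });
            }
            try {
                for (Future<?> f : futures) {
                    f.get();                      // propagate the first failure
                }
            } finally {
                for (int i = 0; i < steps.size(); i++) {
                    if (started[i]) {             // mirror deinitialize(): skip steps that never began
                        System.out.println("cleaning up step " + i);
                    }
                }
            }
        }
    }
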
diff --git a/hyracks-fullstack/hyracks/hyracks-client/pom.xml b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
index fad01d5..bf5d167 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
@@ -83,7 +83,12 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-dataflow-common</artifactId>
+      <artifactId>hyracks-ipc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-common</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index 910d1a2..8b66a7f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -47,21 +47,69 @@
       <scope>compile</scope>
     </dependency>
     <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-webapp</artifactId>
-      <version>9.3.11.v20160721</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
       <groupId>org.apache.wicket</groupId>
       <artifactId>wicket-core</artifactId>
       <version>1.5.2</version>
     </dependency>
     <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-jcl</artifactId>
-      <version>1.6.3</version>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.5</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>javax.servlet-api</artifactId>
+      <version>3.1.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-servlet</artifactId>
+      <version>9.3.11.v20160721</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.wicket</groupId>
+      <artifactId>wicket-request</artifactId>
+      <version>1.5.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-http</artifactId>
+      <version>9.3.11.v20160721</version>
+    </dependency>
+    <dependency>
+      <groupId>org.ini4j</groupId>
+      <artifactId>ini4j</artifactId>
+      <version>0.5.4</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-ipc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.wicket</groupId>
+      <artifactId>wicket-util</artifactId>
+      <version>1.5.2</version>
+    </dependency>
+    <dependency>
+      <groupId>args4j</groupId>
+      <artifactId>args4j</artifactId>
+      <version>2.0.12</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20090211</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
index 786a89f..dff3107 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
@@ -23,7 +23,7 @@
 import org.apache.hyracks.control.common.controllers.CCConfig;
 
 public class CCDriver {
-    public static void main(String args[]) throws Exception {
+    public static void main(String[] args) throws Exception {
         try {
             CCConfig ccConfig = new CCConfig();
             CmdLineParser cp = new CmdLineParser(ccConfig);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java
index dfce7b8..dd6f83b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java
@@ -110,7 +110,7 @@
         }
     }
 
-    public void notifyNodeFailure(Set<String> deadNodeIds) {
+    public void notifyNodeFailure(Set<String> deadNodeIds) throws HyracksException {
         for (IClusterLifecycleListener l : clusterLifecycleListeners) {
             l.notifyNodeFailure(deadNodeIds);
         }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
index b3a3065..510c729 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RemoveDeadNodesWork.java
@@ -24,6 +24,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.hyracks.api.exceptions.HyracksException;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.cc.NodeControllerState;
@@ -41,7 +42,7 @@
 
     @Override
     public void run() {
-        Set<String> deadNodes = new HashSet<String>();
+        final Set<String> deadNodes = new HashSet<String>();
         Map<String, NodeControllerState> nodeMap = ccs.getNodeMap();
         for (Map.Entry<String, NodeControllerState> e : nodeMap.entrySet()) {
             NodeControllerState state = e.getValue();
@@ -69,8 +70,12 @@
                 }
             }
         }
-        if (deadNodes != null && deadNodes.size() > 0) {
-            ccs.getApplicationContext().notifyNodeFailure(deadNodes);
+        if (!deadNodes.isEmpty()) {
+            try {
+                ccs.getApplicationContext().notifyNodeFailure(deadNodes);
+            } catch (HyracksException e) {
+                LOGGER.log(Level.WARNING, "Uncaught exception on notifyNodeFailure", e);
+            }
         }
     }
 
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
index ac7323c..ef00821 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -35,13 +35,11 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-api</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
-      <type>jar</type>
       <scope>compile</scope>
     </dependency>
     <dependency>
@@ -49,5 +47,30 @@
       <artifactId>ini4j</artifactId>
       <version>0.5.4</version>
     </dependency>
+    <dependency>
+      <groupId>args4j</groupId>
+      <artifactId>args4j</artifactId>
+      <version>2.0.12</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-ipc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20090211</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpcore</artifactId>
+      <version>4.4.4</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <version>4.5.2</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
index 64bd7d1..98d6375 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
@@ -34,7 +34,9 @@
     @Option(name = "-address", usage = "IP Address for CC (default: localhost)", required = false)
     public String ipAddress = InetAddress.getLoopbackAddress().getHostAddress();
 
-    @Option(name = "-client-net-ip-address", usage = "Sets the IP Address to listen for connections from clients (default: same as -address)", required = false)
+    @Option(name = "-client-net-ip-address",
+            usage = "Sets the IP Address to listen for connections from clients (default: same as -address)",
+            required = false)
     public String clientNetIpAddress;
 
     @Option(name = "-client-net-port", usage = "Sets the port to listen for connections from clients (default 1098)")
@@ -43,46 +45,60 @@
     // QQQ Note that clusterNetIpAddress is *not directly used* yet. Both
     // the cluster listener and the web server listen on "all interfaces".
     // This IP address is only used to instruct the NC on which IP to call in.
-    @Option(name = "-cluster-net-ip-address", usage = "Sets the IP Address to listen for connections from NCs (default: same as -address)", required = false)
+    @Option(name = "-cluster-net-ip-address",
+            usage = "Sets the IP Address to listen for connections from NCs (default: same as -address)",
+            required = false)
     public String clusterNetIpAddress;
 
-    @Option(name = "-cluster-net-port", usage = "Sets the port to listen for connections from node controllers (default 1099)")
+    @Option(name = "-cluster-net-port",
+            usage = "Sets the port to listen for connections from node controllers (default 1099)")
     public int clusterNetPort = 1099;
 
     @Option(name = "-http-port", usage = "Sets the http port for the Cluster Controller (default: 16001)")
     public int httpPort = 16001;
 
-    @Option(name = "-heartbeat-period", usage = "Sets the time duration between two heartbeats from each node controller in milliseconds (default: 10000)")
+    @Option(name = "-heartbeat-period",
+            usage = "Sets the time duration between two heartbeats from each node controller in milliseconds" +
+                    " (default: 10000)")
     public int heartbeatPeriod = 10000;
 
-    @Option(name = "-max-heartbeat-lapse-periods", usage = "Sets the maximum number of missed heartbeats before a node is marked as dead (default: 5)")
+    @Option(name = "-max-heartbeat-lapse-periods",
+            usage = "Sets the maximum number of missed heartbeats before a node is marked as dead (default: 5)")
     public int maxHeartbeatLapsePeriods = 5;
 
-    @Option(name = "-profile-dump-period", usage = "Sets the time duration between two profile dumps from each node controller in milliseconds. 0 to disable. (default: 0)")
+    @Option(name = "-profile-dump-period", usage = "Sets the time duration between two profile dumps from each node " +
+            "controller in milliseconds. 0 to disable. (default: 0)")
     public int profileDumpPeriod = 0;
 
-    @Option(name = "-default-max-job-attempts", usage = "Sets the default number of job attempts allowed if not specified in the job specification. (default: 5)")
+    @Option(name = "-default-max-job-attempts", usage = "Sets the default number of job attempts allowed if not " +
+            "specified in the job specification. (default: 5)")
     public int defaultMaxJobAttempts = 5;
 
-    @Option(name = "-job-history-size", usage = "Limits the number of historical jobs remembered by the system to the specified value. (default: 10)")
+    @Option(name = "-job-history-size", usage = "Limits the number of historical jobs remembered by the system to " +
+            "the specified value. (default: 10)")
     public int jobHistorySize = 10;
 
-    @Option(name = "-result-time-to-live", usage = "Limits the amount of time results for asynchronous jobs should be retained by the system in milliseconds. (default: 24 hours)")
+    @Option(name = "-result-time-to-live", usage = "Limits the amount of time results for asynchronous jobs should " +
+            "be retained by the system in milliseconds. (default: 24 hours)")
     public long resultTTL = 86400000;
 
-    @Option(name = "-result-sweep-threshold", usage = "The duration within which an instance of the result cleanup should be invoked in milliseconds. (default: 1 minute)")
+    @Option(name = "-result-sweep-threshold", usage = "The duration within which an instance of the result cleanup " +
+            "should be invoked in milliseconds. (default: 1 minute)")
     public long resultSweepThreshold = 60000;
 
-    @Option(name = "-cc-root", usage = "Sets the root folder used for file operations. (default: ClusterControllerService)")
+    @Option(name = "-cc-root",
+            usage = "Sets the root folder used for file operations. (default: ClusterControllerService)")
     public String ccRoot = "ClusterControllerService";
 
-    @Option(name = "-cluster-topology", required = false, usage = "Sets the XML file that defines the cluster topology. (default: null)")
+    @Option(name = "-cluster-topology", required = false,
+            usage = "Sets the XML file that defines the cluster topology. (default: null)")
     public File clusterTopologyDefinition = null;
 
     @Option(name = "-app-cc-main-class", required = false, usage = "Application CC Main Class")
     public String appCCMainClass = null;
 
-    @Option(name = "-config-file", usage = "Specify path to master configuration file (default: none)", required = false)
+    @Option(name = "-config-file",
+            usage = "Specify path to master configuration file (default: none)", required = false)
     public String configFile = null;
 
     @Argument
@@ -132,8 +148,8 @@
         }
 
         // "address" is the default for all IP addresses
-        if (clusterNetIpAddress == null) clusterNetIpAddress = ipAddress;
-        if (clientNetIpAddress == null) clientNetIpAddress = ipAddress;
+        clusterNetIpAddress = clusterNetIpAddress == null ? ipAddress : clusterNetIpAddress;
+        clientNetIpAddress = clientNetIpAddress == null ? ipAddress : clientNetIpAddress;
     }
 
     /**
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/IniUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/IniUtils.java
index c6c3e73..e999de4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/IniUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/IniUtils.java
@@ -35,7 +35,11 @@
  * the section "nc/red", but if it is not found, will look in the section "nc".
  */
 public class IniUtils {
-    private static <T> T getIniValue(Ini ini, String section, String key, T default_value, Class<T> clazz) {
+
+    private IniUtils() {
+    }
+
+    private static <T> T getIniValue(Ini ini, String section, String key, T defaultValue, Class<T> clazz) {
         T value;
         while (true) {
             value = ini.get(section, key, clazz);
@@ -48,7 +52,7 @@
             }
             break;
         }
-        return (value != null) ? value : default_value;
+        return (value != null) ? value : defaultValue;
     }
 
     @SuppressWarnings("unchecked")
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
index ce5043a..2e47f41 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
@@ -288,11 +288,9 @@
         configuration.put("messaging-port", String.valueOf(messagingPort));
         configuration.put("messaging-public-ip-address", messagingPublicIPAddress);
         configuration.put("messaging-public-port", String.valueOf(messagingPublicPort));
+        configuration.put("ncservice-pid", String.valueOf(ncservicePid));
         if (appNCMainClass != null) {
             configuration.put("app-nc-main-class", appNCMainClass);
         }
-        if (ncservicePid != 0) {
-            configuration.put("ncservice-pid", String.valueOf(ncservicePid));
-        }
     }
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
index 02a789a..f9952db 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/SynchronizableWork.java
@@ -18,6 +18,9 @@
  */
 package org.apache.hyracks.control.common.work;
 
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
 public abstract class SynchronizableWork extends AbstractWork {
     private boolean done;
 
@@ -34,8 +37,9 @@
     public final void run() {
         try {
             doRun();
-        } catch (Exception e) {
-            this.e = e;
+        } catch (Exception ex) {
+            Logger.getLogger(getClass().getName()).log(Level.INFO, "Exception thrown from work", ex);
+            this.e = ex;
         } finally {
             synchronized (this) {
                 done = true;
@@ -46,11 +50,7 @@
 
     public final synchronized void sync() throws Exception {
         while (!done) {
-            try {
-                wait();
-            } catch (InterruptedException e) {
-                throw e;
-            }
+            wait();
         }
         if (e != null) {
             throw e;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
index a6c06de..20d4abe 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -48,6 +48,31 @@
       <artifactId>hyracks-comm</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-ipc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.5</version>
+    </dependency>
+    <dependency>
+      <groupId>args4j</groupId>
+      <artifactId>args4j</artifactId>
+      <version>2.0.12</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
   </dependencies>
   <reporting>
     <plugins>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
index f463bfa..7c626c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
@@ -235,8 +235,12 @@
         }
     }
 
-    private synchronized void addPendingThread(Thread t) {
+    private synchronized boolean addPendingThread(Thread t) {
+        if (aborted) {
+            return false;
+        }
         pendingThreads.add(t);
+        return true;
     }
 
     private synchronized void removePendingThread(Thread t) {
@@ -256,9 +260,16 @@
     public void run() {
         Thread ct = Thread.currentThread();
         String threadName = ct.getName();
-        addPendingThread(ct);
+        ct.setName(displayName + ":" + taskAttemptId + ":" + 0);
+        // Calls synchronized addPendingThread(..) to make sure that in the abort() method,
+        // the thread is not escaped from interruption.
+        if (!addPendingThread(ct)) {
+            exceptions.add(new InterruptedException("Task " + getTaskAttemptId() + " was aborted!"));
+            ExceptionUtils.setNodeIds(exceptions, ncs.getId());
+            ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, this, exceptions));
+            return;
+        }
         try {
-            ct.setName(displayName + ":" + taskAttemptId + ":" + 0);
             try {
                 operator.initialize();
                 if (collectors.length > 0) {
@@ -271,11 +282,12 @@
                         executorService.execute(new Runnable() {
                             @Override
                             public void run() {
-                                if (aborted) {
+                                Thread thread = Thread.currentThread();
+                                // Calls synchronized addPendingThread(..) to make sure that in the abort() method,
+                                // the thread is not escaped from interruption.
+                                if (!addPendingThread(thread)) {
                                     return;
                                 }
-                                Thread thread = Thread.currentThread();
-                                addPendingThread(thread);
                                 String oldName = thread.getName();
                                 thread.setName(displayName + ":" + taskAttemptId + ":" + cIdx);
                                 thread.setPriority(Thread.MIN_PRIORITY);
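
The addPendingThread() change above closes a window where a task thread could register itself after abort() had already interrupted the pending set. A stand-alone sketch of the register-or-refuse idiom, with hypothetical names rather than the actual Task internals:

    import java.util.HashSet;
    import java.util.Set;

    final class AbortableRegistry {
        private final Set<Thread> pending = new HashSet<>();
        private boolean aborted;

        // Returns false if already aborted; the caller must then bail out instead of doing work.
        synchronized boolean register(Thread t) {
            if (aborted) {
                return false;
            }
            pending.add(t);
            return true;
        }

        synchronized void deregister(Thread t) {
            pending.remove(t);
        }

        // Interrupts everything registered so far; later register() calls are refused.
        synchronized void abort() {
            aborted = true;
            for (Thread t : pending) {
                t.interrupt();
            }
        }
    }
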
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
index 2e4012e..6562557 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
@@ -18,50 +18,53 @@
  ! under the License.
  !-->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-     <artifactId>hyracks-control</artifactId>
+  <parent>
+    <artifactId>hyracks-control</artifactId>
+    <groupId>org.apache.hyracks</groupId>
+    <version>0.2.18-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>hyracks-nc-service</artifactId>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+          <fork>true</fork>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <dependency>
+      <groupId>args4j</groupId>
+      <artifactId>args4j</artifactId>
+      <version>2.0.12</version>
+      <type>jar</type>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.ini4j</groupId>
+      <artifactId>ini4j</artifactId>
+      <version>0.5.4</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-common</artifactId>
       <version>0.2.18-SNAPSHOT</version>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-
-    <artifactId>hyracks-nc-service</artifactId>
-
-    <build>
-      <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-compiler-plugin</artifactId>
-          <version>2.0.2</version>
-          <configuration>
-            <source>1.7</source>
-            <target>1.7</target>
-            <fork>true</fork>
-          </configuration>
-        </plugin>
-      </plugins>
-    </build>
-
-    <dependencies>
-      <dependency>
-        <groupId>args4j</groupId>
-        <artifactId>args4j</artifactId>
-        <version>2.0.12</version>
-        <type>jar</type>
-        <scope>compile</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.ini4j</groupId>
-        <artifactId>ini4j</artifactId>
-        <version>0.5.4</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hyracks</groupId>
-        <artifactId>hyracks-control-nc</artifactId>
-        <version>${project.version}</version>
-        <type>jar</type>
-        <scope>compile</scope>
-      </dependency>
-    </dependencies>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
+  </dependencies>
 
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml
index e6bb340..d5d7d5c 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/pom.xml
@@ -32,6 +32,11 @@
   </properties>
   <dependencies>
     <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
+    <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-util</artifactId>
       <version>${project.version}</version>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
index f7fbff9..29d41ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
@@ -44,6 +44,26 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-util</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20090211</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-api</artifactId>
       <version>${project.version}</version>
       <type>jar</type>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractReplicateOperatorDescriptor.java
similarity index 79%
rename from hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java
rename to hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractReplicateOperatorDescriptor.java
index 67af861..5c642ba 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractReplicateOperatorDescriptor.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.apache.hyracks.dataflow.std.misc;
+package org.apache.hyracks.dataflow.std.base;
 
 import java.nio.ByteBuffer;
 
@@ -32,28 +32,32 @@
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
 import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
-import org.apache.hyracks.dataflow.std.base.AbstractActivityNode;
-import org.apache.hyracks.dataflow.std.base.AbstractOperatorDescriptor;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
-import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import org.apache.hyracks.dataflow.std.misc.MaterializerTaskState;
 
-public class SplitOperatorDescriptor extends AbstractOperatorDescriptor {
-    private static final long serialVersionUID = 1L;
+/**
+ * Abstract base class for the two replication-related operator descriptors: replicate and split.
+ * The replicate operator propagates all frames to all output branches;
+ * that is, each tuple reaches every output branch.
+ * The split operator routes each tuple in a frame to exactly one output branch.
+ */
+public abstract class AbstractReplicateOperatorDescriptor extends AbstractOperatorDescriptor {
+    protected static final long serialVersionUID = 1L;
 
-    private final static int SPLITTER_MATERIALIZER_ACTIVITY_ID = 0;
-    private final static int MATERIALIZE_READER_ACTIVITY_ID = 1;
+    protected final static int SPLITTER_MATERIALIZER_ACTIVITY_ID = 0;
+    protected final static int MATERIALIZE_READER_ACTIVITY_ID = 1;
 
-    private final boolean[] outputMaterializationFlags;
-    private final boolean requiresMaterialization;
-    private final int numberOfNonMaterializedOutputs;
-    private final int numberOfMaterializedOutputs;
+    protected final boolean[] outputMaterializationFlags;
+    protected final boolean requiresMaterialization;
+    protected final int numberOfNonMaterializedOutputs;
+    protected final int numberOfMaterializedOutputs;
 
-    public SplitOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc, int outputArity) {
+    public AbstractReplicateOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc,
+            int outputArity) {
         this(spec, rDesc, outputArity, new boolean[outputArity]);
     }
 
-    public SplitOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc, int outputArity,
-            boolean[] outputMaterializationFlags) {
+    public AbstractReplicateOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc,
+            int outputArity, boolean[] outputMaterializationFlags) {
         super(spec, 1, outputArity);
         for (int i = 0; i < outputArity; i++) {
             recordDescriptors[i] = rDesc;
@@ -80,16 +84,16 @@
 
     @Override
     public void contributeActivities(IActivityGraphBuilder builder) {
-        SplitterMaterializerActivityNode sma =
-                new SplitterMaterializerActivityNode(new ActivityId(odId, SPLITTER_MATERIALIZER_ACTIVITY_ID));
+        ReplicatorMaterializerActivityNode sma = new ReplicatorMaterializerActivityNode(
+                new ActivityId(odId, SPLITTER_MATERIALIZER_ACTIVITY_ID));
         builder.addActivity(this, sma);
         builder.addSourceEdge(0, sma, 0);
         int pipelineOutputIndex = 0;
         int activityId = MATERIALIZE_READER_ACTIVITY_ID;
         for (int i = 0; i < outputArity; i++) {
             if (outputMaterializationFlags[i]) {
-                MaterializeReaderActivityNode mra =
-                        new MaterializeReaderActivityNode(new ActivityId(odId, activityId++));
+                MaterializeReaderActivityNode mra = new MaterializeReaderActivityNode(
+                        new ActivityId(odId, activityId++));
                 builder.addActivity(this, mra);
                 builder.addBlockingEdge(sma, mra);
                 builder.addTargetEdge(i, mra, 0);
@@ -99,16 +103,17 @@
         }
     }
 
-    private final class SplitterMaterializerActivityNode extends AbstractActivityNode {
+    protected class ReplicatorMaterializerActivityNode extends AbstractActivityNode {
         private static final long serialVersionUID = 1L;
 
-        public SplitterMaterializerActivityNode(ActivityId id) {
+        public ReplicatorMaterializerActivityNode(ActivityId id) {
             super(id);
         }
 
         @Override
         public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions) {
+                IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
+                throws HyracksDataException {
             return new AbstractUnaryInputOperatorNodePushable() {
                 private MaterializerTaskState state;
                 private final IFrameWriter[] writers = new IFrameWriter[numberOfNonMaterializedOutputs];
@@ -140,10 +145,8 @@
 
                 @Override
                 public void flush() throws HyracksDataException {
-                    if (!requiresMaterialization) {
-                        for (IFrameWriter writer : writers) {
-                            writer.flush();
-                        }
+                    for (int i = 0; i < numberOfNonMaterializedOutputs; i++) {
+                        writers[i].flush();
                     }
                 }
 
@@ -204,7 +207,7 @@
         }
     }
 
-    private final class MaterializeReaderActivityNode extends AbstractActivityNode {
+    protected class MaterializeReaderActivityNode extends AbstractActivityNode {
         private static final long serialVersionUID = 1L;
 
         public MaterializeReaderActivityNode(ActivityId id) {
@@ -227,5 +230,4 @@
             };
         }
     }
-
 }
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ReplicateOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ReplicateOperatorDescriptor.java
new file mode 100644
index 0000000..0782647
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ReplicateOperatorDescriptor.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.dataflow.std.misc;
+
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
+import org.apache.hyracks.dataflow.std.base.AbstractReplicateOperatorDescriptor;
+
+public class ReplicateOperatorDescriptor extends AbstractReplicateOperatorDescriptor {
+    private static final long serialVersionUID = 1L;
+
+    public ReplicateOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc, int outputArity) {
+        this(spec, rDesc, outputArity, new boolean[outputArity]);
+    }
+
+    public ReplicateOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor rDesc, int outputArity,
+            boolean[] outputMaterializationFlags) {
+        super(spec, rDesc, outputArity, outputMaterializationFlags);
+    }
+}
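For orientation only (not part of this patch): a minimal sketch of how the concrete ReplicateOperatorDescriptor introduced above is typically wired into a job, mirroring the wiring used by ReplicateOperatorTest later in this change set. The ReplicateWiringSketch class and its replicateTo helper are hypothetical names introduced here for illustration; all other types appear in this diff or in the existing Hyracks API.

import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import org.apache.hyracks.dataflow.std.misc.ReplicateOperatorDescriptor;

public final class ReplicateWiringSketch {

    private ReplicateWiringSketch() {
    }

    // Connects sourceOp (output 0) to a new replicate operator and fans the
    // stream out to every consumer; per the replicate semantics documented in
    // AbstractReplicateOperatorDescriptor, each consumer sees the full input.
    public static ReplicateOperatorDescriptor replicateTo(JobSpecification spec, RecordDescriptor rDesc,
            IOperatorDescriptor sourceOp, IOperatorDescriptor[] consumerOps) {
        ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, rDesc, consumerOps.length);
        spec.connect(new OneToOneConnectorDescriptor(spec), sourceOp, 0, replicateOp, 0);
        for (int i = 0; i < consumerOps.length; i++) {
            spec.connect(new OneToOneConnectorDescriptor(spec), replicateOp, i, consumerOps[i], 0);
        }
        return replicateOp;
    }
}

A split-style subclass of AbstractReplicateOperatorDescriptor would be wired the same way; the difference is only in runtime routing, where each tuple is forwarded to a single output branch instead of all of them.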
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
index 9be4bc6..5591ef7 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
@@ -67,6 +67,9 @@
 
     @Override
     public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+        if (topK <= 0) {
+            return;
+        }
         inAccessor.reset(buffer);
         if (tupleSorter != null) {
             boolean isBadK = false;
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
index 453354b..1cec310 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
@@ -37,20 +37,46 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks.examples.btree</groupId>
       <artifactId>btreehelper</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>args4j</groupId>
+      <artifactId>args4j</artifactId>
+      <version>2.0.12</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
index 9364294..991556f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
@@ -37,23 +37,25 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-storage-am-btree</artifactId>
-      <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-api</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-data-std</artifactId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
       <version>${project.version}</version>
     </dependency>
   </dependencies>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
index 23400e1..881fd1a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
@@ -71,26 +71,4 @@
       </plugin>
     </plugins>
   </build>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hyracks.examples.btree</groupId>
-      <artifactId>btreehelper</artifactId>
-      <version>${project.version}</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-control-cc</artifactId>
-      <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-control-nc</artifactId>
-      <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-  </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index 0eeddce..3692cd5 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -47,63 +47,41 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-cc</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-nc</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-rtree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-rtree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
-      <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -120,8 +98,6 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-client</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>com.e-movimento.tinytools</groupId>
@@ -135,5 +111,51 @@
       <version>2.0.2-beta</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.5</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-lsm-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20090211</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.12</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java
index 452da88..b76e458 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/JobFailureTest.java
@@ -26,13 +26,18 @@
 import org.apache.hyracks.dataflow.std.misc.SinkOperatorDescriptor;
 import org.apache.hyracks.tests.util.ExceptionOnCreatePushRuntimeOperatorDescriptor;
 import org.junit.Assert;
+import org.junit.Test;
 
 public class JobFailureTest extends AbstractMultiNCIntegrationTest {
 
-    // commenting out due to intermittent hangs:
-    // https://asterix-jenkins.ics.uci.edu/job/asterix-gerrit-notopic/2877/artifact/target/threaddumps/jstack_28541.html
-    // @Test
+    @Test
     public void failureOnCreatePushRuntime() throws Exception {
+        for (int round = 0; round < 1000; ++round) {
+            execTest();
+        }
+    }
+
+    private void execTest() throws Exception {
         JobSpecification spec = new JobSpecification();
         AbstractSingleActivityOperatorDescriptor sourceOpDesc = new ExceptionOnCreatePushRuntimeOperatorDescriptor(spec,
                 0, 1, new int[] { 4 }, true);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SplitOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
similarity index 92%
rename from hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SplitOperatorTest.java
rename to hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
index 40b4251..9a56cd3 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SplitOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
@@ -23,9 +23,6 @@
 import java.io.FileReader;
 import java.io.IOException;
 
-import org.junit.Assert;
-import org.junit.Test;
-
 import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -40,11 +37,13 @@
 import org.apache.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
 import org.apache.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
-import org.apache.hyracks.dataflow.std.misc.SplitOperatorDescriptor;
+import org.apache.hyracks.dataflow.std.misc.ReplicateOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import org.apache.hyracks.tests.util.ResultSerializerFactoryProvider;
+import org.junit.Assert;
+import org.junit.Test;
 
-public class SplitOperatorTest extends AbstractIntegrationTest {
+public class ReplicateOperatorTest extends AbstractIntegrationTest {
 
     public void compareFiles(String fileNameA, String fileNameB) throws IOException {
         BufferedReader fileA = new BufferedReader(new FileReader(fileNameA));
@@ -69,7 +68,7 @@
         String inputFileName = "data/words.txt";
         File[] outputFile = new File[outputArity];
         for (int i = 0; i < outputArity; i++) {
-            outputFile[i] = File.createTempFile("splitop", null);
+            outputFile[i] = File.createTempFile("replicateop", null);
             outputFile[i].deleteOnExit();
         }
 
@@ -86,8 +85,8 @@
                 inputSplits), stringParser, stringRec);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanOp, locations);
 
-        SplitOperatorDescriptor splitOp = new SplitOperatorDescriptor(spec, stringRec, outputArity);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, splitOp, locations);
+        ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, stringRec, outputArity);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, replicateOp, locations);
 
         IOperatorDescriptor outputOp[] = new IOperatorDescriptor[outputFile.length];
         for (int i = 0; i < outputArity; i++) {
@@ -99,9 +98,9 @@
             PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, outputOp[i], locations);
         }
 
-        spec.connect(new OneToOneConnectorDescriptor(spec), scanOp, 0, splitOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), scanOp, 0, replicateOp, 0);
         for (int i = 0; i < outputArity; i++) {
-            spec.connect(new OneToOneConnectorDescriptor(spec), splitOp, i, outputOp[i], 0);
+            spec.connect(new OneToOneConnectorDescriptor(spec), replicateOp, i, outputOp[i], 0);
         }
 
         for (int i = 0; i < outputArity; i++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
index 8c5bf48..14c644a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/ExceptionOnCreatePushRuntimeOperatorDescriptor.java
@@ -20,6 +20,8 @@
 
 import java.nio.ByteBuffer;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -32,6 +34,7 @@
 
 public class ExceptionOnCreatePushRuntimeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
     private static final long serialVersionUID = 1L;
+    private static Logger LOGGER = Logger.getLogger(ExceptionOnCreatePushRuntimeOperatorDescriptor.class.getName());
     private static AtomicInteger createPushRuntime = new AtomicInteger();
     private static AtomicInteger initializeCounter = new AtomicInteger();
     private static AtomicInteger openCloseCounter = new AtomicInteger();
@@ -126,10 +129,10 @@
     public static boolean succeed() {
         boolean success = openCloseCounter.get() == 0 && createPushRuntime.get() == 0 && initializeCounter.get() == 0;
         if (!success) {
-            System.err.println("Failure:");
-            System.err.println("CreatePushRuntime:" + createPushRuntime.get());
-            System.err.println("InitializeCounter:" + initializeCounter.get());
-            System.err.println("OpenCloseCounter:" + openCloseCounter.get());
+            LOGGER.log(Level.SEVERE, "Failure:");
+            LOGGER.log(Level.SEVERE, "CreatePushRuntime:" + createPushRuntime.get());
+            LOGGER.log(Level.SEVERE, "InitializeCounter:" + initializeCounter.get());
+            LOGGER.log(Level.SEVERE, "OpenCloseCounter:" + openCloseCounter.get());
         }
         return success;
     }
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
index 711c183..aafbbe4 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
@@ -36,6 +36,24 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.10</version>
+        <configuration>
+          <failOnWarning>true</failOnWarning>
+          <outputXML>true</outputXML>
+          <usedDependencies>org.apache.hyracks:hyracks-control-nc,org.apache.hyracks:hyracks-control-cc</usedDependencies>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>process-test-classes</phase>
+            <goals>
+              <goal>analyze-only</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
         <version>1.3</version>
@@ -174,13 +192,12 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>texthelper</artifactId>
+      <artifactId>hyracks-ipc</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-control-cc</artifactId>
+      <artifactId>hyracks-api</artifactId>
       <version>${project.version}</version>
       <type>jar</type>
       <scope>compile</scope>
@@ -189,15 +206,11 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-nc</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>textclient</artifactId>
+      <artifactId>hyracks-control-cc</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>test</scope>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
index d73fe86..fcb5b8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
@@ -37,14 +37,31 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>texthelper</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>args4j</groupId>
+      <artifactId>args4j</artifactId>
+      <version>2.0.12</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
   <build>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml
index f85b264..1792650 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/texthelper/pom.xml
@@ -35,19 +35,17 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-dataflow-std</artifactId>
-      <version>${project.version}</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-api</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-data-std</artifactId>
+      <artifactId>hyracks-dataflow-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
       <version>${project.version}</version>
     </dependency>
   </dependencies>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
index aec77a3..07e64c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
@@ -36,6 +36,26 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.10</version>
+        <configuration>
+          <failOnWarning>true</failOnWarning>
+          <outputXML>true</outputXML>
+          <usedDependencies>
+            org.apache.hyracks:hyracks-control-nc,org.apache.hyracks:hyracks-control-cc,org.apache.hyracks:hyracks-dataflow-std,org.apache.hyracks:texthelper
+          </usedDependencies>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>process-test-classes</phase>
+            <goals>
+              <goal>analyze-only</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>appassembler-maven-plugin</artifactId>
         <version>1.3</version>
@@ -173,30 +193,29 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>texthelper</artifactId>
+      <artifactId>textclient</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-control-cc</artifactId>
-      <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-nc</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>textclient</artifactId>
+      <artifactId>hyracks-control-cc</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>texthelper</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
index fd5f4d5..59f54a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
@@ -42,6 +42,21 @@
       <artifactId>hyracks-data-std</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>args4j</groupId>
+      <artifactId>args4j</artifactId>
+      <version>2.0.12</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
   <build>
     <plugins>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
index 18e00db..b967e85 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
@@ -71,31 +71,4 @@
       </plugin>
     </plugins>
   </build>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-dataflow-std</artifactId>
-      <version>${project.version}</version>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-data-std</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-control-cc</artifactId>
-      <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-control-nc</artifactId>
-      <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-  </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/pom.xml
index 9e8d185..254fc87 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/pom.xml
@@ -30,6 +30,23 @@
     <root.dir>${basedir}/../../..</root.dir>
   </properties>
 
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <version>2.4</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
   <profiles>
     <profile>
       <activation>
@@ -49,7 +66,7 @@
           <artifactId>hadoop-test</artifactId>
           <version>0.20.2</version>
           <type>jar</type>
-          <scope>compile</scope>
+          <scope>test</scope>
         </dependency>
       </dependencies>
     </profile>
@@ -75,14 +92,14 @@
           <artifactId>hadoop-minicluster</artifactId>
           <version>1.0.4</version>
           <type>jar</type>
-          <scope>compile</scope>
+          <scope>test</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-test</artifactId>
           <version>1.0.4</version>
           <type>jar</type>
-          <scope>compile</scope>
+          <scope>test</scope>
         </dependency>
       </dependencies>
     </profile>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/main/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/test/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/main/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java
rename to hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-1.x/src/test/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/pom.xml
index 7cca256..4fda8a5 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/pom.xml
@@ -30,6 +30,23 @@
     <root.dir>${basedir}/../../..</root.dir>
   </properties>
 
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <version>2.4</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
   <profiles>
     <profile>
       <activation>
@@ -56,14 +73,8 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-hdfs</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
+          <classifier>tests</classifier>
+          <scope>test</scope>
         </dependency>
       </dependencies>
     </profile>
@@ -80,26 +91,10 @@
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-common</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.hadoop</groupId>
           <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-hdfs</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-minicluster</artifactId>
-          <type>jar</type>
-          <scope>compile</scope>
         </dependency>
       </dependencies>
     </profile>
@@ -139,7 +134,7 @@
           <artifactId>hadoop-minicluster</artifactId>
           <version>0.23.6</version>
           <type>jar</type>
-          <scope>compile</scope>
+          <scope>test</scope>
         </dependency>
       </dependencies>
     </profile>
@@ -179,7 +174,7 @@
           <artifactId>hadoop-minicluster</artifactId>
           <version>2.0.0-cdh4.2.0</version>
           <type>jar</type>
-          <scope>compile</scope>
+          <scope>test</scope>
         </dependency>
       </dependencies>
     </profile>
@@ -219,7 +214,7 @@
           <artifactId>hadoop-minicluster</artifactId>
           <version>2.0.0-cdh4.1.0</version>
           <type>jar</type>
-          <scope>compile</scope>
+          <scope>test</scope>
         </dependency>
       </dependencies>
     </profile>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/main/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/test/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java
similarity index 100%
rename from hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/main/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java
rename to hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-2.x/src/test/java/org/apache/hyracks/hdfs/MiniDFSClusterFactory.java
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
index 8d019d9..96a30fd 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -63,6 +63,24 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.10</version>
+        <configuration>
+          <failOnWarning>true</failOnWarning>
+          <outputXML>true</outputXML>
+          <ignoredUnusedDeclaredDependencies>org.apache.hadoop:hadoop*::</ignoredUnusedDeclaredDependencies>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>process-test-classes</phase>
+            <goals>
+              <goal>analyze-only</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 
@@ -80,6 +98,20 @@
           <type>jar</type>
           <scope>compile</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-hdfs-1.x</artifactId>
+          <version>${project.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>0.20.2</version>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
     <profile>
@@ -99,7 +131,27 @@
           <type>jar</type>
           <scope>compile</scope>
         </dependency>
-      </dependencies>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-hdfs-1.x</artifactId>
+          <version>${project.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <version>1.0.4</version>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+          <version>1.0.4</version>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>      </dependencies>
     </profile>
 
     <profile>
@@ -119,6 +171,57 @@
           <type>jar</type>
           <scope>compile</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-hdfs-2.x</artifactId>
+          <version>${project.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-control-common</artifactId>
+          <version>0.2.18-SNAPSHOT</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>2.2.0</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+          <version>2.2.0</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>2.2.0</version>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+          <version>2.2.0</version>
+          <type>test-jar</type>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+          <version>2.2.0</version>
+          <type>test-jar</type>
+          <classifier>tests</classifier>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>junit</groupId>
+          <artifactId>junit</artifactId>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
     <profile>
@@ -138,6 +241,19 @@
           <type>jar</type>
           <scope>compile</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-hdfs-2.x</artifactId>
+          <version>${project.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
     <profile>
@@ -157,6 +273,20 @@
           <type>jar</type>
           <scope>compile</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-hdfs-2.x</artifactId>
+          <version>${project.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <version>0.23.6</version>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
     <profile>
@@ -176,6 +306,20 @@
           <type>jar</type>
           <scope>compile</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-hdfs-2.x</artifactId>
+          <version>${project.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <version>2.0.0-cdh4.1.0</version>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
     <profile>
@@ -195,6 +339,20 @@
           <type>jar</type>
           <scope>compile</scope>
         </dependency>
+        <dependency>
+          <groupId>org.apache.hyracks</groupId>
+          <artifactId>hyracks-hdfs-2.x</artifactId>
+          <version>${project.version}</version>
+          <type>test-jar</type>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+          <version>2.0.0-cdh4.2.0</version>
+          <type>jar</type>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>
diff --git a/hyracks-fullstack/hyracks/hyracks-net/pom.xml b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
index 46494c3..838e0c7 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-net/pom.xml
@@ -44,7 +44,7 @@
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-api</artifactId>
-      <version>0.2.18-SNAPSHOT</version>
+      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
diff --git a/hyracks-fullstack/hyracks/hyracks-server/pom.xml b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
index ed82438..482401b 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
@@ -44,6 +44,24 @@
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.10</version>
+        <configuration>
+          <failOnWarning>true</failOnWarning>
+          <outputXML>true</outputXML>
+          <usedDependencies>org.apache.hyracks:hyracks-control-nc</usedDependencies>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>process-test-classes</phase>
+            <goals>
+              <goal>analyze-only</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-failsafe-plugin</artifactId>
         <version>2.6</version>
         <configuration>
@@ -129,22 +147,11 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-cc</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-control-nc</artifactId>
-      <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-nc-service</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.httpcomponents</groupId>
@@ -156,5 +163,25 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.json</groupId>
+      <artifactId>json</artifactId>
+      <version>20090211</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpcore</artifactId>
+      <version>4.4.4</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-nc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
index 13607ad..8d1246b 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
@@ -41,7 +41,7 @@
 public class NCServiceIT {
 
     private static final String TARGET_DIR = StringUtils
-            .join(new String[] { System.getProperty("basedir"), "target" }, File.separator);
+            .join(new String[] { ".", "target" }, File.separator);
     private static final String LOG_DIR = StringUtils
             .join(new String[] { TARGET_DIR, "failsafe-reports" }, File.separator);
     private static final String RESOURCE_DIR = StringUtils
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml
index 55f7a42..b16a827 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/pom.xml
@@ -45,8 +45,21 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml
index be26125..39409c6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/pom.xml
@@ -90,5 +90,27 @@
       <version>1.6.2</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.powermock</groupId>
+      <artifactId>powermock-core</artifactId>
+      <version>1.6.2</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.12</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
index da64794..7ebd27f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/pom.xml
@@ -75,5 +75,10 @@
       <artifactId>hyracks-util</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml
index ab69b9b..5d518bb 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/pom.xml
@@ -45,22 +45,41 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-bloomfilter</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
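
Note: the <type>jar</type> and <scope>compile</scope> elements being stripped throughout these POMs simply restate Maven's defaults (a dependency's type defaults to jar and its scope to compile), so removing them does not change how any of these modules resolve. A minimal sketch of the equivalent declaration, using hyracks-api purely as an illustration:

    <dependency>
      <groupId>org.apache.hyracks</groupId>
      <artifactId>hyracks-api</artifactId>
      <version>${project.version}</version>
      <!-- type defaults to jar and scope to compile, so neither needs to be spelled out -->
    </dependency>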
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
index a4a97f2..da7c397 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/pom.xml
@@ -45,22 +45,36 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-bloomfilter</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-std</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
index 670c85e..95e446a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
@@ -51,15 +51,46 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-bloomfilter</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
index 17a471c..2d8085c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
@@ -45,22 +45,51 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-rtree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-util</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-bloomfilter</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml
index 9cda3b2..d03824e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/pom.xml
@@ -46,22 +46,36 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-dataflow-std</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-util</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
index 1eedbfd..5aebb22 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
@@ -45,8 +45,6 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-api</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
index 6ee9711..34dedfd 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
@@ -49,34 +49,56 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-control-nc</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-common</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-rtree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
-      <artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
+      <artifactId>hyracks-dataflow-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-lsm-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-std</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
index 69e9af5..937c299 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
@@ -48,14 +48,36 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-bloomfilter</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
   </dependencies>
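
The junit entries added in these test POMs carry no <version> element; the version is presumably supplied by the parent's <dependencyManagement>, which the hyracks-fullstack/pom.xml hunk further down moves to 4.12 via the junit.version property. A sketch of the managed entry this relies on (assuming junit is declared in the parent's dependencyManagement, which is not shown in this patch):

    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>junit</groupId>
          <artifactId>junit</artifactId>
          <!-- junit.version is bumped to 4.12 in the hyracks-fullstack/pom.xml change below -->
          <version>${junit.version}</version>
        </dependency>
      </dependencies>
    </dependencyManagement>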
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index e9f7fc1..c57690b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -49,15 +49,41 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
index 34a1ab8..bd81ee9 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
@@ -48,20 +48,55 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-btree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-common</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-lsm-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-nc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-btree</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
index 9d6bc2a..40ae5d1 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
@@ -48,15 +48,35 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-common</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-nc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
index bab356b..ab1b840 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
@@ -48,30 +48,65 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-data-std</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-util</artifactId>
       <version>${project.version}</version>
-      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-lsm-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-nc</artifactId>
+      <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-btree</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
index b0405d4..ff0aebe 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
@@ -48,15 +48,55 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-lsm-rtree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-lsm-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-rtree</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-control-nc</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
     </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index 99101fd..d5cd2ea 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -49,15 +49,42 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-am-rtree</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-dataflow-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-am-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-storage-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-data-std</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
index 6dd96d3..baa5e76 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -49,14 +49,20 @@
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-storage-common</artifactId>
       <version>${project.version}</version>
-      <scope>compile</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hyracks</groupId>
       <artifactId>hyracks-test-support</artifactId>
       <version>${project.version}</version>
-      <type>jar</type>
-      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hyracks</groupId>
+      <artifactId>hyracks-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
     </dependency>
   </dependencies>
 </project>
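
The hyracks-fullstack/pom.xml change that follows wires the maven-dependency-plugin's analyze-only goal into the process-test-classes phase with failOnWarning=true, so the build now fails on used-but-undeclared (and declared-but-unused) dependencies; that is presumably what drives the long lists of explicit hyracks-api, hyracks-data-std, and similar declarations added above. Roughly the same check can be run by hand (a sketch; it assumes the plugin's standard failOnWarning user property):

    mvn dependency:analyze-only -DfailOnWarning=true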
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index 1a0ed20..af536e1 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -55,8 +55,8 @@
     </global.test.excludes>
     <!-- Versions under dependencymanagement or used in many projects via properties -->
     <hadoop.version>2.2.0</hadoop.version>
-    <junit.version>4.8.1</junit.version>
-    <commons.io.version>2.4</commons.io.version>
+    <junit.version>4.12</junit.version>
+    <commons.io.version>2.5</commons.io.version>
     <jacoco.version>0.7.6.201602180812</jacoco.version>
   </properties>
   <dependencyManagement>
@@ -97,6 +97,13 @@
         <version>${hadoop.version}</version>
       </dependency>
       <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop.version}</version>
+        <classifier>tests</classifier>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
         <groupId>commons-io</groupId>
         <artifactId>commons-io</artifactId>
         <version>${commons.io.version}</version>
@@ -107,6 +114,23 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.10</version>
+        <configuration>
+          <failOnWarning>true</failOnWarning>
+          <outputXML>true</outputXML>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>process-test-classes</phase>
+            <goals>
+              <goal>analyze-only</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>versions-maven-plugin</artifactId>
         <version>1.2</version>