Fixed issue 108: an empty load no longer precludes a later non-empty load of, or insert into, the same dataset.
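
Creating an INTERNAL or FEED dataset no longer bulk-loads an empty B-tree
on every partition: DatasetOperations.createInitializeDatasetJobSpec() and
its caller compileDatasetInitializeStatement() in DdlTranslator are removed.
createLoadDatasetJobSpec() likewise no longer emits index-drop jobs ahead of
the load, and it now returns a single Job instead of a List<Job>. Error
messages are cleaned up (stray leading "\n" and spaces dropped), a new empty
data file (asterix-app/data/empty.adm) backs the new tests, and regression
tests are added: dml/empty-load, dml/empty-load-with-index, and
dml/insert-into-empty-dataset-with-index.

For reference, a sketch of the call-site change in APIFramework (the
authoritative code is in the diff below):

    case LOAD_FROM_FILE: {
        CompiledLoadFromFileStatement stmtLoad = (CompiledLoadFromFileStatement) stmt;
        // One job per load statement now; formerly addAll() over a list
        // that also carried index-drop jobs.
        dmlJobs.add(DatasetOperations.createLoadDatasetJobSpec(stmtLoad, metadata));
        break;
    }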

git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix-fix-issue-108@189 eaa15691-b419-025a-1212-ee371bd00084
diff --git a/asterix-app/data/empty.adm b/asterix-app/data/empty.adm
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/data/empty.adm
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index a5e28a5..43f0e5a 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -4,7 +4,6 @@
 import java.rmi.RemoteException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 
 import org.json.JSONException;
 
@@ -31,7 +30,6 @@
 import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
 import edu.uci.ics.asterix.metadata.declared.AqlCompiledMetadataDeclarations;
 import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
-import edu.uci.ics.asterix.metadata.entities.Index;
 import edu.uci.ics.asterix.optimizer.base.RuleCollections;
 import edu.uci.ics.asterix.runtime.job.listener.JobEventListenerFactory;
 import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
@@ -69,7 +67,6 @@
 import edu.uci.ics.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
 import edu.uci.ics.hyracks.algebricks.core.utils.Pair;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
-import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 
@@ -194,7 +191,7 @@
                 switch (stmt.getKind()) {
                     case LOAD_FROM_FILE: {
                         CompiledLoadFromFileStatement stmtLoad = (CompiledLoadFromFileStatement) stmt;
-                        dmlJobs.addAll(DatasetOperations.createLoadDatasetJobSpec(stmtLoad, metadata));
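+                        // createLoadDatasetJobSpec now returns a single Job rather than a List<Job>.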
+                        dmlJobs.add(DatasetOperations.createLoadDatasetJobSpec(stmtLoad, metadata));
                         break;
                     }
                     case WRITE_FROM_QUERY_RESULT: {
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
index f8631ab..f3dc2d4 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/DdlTranslator.java
@@ -21,8 +21,8 @@
 import java.util.List;
 import java.util.Map;
 
-import edu.uci.ics.asterix.api.common.SessionConfig;
 import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
+import edu.uci.ics.asterix.api.common.SessionConfig;
 import edu.uci.ics.asterix.aql.base.Statement;
 import edu.uci.ics.asterix.aql.base.Statement.Kind;
 import edu.uci.ics.asterix.aql.expression.CreateDataverseStatement;
@@ -43,15 +43,15 @@
 import edu.uci.ics.asterix.aql.expression.OrderedListTypeDefinition;
 import edu.uci.ics.asterix.aql.expression.Query;
 import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition;
+import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition.RecordKind;
 import edu.uci.ics.asterix.aql.expression.TypeDecl;
 import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
 import edu.uci.ics.asterix.aql.expression.TypeExpression;
 import edu.uci.ics.asterix.aql.expression.TypeReferenceExpression;
 import edu.uci.ics.asterix.aql.expression.UnorderedListTypeDefinition;
-import edu.uci.ics.asterix.aql.expression.RecordTypeDefinition.RecordKind;
 import edu.uci.ics.asterix.aql.util.FunctionUtils;
-import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
+import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.functions.FunctionConstants;
 import edu.uci.ics.asterix.common.parse.IParseFileSplitsDecl;
@@ -63,6 +63,7 @@
 import edu.uci.ics.asterix.metadata.MetadataTransactionContext;
 import edu.uci.ics.asterix.metadata.declared.AqlCompiledMetadataDeclarations;
 import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap;
+import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap.ARTIFACT_KIND;
 import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
 import edu.uci.ics.asterix.metadata.entities.Dataset;
 import edu.uci.ics.asterix.metadata.entities.Datatype;
@@ -73,7 +74,6 @@
 import edu.uci.ics.asterix.metadata.entities.Index;
 import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.entities.NodeGroup;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap.ARTIFACT_KIND;
 import edu.uci.ics.asterix.om.types.AOrderedListType;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -132,20 +132,20 @@
                     compiledDeclarations.connectToDataverse(dataverseName);
                     break;
                 }
+
                 case CREATE_DATAVERSE: {
                     checkForDataverseConnection(false);
                     CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
                     String dvName = stmtCreateDataverse.getDataverseName().getValue();
                     Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName);
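+                    // If the dataverse already exists and IF NOT EXISTS was specified, skip creation silently.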
-                    if (dv != null) {
-                        if (!stmtCreateDataverse.getIfNotExists())
-                            throw new AlgebricksException("\nA dataverse with this name " + dvName + " already exists.");
-                    } else {
-                        MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
-                                new Dataverse(dvName, stmtCreateDataverse.getFormat()));
+                    if (dv != null) {
+                        if (!stmtCreateDataverse.getIfNotExists()) {
+                            throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
+                        }
+                        break;
                     }
+                    MetadataManager.INSTANCE.addDataverse(mdTxnCtx,
+                            new Dataverse(dvName, stmtCreateDataverse.getFormat()));
                     break;
                 }
+
                 case DATASET_DECL: {
                     checkForDataverseConnection(true);
                     DatasetDecl dd = (DatasetDecl) stmt;
@@ -153,78 +153,64 @@
                     DatasetType dsType = dd.getDatasetType();
                     String itemTypeName = null;
                     IDatasetDetails datasetDetails = null;
-
                     Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, compiledDeclarations.getDataverseName(),
                             datasetName);
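+                    // If the dataset already exists and IF NOT EXISTS was specified, skip creation silently.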
-                    if (ds != null) {
-                        if (!dd.getIfNotExists())
-                            throw new AlgebricksException("\nA dataset with this name " + datasetName
-                                    + " already exists.");
-                    } else {
-                        itemTypeName = dd.getItemTypeName().getValue();
-                        Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
-                                compiledDeclarations.getDataverseName(), itemTypeName);
-                        if (dt == null)
-                            throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
-
-                        switch (dd.getDatasetType()) {
-                            case INTERNAL: {
-                                IAType itemType = dt.getDatatype();
-                                if (itemType.getTypeTag() != ATypeTag.RECORD) {
-                                    throw new AlgebricksException("Can only partition ARecord's.");
-                                }
-                                List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
-                                        .getPartitioningExprs();
-                                String ngName = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName()
-                                        .getValue();
-                                datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
-                                        InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs,
-                                        partitioningExprs, ngName);
+                    if (ds != null) {
+                        if (!dd.getIfNotExists()) {
+                            throw new AlgebricksException("A dataset with this name " + datasetName
+                                    + " already exists.");
+                        }
+                        break;
+                    }
+                    itemTypeName = dd.getItemTypeName().getValue();
+                    Datatype dt = MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
+                            compiledDeclarations.getDataverseName(), itemTypeName);
+                    if (dt == null) {
+                        throw new AlgebricksException("Type " + itemTypeName + " could not be found.");
+                    }
+                    switch (dd.getDatasetType()) {
+                        case INTERNAL: {
+                            IAType itemType = dt.getDatatype();
+                            if (itemType.getTypeTag() != ATypeTag.RECORD) {
+                                throw new AlgebricksException("Can only partition ARecords.");
                             }
-                                break;
-                            case EXTERNAL: {
-                                String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
-                                Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl())
-                                        .getProperties();
-                                datasetDetails = new ExternalDatasetDetails(adapter, properties);
-                            }
-                                break;
-                            case FEED: {
-                                IAType itemType = dt.getDatatype();
-                                if (itemType.getTypeTag() != ATypeTag.RECORD) {
-                                    throw new AlgebricksException("Can only partition ARecord's.");
-                                }
-                                List<String> partitioningExprs = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
-                                        .getPartitioningExprs();
-                                String ngName = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName()
-                                        .getValue();
-                                String adapter = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getAdapterClassname();
-                                Map<String, String> properties = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
-                                        .getProperties();
-                                String functionIdentifier = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
-                                        .getFunctionIdentifier();
-                                datasetDetails = new FeedDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
-                                        InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs,
-                                        partitioningExprs, ngName, adapter, properties, functionIdentifier,
-                                        FeedDatasetDetails.FeedState.INACTIVE.toString());
-
-                            }
-                                break;
+                            List<String> partitioningExprs = ((InternalDetailsDecl) dd.getDatasetDetailsDecl())
+                                    .getPartitioningExprs();
+                            String ngName = ((InternalDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName()
+                                    .getValue();
+                            datasetDetails = new InternalDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
+                                    InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs,
+                                    partitioningExprs, ngName);
+                            break;
                         }
-                        MetadataManager.INSTANCE.addDataset(mdTxnCtx,
-                                new Dataset(compiledDeclarations.getDataverseName(), datasetName, itemTypeName,
-                                        datasetDetails, dsType));
-
-                        // If the dataset is of type INTERNAL or FEED, Asterix
-                        // needs to create Tree indexes at all nodes
-                        // corresponding to the associated node group. This is
-                        // not required for external datasets as
-                        // the data for such a dataset is never persisted in
-                        // Asterix storage.
-                        if (dd.getDatasetType() == DatasetType.INTERNAL || dd.getDatasetType() == DatasetType.FEED) {
-                            compileDatasetInitializeStatement(hcc, mdTxnCtx.getTxnId(), datasetName);
+                        case EXTERNAL: {
+                            String adapter = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl()).getAdapter();
+                            Map<String, String> properties = ((ExternalDetailsDecl) dd.getDatasetDetailsDecl())
+                                    .getProperties();
+                            datasetDetails = new ExternalDatasetDetails(adapter, properties);
+                            break;
+                        }
+                        case FEED: {
+                            IAType itemType = dt.getDatatype();
+                            if (itemType.getTypeTag() != ATypeTag.RECORD) {
+                                throw new AlgebricksException("Can only partition ARecords.");
+                            }
+                            List<String> partitioningExprs = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
+                                    .getPartitioningExprs();
+                            String ngName = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getNodegroupName()
+                                    .getValue();
+                            String adapter = ((FeedDetailsDecl) dd.getDatasetDetailsDecl()).getAdapterClassname();
+                            Map<String, String> properties = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
+                                    .getProperties();
+                            String functionIdentifier = ((FeedDetailsDecl) dd.getDatasetDetailsDecl())
+                                    .getFunctionIdentifier();
+                            datasetDetails = new FeedDatasetDetails(InternalDatasetDetails.FileStructure.BTREE,
+                                    InternalDatasetDetails.PartitioningStrategy.HASH, partitioningExprs,
+                                    partitioningExprs, ngName, adapter, properties, functionIdentifier,
+                                    FeedDatasetDetails.FeedState.INACTIVE.toString());
+                            break;
                         }
                     }
+                    MetadataManager.INSTANCE.addDataset(mdTxnCtx,
+                            new Dataset(compiledDeclarations.getDataverseName(), datasetName, itemTypeName,
+                                    datasetDetails, dsType));
                     break;
                 }
 
@@ -234,16 +220,18 @@
                     String datasetName = stmtCreateIndex.getDatasetName().getValue();
                     Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, compiledDeclarations.getDataverseName(),
                             datasetName);
-                    if (ds == null)
-                        throw new AlgebricksException("\nThere is no dataset with this name " + datasetName);
+                    if (ds == null) {
+                        throw new AlgebricksException("There is no dataset with this name " + datasetName);
+                    }
                     String indexName = stmtCreateIndex.getIndexName().getValue();
                     Index idx = MetadataManager.INSTANCE.getIndex(mdTxnCtx, compiledDeclarations.getDataverseName(),
                             datasetName, indexName);
                     if (idx != null) {
-                        if (!stmtCreateIndex.getIfNotExists())
-                            throw new AlgebricksException("\nAn index with this name " + indexName + " already exists.");
-                        else
+                        if (!stmtCreateIndex.getIfNotExists()) {
+                            throw new AlgebricksException("An index with this name " + indexName + " already exists.");
+                        } else {
                             stmtCreateIndex.setNeedToCreate(false);
+                        }
                     } else {
                         MetadataManager.INSTANCE.addIndex(mdTxnCtx, new Index(compiledDeclarations.getDataverseName(),
                                 datasetName, indexName, stmtCreateIndex.getIndexType(),
@@ -259,7 +247,7 @@
                             compiledDeclarations.getDataverseName(), typeName);
                     if (dt != null) {
                         if (!stmtCreateType.getIfNotExists())
-                            throw new AlgebricksException("\nA datatype with this name " + typeName
+                            throw new AlgebricksException("A datatype with this name " + typeName
                                     + " already exists.");
                     } else {
                         if (builtinTypeMap.get(typeName) != null) {
@@ -279,7 +267,7 @@
                     NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, ngName);
                     if (ng != null) {
                         if (!stmtCreateNodegroup.getIfNotExists())
-                            throw new AlgebricksException("\nA nodegroup with this name " + ngName + " already exists.");
+                            throw new AlgebricksException("A nodegroup with this name " + ngName + " already exists.");
                     } else {
                         List<Identifier> ncIdentifiers = stmtCreateNodegroup.getNodeControllerNames();
                         List<String> ncNames = new ArrayList<String>(ncIdentifiers.size());
@@ -295,7 +283,7 @@
                     DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
                     String dvName = stmtDelete.getDataverseName().getValue();
                     if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.DATAVERSE, dvName)) {
-                        throw new AsterixException(" Invalid Operation cannot drop dataverse " + dvName
+                        throw new AsterixException("Invalid operation: cannot drop dataverse " + dvName
                                 + " (protected by system)");
                     }
 
@@ -305,8 +293,9 @@
 
                     Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName);
                     if (dv == null) {
-                        if (!stmtDelete.getIfExists())
-                            throw new AlgebricksException("\nThere is no dataverse with this name " + dvName + ".");
+                        if (!stmtDelete.getIfExists()) {
+                            throw new AlgebricksException("There is no dataverse with this name " + dvName + ".");
+                        }
                     } else {
                         compiledDeclarations.connectToDataverse(dvName);
                         List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dvName);
@@ -336,14 +325,14 @@
                     DropStatement stmtDelete = (DropStatement) stmt;
                     String datasetName = stmtDelete.getDatasetName().getValue();
                     if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.DATASET, datasetName)) {
-                        throw new AsterixException(" Invalid Operation cannot drop dataset " + datasetName
+                        throw new AsterixException("Invalid operation: cannot drop dataset " + datasetName
                                 + " (protected by system)");
                     }
                     Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, compiledDeclarations.getDataverseName(),
                             datasetName);
                     if (ds == null) {
                         if (!stmtDelete.getIfExists())
-                            throw new AlgebricksException("\nThere is no dataset with this name " + datasetName + ".");
+                            throw new AlgebricksException("There is no dataset with this name " + datasetName + ".");
                     } else {
                         if (ds.getType() == DatasetType.INTERNAL || ds.getType() == DatasetType.FEED) {
                             List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
@@ -365,14 +354,14 @@
                     Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, compiledDeclarations.getDataverseName(),
                             datasetName);
                     if (ds == null)
-                        throw new AlgebricksException("\nThere is no dataset with this name " + datasetName + ".");
+                        throw new AlgebricksException("There is no dataset with this name " + datasetName + ".");
                     if (ds.getType() == DatasetType.INTERNAL || ds.getType() == DatasetType.FEED) {
                         String indexName = stmtDelete.getIndexName().getValue();
                         Index idx = MetadataManager.INSTANCE.getIndex(mdTxnCtx,
                                 compiledDeclarations.getDataverseName(), datasetName, indexName);
                         if (idx == null) {
                             if (!stmtDelete.getIfExists())
-                                throw new AlgebricksException("\nThere is no index with this name " + indexName + ".");
+                                throw new AlgebricksException("There is no index with this name " + indexName + ".");
                         } else
                             compileIndexDropStatement(hcc, mdTxnCtx, datasetName, indexName);
                     } else {
@@ -389,7 +378,7 @@
                             compiledDeclarations.getDataverseName(), typeName);
                     if (dt == null) {
                         if (!stmtDelete.getIfExists())
-                            throw new AlgebricksException("\nThere is no datatype with this name " + typeName + ".");
+                            throw new AlgebricksException("There is no datatype with this name " + typeName + ".");
                     } else
                         MetadataManager.INSTANCE.dropDatatype(mdTxnCtx, compiledDeclarations.getDataverseName(),
                                 typeName);
@@ -399,13 +388,13 @@
                     NodeGroupDropStatement stmtDelete = (NodeGroupDropStatement) stmt;
                     String nodegroupName = stmtDelete.getNodeGroupName().getValue();
                     if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.NODEGROUP, nodegroupName)) {
-                        throw new AsterixException(" Invalid Operation cannot drop nodegroup " + nodegroupName
+                        throw new AsterixException("Invalid operation: cannot drop nodegroup " + nodegroupName
                                 + " (protected by system)");
                     }
                     NodeGroup ng = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
                     if (ng == null) {
                         if (!stmtDelete.getIfExists())
-                            throw new AlgebricksException("\nThere is no nodegroup with this name " + nodegroupName
+                            throw new AlgebricksException("There is no nodegroup with this name " + nodegroupName
                                     + ".");
                     } else
                         MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroupName);
@@ -416,12 +405,10 @@
                     CreateFunctionStatement cfs = (CreateFunctionStatement) stmt;
                     Function function = new Function(compiledDeclarations.getDataverseName(), cfs.getFunctionIdentifier().getFunctionName(),
                             cfs.getFunctionIdentifier().getArity(), cfs.getParamList(), cfs.getFunctionBody());
-                    
-                    
                     try {
                         FunctionUtils.getFunctionDecl(function);
                     } catch (Exception e) {
-                        throw new AsterixException("unable to compile function definition", e);
+                        throw new AsterixException("Unable to compile function definition", e);
                     }
                     MetadataManager.INSTANCE.addFunction(mdTxnCtx, new Function(
                             compiledDeclarations.getDataverseName(), cfs.getFunctionIdentifier().getFunctionName(), cfs
@@ -436,14 +423,14 @@
                     FunctionIdentifier fId = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, functionName,
                             stmtDropFunction.getArity(), false);
                     if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.FUNCTION, fId)) {
-                        throw new AsterixException(" Invalid Operation cannot drop function " + functionName
+                        throw new AsterixException("Invalid operation: cannot drop function " + functionName
                                 + " (protected by system)");
                     }
                     Function function = MetadataManager.INSTANCE.getFunction(mdTxnCtx,
                             compiledDeclarations.getDataverseName(), functionName, stmtDropFunction.getArity());
                     if (function == null) {
                         if (!stmtDropFunction.getIfExists())
-                            throw new AlgebricksException("\nThere is no function with this name " + functionName + ".");
+                            throw new AlgebricksException("There is no function with this name " + functionName + ".");
                     } else {
                         MetadataManager.INSTANCE.dropFunction(mdTxnCtx, compiledDeclarations.getDataverseName(),
                                 functionName, stmtDropFunction.getArity());
@@ -454,9 +441,9 @@
         }
 
         if (disconnectFromDataverse) {
-            // disconnect the dataverse
-            if (compiledDeclarations.isConnectedToDataverse())
+            if (compiledDeclarations.isConnectedToDataverse()) {
                 compiledDeclarations.disconnectFromDataverse();
+            }
         }
     }
 
@@ -497,15 +484,6 @@
         MetadataManager.INSTANCE.dropDataset(mdTxnCtx, compiledDeclarations.getDataverseName(), datasetName);
     }
 
-    private void compileDatasetInitializeStatement(IHyracksClientConnection hcc, long txnId, String datasetName)
-            throws Exception {
-        JobSpecification[] jobs = DatasetOperations.createInitializeDatasetJobSpec(txnId, datasetName,
-                compiledDeclarations);
-        for (JobSpecification job : jobs) {
-            runJob(hcc, job);
-        }
-    }
-
     public AqlCompiledMetadataDeclarations getCompiledDeclarations() {
         return compiledDeclarations;
     }
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index 1ddbb1f..6c6a501 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -15,7 +15,6 @@
 package edu.uci.ics.asterix.file;
 
 import java.rmi.RemoteException;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
 
@@ -28,7 +27,6 @@
 import edu.uci.ics.asterix.common.context.AsterixTreeRegistryProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.formats.base.IDataFormat;
-import edu.uci.ics.asterix.metadata.MetadataException;
 import edu.uci.ics.asterix.metadata.declared.AqlCompiledDatasetDecl;
 import edu.uci.ics.asterix.metadata.declared.AqlCompiledExternalDatasetDetails;
 import edu.uci.ics.asterix.metadata.declared.AqlCompiledIndexDecl;
@@ -37,7 +35,6 @@
 import edu.uci.ics.asterix.metadata.utils.DatasetUtils;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.runtime.operators.std.NoTupleSourceRuntimeFactory;
 import edu.uci.ics.asterix.transaction.management.exception.ACIDException;
 import edu.uci.ics.asterix.translator.DmlTranslator.CompiledLoadFromFileStatement;
 import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
@@ -111,7 +108,7 @@
             int n = secondaryIndexes.size();
             specs = new JobSpecification[n + 1];
             int i = 0;
-            // first, drop indexes
+            // First, drop secondary indexes.
             for (AqlCompiledIndexDecl acid : secondaryIndexes) {
                 specs[i] = new JobSpecification();
                 Pair<IFileSplitProvider, AlgebricksPartitionConstraint> idxSplitsAndConstraint = metadata
@@ -140,88 +137,9 @@
         return specs;
     }
 
-    public static JobSpecification[] createInitializeDatasetJobSpec(long txnId, String datasetName,
-            AqlCompiledMetadataDeclarations metadata) throws AsterixException {
-
-        AqlCompiledDatasetDecl compiledDatasetDecl = metadata.findDataset(datasetName);
-        if (compiledDatasetDecl == null) {
-            throw new AsterixException("Could not find dataset " + datasetName);
-        }
-        if (compiledDatasetDecl.getDatasetType() != DatasetType.INTERNAL
-                && compiledDatasetDecl.getDatasetType() != DatasetType.FEED) {
-            throw new AsterixException("Cannot initialize  dataset  (" + datasetName + ")" + "of type "
-                    + compiledDatasetDecl.getDatasetType());
-        }
-
-        ARecordType itemType = (ARecordType) metadata.findType(compiledDatasetDecl.getItemTypeName());
-        IDataFormat format;
-        ISerializerDeserializer payloadSerde;
-        IBinaryComparatorFactory[] comparatorFactories;
-        ITypeTraits[] typeTraits;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint;
-
-        try {
-            format = metadata.getFormat();
-            payloadSerde = format.getSerdeProvider().getSerializerDeserializer(itemType);
-            comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(compiledDatasetDecl, metadata
-                    .getFormat().getBinaryComparatorFactoryProvider());
-            typeTraits = DatasetUtils.computeTupleTypeTraits(compiledDatasetDecl, metadata);
-            splitsAndConstraint = metadata.splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName,
-                    datasetName);
-
-        } catch (AlgebricksException e1) {
-            throw new AsterixException(e1);
-        }
-
-        IIndexRegistryProvider<IIndex> btreeRegistryProvider = AsterixTreeRegistryProvider.INSTANCE;
-        IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
-
-        JobSpecification spec = new JobSpecification();
-        RecordDescriptor recDesc;
-        try {
-            recDesc = computePayloadKeyRecordDescriptor(compiledDatasetDecl, payloadSerde, metadata.getFormat());
-            NoTupleSourceRuntimeFactory factory = new NoTupleSourceRuntimeFactory();
-            AlgebricksMetaOperatorDescriptor asterixOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 1,
-                    new IPushRuntimeFactory[] { factory }, new RecordDescriptor[] { recDesc });
-
-            // move key fieldsx to front
-            List<Triple<IEvaluatorFactory, ScalarFunctionCallExpression, IAType>> partitioningFunctions = DatasetUtils
-                    .getPartitioningFunctions(compiledDatasetDecl);
-            int numKeys = partitioningFunctions.size();
-            int[] keys = new int[numKeys];
-            for (int i = 0; i < numKeys; i++) {
-                keys[i] = i + 1;
-            }
-
-            int[] fieldPermutation = new int[numKeys + 1];
-            System.arraycopy(keys, 0, fieldPermutation, 0, numKeys);
-            fieldPermutation[numKeys] = 0;
-
-            TreeIndexBulkLoadOperatorDescriptor bulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                    storageManager, btreeRegistryProvider, splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation,
-                    GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
-
-            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixOp,
-                    splitsAndConstraint.second);
-            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, bulkLoad,
-                    splitsAndConstraint.second);
-
-            spec.connect(new OneToOneConnectorDescriptor(spec), asterixOp, 0, bulkLoad, 0);
-            spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-            spec.addRoot(bulkLoad);
-        } catch (AlgebricksException e) {
-            throw new AsterixException(e);
-        }
-
-        return new JobSpecification[] { spec };
-    }
-
-    @SuppressWarnings("unchecked")
-    public static List<Job> createLoadDatasetJobSpec(CompiledLoadFromFileStatement loadStmt,
-            AqlCompiledMetadataDeclarations metadata) throws AsterixException {
-
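+    /**
+     * Builds the single job that scans the external data source and bulk-loads
+     * the dataset's primary B-tree, sorting the input first unless the load
+     * statement declares it pre-sorted.
+     */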
+    public static Job createLoadDatasetJobSpec(CompiledLoadFromFileStatement loadStmt,
+            AqlCompiledMetadataDeclarations metadata) throws AsterixException, AlgebricksException {
         String datasetName = loadStmt.getDatasetName();
-
         AqlCompiledDatasetDecl compiledDatasetDecl = metadata.findDataset(datasetName);
         if (compiledDatasetDecl == null) {
             throw new AsterixException("Could not find dataset " + datasetName);
@@ -231,56 +149,26 @@
             throw new AsterixException("Cannot load data into dataset  (" + datasetName + ")" + "of type "
                     + compiledDatasetDecl.getDatasetType());
         }
-
-        List<Job> jobSpecs = new ArrayList<Job>();
-        try {
-            jobSpecs.addAll(dropDatasetIndexes(datasetName, metadata));
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
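+        // Unlike the previous implementation, no index-drop jobs are generated
+        // here before the load (dropDatasetIndexes has been removed).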
+        JobSpecification spec = new JobSpecification();
 
         ARecordType itemType = (ARecordType) metadata.findType(compiledDatasetDecl.getItemTypeName());
-        IDataFormat format;
-        try {
-            format = metadata.getFormat();
-        } catch (AlgebricksException e1) {
-            throw new AsterixException(e1);
-        }
-        ISerializerDeserializer payloadSerde;
-        try {
-            payloadSerde = format.getSerdeProvider().getSerializerDeserializer(itemType);
-        } catch (AlgebricksException e) {
-            throw new AsterixException(e);
-        }
+        IDataFormat format = metadata.getFormat();
+        ISerializerDeserializer payloadSerde = format.getSerdeProvider().getSerializerDeserializer(itemType);
 
-        IBinaryHashFunctionFactory[] hashFactories;
-        IBinaryComparatorFactory[] comparatorFactories;
-        ITypeTraits[] typeTraits;
-        try {
-            hashFactories = DatasetUtils.computeKeysBinaryHashFunFactories(compiledDatasetDecl, metadata.getFormat()
-                    .getBinaryHashFunctionFactoryProvider());
-            comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(compiledDatasetDecl, metadata
-                    .getFormat().getBinaryComparatorFactoryProvider());
-            typeTraits = DatasetUtils.computeTupleTypeTraits(compiledDatasetDecl, metadata);
-        } catch (AlgebricksException e) {
-            throw new AsterixException(e);
-        }
+        IBinaryHashFunctionFactory[] hashFactories = DatasetUtils.computeKeysBinaryHashFunFactories(
+                compiledDatasetDecl, metadata.getFormat().getBinaryHashFunctionFactoryProvider());
+        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
+                compiledDatasetDecl, metadata.getFormat().getBinaryComparatorFactoryProvider());
+        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(compiledDatasetDecl, metadata);
 
-        JobSpecification spec = new JobSpecification();
-        IOperatorDescriptor scanner;
-        AlgebricksPartitionConstraint scannerPc;
-        RecordDescriptor recDesc;
-        try {
-            AqlCompiledExternalDatasetDetails add = new AqlCompiledExternalDatasetDetails(loadStmt.getAdapter(),
-                    loadStmt.getProperties());
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = AqlMetadataProvider
-                    .buildExternalDataScannerRuntime(spec, itemType, add, format);
-            scanner = p.first;
-            scannerPc = p.second;
-            recDesc = computePayloadKeyRecordDescriptor(compiledDatasetDecl, payloadSerde, metadata.getFormat());
-        } catch (AlgebricksException e) {
-            throw new AsterixException(e);
-        }
+        AqlCompiledExternalDatasetDetails externalDatasetDetails = new AqlCompiledExternalDatasetDetails(loadStmt.getAdapter(),
+                loadStmt.getProperties());
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = AqlMetadataProvider
+                .buildExternalDataScannerRuntime(spec, itemType, externalDatasetDetails, format);
+        IOperatorDescriptor scanner = p.first;
+        AlgebricksPartitionConstraint scannerPc = p.second;
+        RecordDescriptor recDesc = computePayloadKeyRecordDescriptor(compiledDatasetDecl, payloadSerde,
+                metadata.getFormat());
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, scanner, scannerPc);
 
         AssignRuntimeFactory assign = makeAssignRuntimeFactory(compiledDatasetDecl);
@@ -296,23 +184,15 @@
         for (int i = 0; i < numKeys; i++) {
             keys[i] = i + 1;
         }
-        int framesLimit = physicalOptimizationConfig.getMaxFramesExternalSort();
-
-        IIndexRegistryProvider<IIndex> btreeRegistryProvider = AsterixTreeRegistryProvider.INSTANCE;
-        IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
-
-        // move key fields to front
+        // Move key fields to front.
         int[] fieldPermutation = new int[numKeys + 1];
-        System.arraycopy(keys, 0, fieldPermutation, 0, numKeys);
+        for (int i = 0; i < numKeys; i++) {
+            fieldPermutation[i] = i + 1;
+        }
         fieldPermutation[numKeys] = 0;
 
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint;
-        try {
-            splitsAndConstraint = metadata.splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName,
-                    datasetName);
-        } catch (AlgebricksException e) {
-            throw new AsterixException(e);
-        }
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
+                .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, datasetName);
 
         FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
         StringBuilder sb = new StringBuilder();
@@ -321,23 +201,25 @@
         }
         LOGGER.info("LOAD into File Splits: " + sb.toString());
 
+        IIndexRegistryProvider<IIndex> btreeRegistryProvider = AsterixTreeRegistryProvider.INSTANCE;
+        IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, btreeRegistryProvider, splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation,
-                GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, btreeRegistryProvider, splitsAndConstraint.first, typeTraits, comparatorFactories,
+                fieldPermutation, GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeBulkLoad,
                 splitsAndConstraint.second);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, asterixOp, 0);
 
         if (!loadStmt.alreadySorted()) {
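+            // Input is not pre-sorted: hash-partition and sort on the primary key fields before bulk-loading.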
+            int framesLimit = physicalOptimizationConfig.getMaxFramesExternalSort();
             ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, framesLimit, keys,
                     comparatorFactories, recDesc);
             AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, sorter,
                     splitsAndConstraint.second);
-
             IConnectorDescriptor hashConn = new MToNPartitioningConnectorDescriptor(spec,
                     new FieldHashPartitionComputerFactory(keys, hashFactories));
-
             spec.connect(hashConn, asterixOp, 0, sorter, 0);
             spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
         } else {
@@ -345,45 +227,12 @@
                     new FieldHashPartitionComputerFactory(keys, hashFactories), keys, comparatorFactories);
             spec.connect(sortMergeConn, asterixOp, 0, btreeBulkLoad, 0);
         }
-
         spec.addRoot(btreeBulkLoad);
         spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        
-        jobSpecs.add(new Job(spec));
-        return jobSpecs;
+
+        return new Job(spec);
     }
-
-    private static List<Job> dropDatasetIndexes(String datasetName, AqlCompiledMetadataDeclarations metadata)
-            throws AlgebricksException, MetadataException {
-
-        AqlCompiledDatasetDecl adecl = metadata.findDataset(datasetName);
-        if (adecl == null) {
-            throw new AlgebricksException("DROP DATASET INDEXES: No metadata for dataset " + datasetName);
-        }
-
-        List<AqlCompiledIndexDecl> indexes = DatasetUtils.getSecondaryIndexes(adecl);
-        indexes.add(DatasetUtils.getPrimaryIndex(adecl));
-
-        List<Job> specs = new ArrayList<Job>();
-        IIndexRegistryProvider<IIndex> btreeRegistryProvider = AsterixTreeRegistryProvider.INSTANCE;
-        IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
-
-        if (indexes != null && !indexes.isEmpty()) {
-            // first, drop indexes
-            for (AqlCompiledIndexDecl acid : indexes) {
-                JobSpecification spec = new JobSpecification();
-                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> idxSplitsAndConstraint = metadata
-                        .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, acid.getIndexName());
-                TreeIndexDropOperatorDescriptor secondaryBtreeDrop = new TreeIndexDropOperatorDescriptor(spec,
-                        storageManager, btreeRegistryProvider, idxSplitsAndConstraint.first);
-                AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryBtreeDrop,
-                        idxSplitsAndConstraint.second);
-                specs.add(new Job(spec));
-            }
-        }
-        return specs;
-    }
-
+
     private static String stringOf(FileSplit fs) {
         return fs.getNodeName() + ":" + fs.getLocalFile().toString();
     }
@@ -409,7 +258,6 @@
         return new AssignRuntimeFactory(outColumns, evalFactories, projectionList);
     }
 
-    @SuppressWarnings("unchecked")
     private static RecordDescriptor computePayloadKeyRecordDescriptor(AqlCompiledDatasetDecl compiledDatasetDecl,
             ISerializerDeserializer payloadSerde, IDataFormat dataFormat) throws AlgebricksException {
         List<Triple<IEvaluatorFactory, ScalarFunctionCallExpression, IAType>> partitioningFunctions = DatasetUtils
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
new file mode 100644
index 0000000..ee418e4
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load-with-index.aql
@@ -0,0 +1,35 @@
+/* 
+ * Test case Name  : empty-load-with-index.aql
+ * Description     : Check that an empty load doesn't preclude a future non-empty load on a dataset with a secondary index
+ * Expected Result : Success
+ * Date            : May 2 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineIDType as closed {
+  l_orderkey: int32, 
+  l_linenumber: int32, 
+  l_suppkey: int32
+}
+
+create dataset LineID(LineIDType)
+  partitioned by key l_orderkey, l_linenumber;
+
+create index linenum_index on LineID(l_linenumber);
+
+load dataset LineID 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/empty.adm"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset LineID 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+write output to nc1:"rttest/dml_empty-load-with-index.adm";
+for $c in dataset('LineID')
+order by $c.l_orderkey, $c.l_linenumber
+limit 1
+return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
new file mode 100644
index 0000000..5f51b93
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/empty-load.aql
@@ -0,0 +1,33 @@
+/* 
+ * Test case Name  : empty-load.aql
+ * Description     : Check that an empty load doesn't preclude a future non-empty load on a primary index
+ * Expected Result : Success
+ * Date            : May 2 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineIDType as closed {
+  l_orderkey: int32, 
+  l_linenumber: int32, 
+  l_suppkey: int32
+}
+
+create dataset LineID(LineIDType)
+  partitioned by key l_orderkey, l_linenumber;
+
+load dataset LineID 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/empty.adm"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset LineID 
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+write output to nc1:"rttest/dml_empty-load.adm";
+for $c in dataset('LineID')
+order by $c.l_orderkey, $c.l_linenumber
+limit 1
+return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
new file mode 100644
index 0000000..f88b897
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset-with-index.aql
@@ -0,0 +1,50 @@
+/* 
+ * Test case Name  : insert-into-empty-dataset-with-index.aql
+ * Description     : Check that we can insert into an empty dataset and its empty secondary indexes 
+ * Expected Result : Success
+ * Date            : May 2 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineIDType as closed {
+  l_orderkey: int32, 
+  l_linenumber: int32, 
+  l_suppkey: int32
+}
+
+create dataset LineID(LineIDType)
+  partitioned by key l_orderkey, l_linenumber;
+
+create index idx_LineID_partkey on LineID(l_linenumber);
+create index idx_LineID_suppkey on LineID(l_suppkey);
+
+insert into dataset LineID (
+let $x:=1
+let $y:=2
+let $z:=3
+return {
+  "l_orderkey": $x,
+  "l_linenumber": $y,
+  "l_suppkey": $z
+}
+);
+
+insert into dataset LineID (
+let $x:=2
+let $y:=3
+let $z:=4
+return {
+  "l_orderkey": $x,
+  "l_linenumber": $y,
+  "l_suppkey": $z
+}
+);
+
+write output to nc1:"rttest/dml_insert-into-empty-dataset-with-index.adm";
+for $c in dataset('LineID')
+where $c.l_suppkey < 100 and $c.l_linenumber < 5
+order by $c.l_orderkey, $c.l_linenumber
+return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
index 5c19730..ad7e3b2 100644
--- a/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/insert-into-empty-dataset.aql
@@ -1,3 +1,10 @@
+/* 
+ * Test case Name  : insert-into-empty-dataset.aql
+ * Description     : Check that we can insert into an empty dataset 
+ * Expected Result : Success
+ * Date            : May 2 2012
+ */
+
 drop dataverse test if exists;
 create dataverse test;
 use dataverse test;
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/empty-load-with-index.adm b/asterix-app/src/test/resources/runtimets/results/dml/empty-load-with-index.adm
new file mode 100644
index 0000000..8eabe57
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/empty-load-with-index.adm
@@ -0,0 +1 @@
+{ "l_orderkey": 1, "l_linenumber": 3, "l_suppkey": 6 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/empty-load.adm b/asterix-app/src/test/resources/runtimets/results/dml/empty-load.adm
new file mode 100644
index 0000000..8eabe57
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/empty-load.adm
@@ -0,0 +1 @@
+{ "l_orderkey": 1, "l_linenumber": 3, "l_suppkey": 6 }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index.adm b/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index.adm
new file mode 100644
index 0000000..1b6c344
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/insert-into-empty-dataset-with-index.adm
@@ -0,0 +1,2 @@
+{ "l_orderkey": 1, "l_linenumber": 2, "l_suppkey": 3 }
+{ "l_orderkey": 2, "l_linenumber": 3, "l_suppkey": 4 }