Merged asterix_stabilization r172:r179.

Adapts to the revised Hyracks access-method APIs: tree index operator
descriptors no longer take interior/leaf frame factories, the BTree/RTree
search, bulk-load, and insert/update/delete operators now take an
IOperationCallbackProvider (NoOpOperationCallbackProvider.INSTANCE for now),
RTreeDataflowHelperFactory is constructed with the value provider factories,
and the metadata BTrees are created with comparator factories and
NoOpOperationCallback.

git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix_stabilization_btree_fixes@180 eaa15691-b419-025a-1212-ee371bd00084
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
index 9f41b65..1ddbb1f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/DatasetOperations.java
@@ -21,9 +21,9 @@
 
 import edu.uci.ics.asterix.api.common.Job;
 import edu.uci.ics.asterix.aql.translator.DdlTranslator.CompiledDatasetDropStatement;
+import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
 import edu.uci.ics.asterix.common.config.OptimizationConfUtil;
-import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.context.AsterixStorageManagerInterface;
 import edu.uci.ics.asterix.common.context.AsterixTreeRegistryProvider;
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
@@ -69,11 +69,11 @@
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDropOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 public class DatasetOperations {
@@ -173,9 +173,6 @@
             throw new AsterixException(e1);
         }
 
-        ITreeIndexFrameFactory interiorFrameFactory = AqlMetadataProvider.createBTreeNSMInteriorFrameFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = AqlMetadataProvider.createBTreeNSMLeafFrameFactory(typeTraits);
-
         IIndexRegistryProvider<IIndex> btreeRegistryProvider = AsterixTreeRegistryProvider.INSTANCE;
         IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
 
@@ -201,9 +198,8 @@
             fieldPermutation[numKeys] = 0;
 
             TreeIndexBulkLoadOperatorDescriptor bulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                    storageManager, btreeRegistryProvider, splitsAndConstraint.first, interiorFrameFactory,
-                    leafFrameFactory, typeTraits, comparatorFactories, fieldPermutation,
-                    GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory());
+                    storageManager, btreeRegistryProvider, splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation,
+                    GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
 
             AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, asterixOp,
                     splitsAndConstraint.second);
@@ -302,9 +298,6 @@
         }
         int framesLimit = physicalOptimizationConfig.getMaxFramesExternalSort();
 
-        ITreeIndexFrameFactory interiorFrameFactory = AqlMetadataProvider.createBTreeNSMInteriorFrameFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = AqlMetadataProvider.createBTreeNSMLeafFrameFactory(typeTraits);
-
         IIndexRegistryProvider<IIndex> btreeRegistryProvider = AsterixTreeRegistryProvider.INSTANCE;
         IStorageManagerInterface storageManager = AsterixStorageManagerInterface.INSTANCE;
 
@@ -329,9 +322,8 @@
         LOGGER.info("LOAD into File Splits: " + sb.toString());
 
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, btreeRegistryProvider, splitsAndConstraint.first, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, fieldPermutation,
-                GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory());
+                storageManager, btreeRegistryProvider, splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation,
+                GlobalConfig.DEFAULT_BTREE_FILL_FACTOR, new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeBulkLoad,
                 splitsAndConstraint.second);
 
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
index 730fac6..c9cb798 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/IndexOperations.java
@@ -31,7 +31,6 @@
 import edu.uci.ics.hyracks.algebricks.core.algebra.runtime.base.IEvaluatorFactory;
 import edu.uci.ics.hyracks.algebricks.core.algebra.runtime.base.IPushRuntimeFactory;
 import edu.uci.ics.hyracks.algebricks.core.algebra.runtime.jobgen.impl.ConnectorPolicyAssignmentPolicy;
-import edu.uci.ics.hyracks.algebricks.core.algebra.runtime.jobgen.impl.JobGenHelper;
 import edu.uci.ics.hyracks.algebricks.core.algebra.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
 import edu.uci.ics.hyracks.algebricks.core.algebra.runtime.operators.std.AssignRuntimeFactory;
 import edu.uci.ics.hyracks.algebricks.core.api.constraints.AlgebricksPartitionConstraint;
@@ -60,12 +59,10 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDropOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 public class IndexOperations {
@@ -202,11 +199,6 @@
         primaryRecFields[numPrimaryKeys] = payloadSerde;
         primaryTypeTraits[numPrimaryKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
 
-        ITreeIndexFrameFactory primaryInteriorFrameFactory = AqlMetadataProvider
-                .createBTreeNSMInteriorFrameFactory(primaryTypeTraits);
-        ITreeIndexFrameFactory primaryLeafFrameFactory = AqlMetadataProvider
-                .createBTreeNSMLeafFrameFactory(primaryTypeTraits);
-
         int[] lowKeyFields = null; // -infinity
         int[] highKeyFields = null; // +infinity
         RecordDescriptor primaryRecDesc = new RecordDescriptor(primaryRecFields);
@@ -214,10 +206,12 @@
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadata
                 .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, datasetName);
 
-        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, treeRegistryProvider, primarySplitsAndConstraint.first, primaryInteriorFrameFactory,
-                primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, true, lowKeyFields,
-                highKeyFields, true, true, new BTreeDataflowHelperFactory());
+        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(
+                spec, primaryRecDesc, storageManager, treeRegistryProvider,
+                primarySplitsAndConstraint.first, primaryTypeTraits,
+                primaryComparatorFactories, lowKeyFields, highKeyFields, true,
+                true, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
 
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
                 primarySplitsAndConstraint.second);
@@ -296,11 +290,6 @@
 
         // ---------- START SECONDARY INDEX BULK LOAD
 
-        ITreeIndexFrameFactory secondaryInteriorFrameFactory = AqlMetadataProvider
-                .createBTreeNSMInteriorFrameFactory(secondaryTypeTraits);
-        ITreeIndexFrameFactory secondaryLeafFrameFactory = AqlMetadataProvider
-                .createBTreeNSMLeafFrameFactory(secondaryTypeTraits);
-
         int[] fieldPermutation = new int[numSecondaryKeys + numPrimaryKeys];
         for (i = 0; i < numSecondaryKeys + numPrimaryKeys; i++)
             fieldPermutation[i] = i;
@@ -309,10 +298,12 @@
                 .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(datasetName, secondaryIndexName);
 
         // GlobalConfig.DEFAULT_BTREE_FILL_FACTOR
-        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, treeRegistryProvider, secondarySplitsAndConstraint.first,
-                secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits,
-                secondaryComparatorFactories, fieldPermutation, 0.7f, new BTreeDataflowHelperFactory());
+        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(
+                spec, storageManager, treeRegistryProvider,
+                secondarySplitsAndConstraint.first, secondaryTypeTraits,
+                secondaryComparatorFactories, fieldPermutation, 0.7f,
+                new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryBulkLoadOp,
                 secondarySplitsAndConstraint.second);
 
@@ -425,10 +416,12 @@
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadata
                 .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(primaryIndexName, primaryIndexName);
 
-        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, treeRegistryProvider, primarySplitsAndConstraint.first, primaryInteriorFrameFactory,
-                primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, true, lowKeyFields,
-                highKeyFields, true, true, new BTreeDataflowHelperFactory());
+        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(
+                spec, primaryRecDesc, storageManager, treeRegistryProvider,
+                primarySplitsAndConstraint.first, primaryTypeTraits,
+                primaryComparatorFactories, lowKeyFields, highKeyFields, true,
+                true, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
 
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
                 primarySplitsAndConstraint.second);
@@ -510,18 +503,6 @@
 
         // ---------- START SECONDARY INDEX BULK LOAD
 
-        /*
-        ITreeIndexFrameFactory secondaryInteriorFrameFactory = JobGenHelper.createRTreeNSMInteriorFrameFactory(
-                secondaryTypeTraits, numNestedSecondaryKeyFields);
-        ITreeIndexFrameFactory secondaryLeafFrameFactory = JobGenHelper.createRTreeNSMLeafFrameFactory(
-                secondaryTypeTraits, numNestedSecondaryKeyFields);
-        */
-
-        ITreeIndexFrameFactory secondaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-                new RTreeTypeAwareTupleWriterFactory(secondaryTypeTraits), valueProviderFactories);
-        ITreeIndexFrameFactory secondaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-                new RTreeTypeAwareTupleWriterFactory(secondaryTypeTraits), valueProviderFactories);
-
         int[] fieldPermutation = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
         for (i = 0; i < numNestedSecondaryKeyFields + numPrimaryKeys; i++)
             fieldPermutation[i] = i;
@@ -529,11 +510,12 @@
         Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadata
                 .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(primaryIndexName, secondaryIndexName);
 
-        // GlobalConfig.DEFAULT_BTREE_FILL_FACTOR
-        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, treeRegistryProvider, secondarySplitsAndConstraint.first,
-                secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits,
-                secondaryComparatorFactories, fieldPermutation, 0.7f, new RTreeDataflowHelperFactory());
+        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(
+                spec, storageManager, treeRegistryProvider,
+                secondarySplitsAndConstraint.first, secondaryTypeTraits,
+                secondaryComparatorFactories, fieldPermutation, 0.7f,
+                new RTreeDataflowHelperFactory(valueProviderFactories),
+                NoOpOperationCallbackProvider.INSTANCE);
 
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryBulkLoadOp,
                 secondarySplitsAndConstraint.second);
@@ -662,9 +644,8 @@
                 .splitProviderAndPartitionConstraintsForInternalOrFeedDataset(primaryIndexName, primaryIndexName);
 
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, treeRegistryProvider, primarySplitsAndConstraint.first, primaryInteriorFrameFactory,
-                primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, true, lowKeyFields,
-                highKeyFields, true, true, new BTreeDataflowHelperFactory());
+                storageManager, treeRegistryProvider, primarySplitsAndConstraint.first, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
+                highKeyFields, true, true, new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, primarySearchOp,
                 primarySplitsAndConstraint.second);
 
@@ -774,19 +755,16 @@
         for (i = 0; i < numPrimaryKeys; i++)
             secondaryTypeTraits[i + 1] = primaryTypeTraits[i];
 
-        ITreeIndexFrameFactory secondaryInteriorFrameFactory = AqlMetadataProvider
-                .createBTreeNSMInteriorFrameFactory(secondaryTypeTraits);
-        ITreeIndexFrameFactory secondaryLeafFrameFactory = AqlMetadataProvider
-                .createBTreeNSMLeafFrameFactory(secondaryTypeTraits);
-
         int[] fieldPermutation = new int[numSecondaryKeys + numPrimaryKeys];
         for (i = 0; i < numTokenKeyPairFields; i++)
             fieldPermutation[i] = i;
 
-        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, treeRegistryProvider, secondarySplitsAndConstraint.first,
-                secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits,
-                tokenKeyPairComparatorFactories, fieldPermutation, 0.7f, new BTreeDataflowHelperFactory());
+        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = new TreeIndexBulkLoadOperatorDescriptor(
+                spec, storageManager, treeRegistryProvider,
+                secondarySplitsAndConstraint.first, secondaryTypeTraits,
+                tokenKeyPairComparatorFactories, fieldPermutation, 0.7f,
+                new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
         AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryBulkLoadOp,
                 secondarySplitsAndConstraint.second);
 
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestKeywordIndexJob.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestKeywordIndexJob.java
index a4a84db..a96ccee 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestKeywordIndexJob.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestKeywordIndexJob.java
@@ -11,11 +11,9 @@
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AObjectSerializerDeserializer;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.runtime.jobgen.impl.JobGenHelper;
 import edu.uci.ics.hyracks.api.client.HyracksConnection;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
 import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
@@ -42,9 +40,9 @@
 import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 public class TestKeywordIndexJob {
@@ -131,10 +129,6 @@
             }
         };
 
-        ITreeIndexFrameFactory interiorFrameFactory = AqlMetadataProvider
-                .createBTreeNSMInteriorFrameFactory(secondaryTypeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = AqlMetadataProvider.createBTreeNSMLeafFrameFactory(secondaryTypeTraits);
-
         ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[2];
         secondaryRecFields[0] = AObjectSerializerDeserializer.INSTANCE;
         secondaryRecFields[1] = AObjectSerializerDeserializer.INSTANCE;
@@ -150,9 +144,9 @@
                 new FileSplit("nc1", new FileReference(new File("/tmp/nc1/demo1112/Customers_idx_NameInvIndex"))),
                 new FileSplit("nc2", new FileReference(new File("/tmp/nc2/demo1112/Customers_idx_NameInvIndex"))) });
         BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, btreeRegistryProvider, secondarySplitProvider, interiorFrameFactory, leafFrameFactory,
-                secondaryTypeTraits, secondaryComparatorFactories, true, lowKeyFields, highKeyFields, true, true,
-                new BTreeDataflowHelperFactory());
+                storageManager, btreeRegistryProvider, secondarySplitProvider,
+                secondaryTypeTraits, secondaryComparatorFactories, lowKeyFields, highKeyFields, true, true,
+                new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
         String[] secondarySearchOpLocationConstraint = new String[nodeGroup.size()];
         for (int p = 0; p < nodeGroup.size(); p++) {
             secondarySearchOpLocationConstraint[p] = nodeGroup.get(p);
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestSecondaryIndexJob.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestSecondaryIndexJob.java
index cdecd74..01279f3 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestSecondaryIndexJob.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/TestSecondaryIndexJob.java
@@ -11,7 +11,6 @@
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.comparators.AObjectAscBinaryComparatorFactory;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AObjectSerializerDeserializer;
-import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
 import edu.uci.ics.asterix.om.base.AString;
 import edu.uci.ics.asterix.om.types.ATypeTag;
 import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
@@ -41,9 +40,9 @@
 import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 public class TestSecondaryIndexJob {
@@ -130,10 +129,6 @@
             }
         };
 
-        ITreeIndexFrameFactory interiorFrameFactory = AqlMetadataProvider
-                .createBTreeNSMInteriorFrameFactory(secondaryTypeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = AqlMetadataProvider.createBTreeNSMLeafFrameFactory(secondaryTypeTraits);
-
         ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[2];
         secondaryRecFields[0] = AObjectSerializerDeserializer.INSTANCE;
         secondaryRecFields[1] = AObjectSerializerDeserializer.INSTANCE;
@@ -148,10 +143,12 @@
         IFileSplitProvider secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] {
                 new FileSplit("nc1", new FileReference(new File("/tmp/nc1/demo1112/Customers_idx_NameBtreeIndex"))),
                 new FileSplit("nc2", new FileReference(new File("/tmp/nc2/demo1112/Customers_idx_NameBtreeIndex"))) });
-        BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, btreeRegistryProvider, secondarySplitProvider, interiorFrameFactory, leafFrameFactory,
-                secondaryTypeTraits, secondaryComparatorFactories, true, lowKeyFields, highKeyFields, true, true,
-                new BTreeDataflowHelperFactory());
+        BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(
+                spec, secondaryRecDesc, storageManager, btreeRegistryProvider,
+                secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, lowKeyFields, highKeyFields,
+                true, true, new BTreeDataflowHelperFactory(),
+                NoOpOperationCallbackProvider.INSTANCE);
         String[] secondarySearchOpLocationConstraint = new String[nodeGroup.size()];
         for (int p = 0; p < nodeGroup.size(); p++) {
             secondarySearchOpLocationConstraint[p] = nodeGroup.get(p);
diff --git a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
index e5a4a3c..1b6daec 100644
--- a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
+++ b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
@@ -27,7 +27,7 @@
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
@@ -46,24 +46,18 @@
 
 	private final long transactionId;
 
-	public TreeIndexInsertUpdateDeleteOperatorDescriptor(JobSpecification spec,
-			RecordDescriptor recDesc, IStorageManagerInterface storageManager,
-			IIndexRegistryProvider<IIndex> treeIndexRegistryProvider,
-			IFileSplitProvider fileSplitProvider,
-			ITreeIndexFrameFactory interiorFrameFactory,
-			ITreeIndexFrameFactory leafFrameFactory, ITypeTraits[] typeTraits,
-			IBinaryComparatorFactory[] comparatorFactories,
-			IIndexDataflowHelperFactory dataflowHelperFactory,
-			int[] fieldPermutation, IndexOp op, long transactionId) {
-		super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider,
-				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
-				typeTraits, comparatorFactories, dataflowHelperFactory);
 
-		this.fieldPermutation = fieldPermutation;
-		this.op = op;
-		this.transactionId = transactionId; // would obtain it from query
-		// context
-	}
+	public TreeIndexInsertUpdateDeleteOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+			IStorageManagerInterface storageManager, IIndexRegistryProvider<IIndex> indexRegistryProvider,
+			IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, IndexOp op,
+			IIndexDataflowHelperFactory dataflowHelperFactory, IOperationCallbackProvider opCallbackProvider, long transactionId) {
+		super(spec, 1, 1, recDesc, storageManager, indexRegistryProvider, fileSplitProvider, typeTraits,
+				comparatorFactories, dataflowHelperFactory, opCallbackProvider);
+		this.fieldPermutation = fieldPermutation;
+		this.op = op;
+		this.transactionId = transactionId;
+	}
 
 	@Override
 	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
@@ -81,8 +75,8 @@
 					" could not obtain context for invalid transaction id "
 							+ transactionId);
 		}
-		return new TreeIndexInsertUpdateDeleteOperatorNodePushable(txnContext,
-				this, ctx, partition, fieldPermutation, recordDescProvider, op);
+		return new TreeIndexInsertUpdateDeleteOperatorNodePushable(txnContext, this, ctx,
+				opCallbackProvider, partition, fieldPermutation, recordDescProvider, op);
 	}
 
 }
diff --git a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
index 0a5a2ea..3af5e91 100644
--- a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -31,8 +31,9 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
@@ -46,17 +47,18 @@
     private final IndexOp op;
     private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
     private ByteBuffer writeBuffer;
+    private IIndexAccessor indexAccessor;
     private ILockManager lockManager;
     private final TransactionContext txnContext;
     private TreeLogger bTreeLogger;
     private final TransactionProvider transactionProvider;
-    private ITreeIndexAccessor treeIndexAccessor;
 
-    public TreeIndexInsertUpdateDeleteOperatorNodePushable(TransactionContext txnContext,
-            AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition, int[] fieldPermutation,
-            IRecordDescriptorProvider recordDescProvider, IndexOp op) {
+    public TreeIndexInsertUpdateDeleteOperatorNodePushable(TransactionContext txnContext, AbstractTreeIndexOperatorDescriptor opDesc,
+            IHyracksTaskContext ctx, IOperationCallbackProvider opCallbackProvider, int partition,
+            int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider, IndexOp op) {
+        boolean createIfNotExists = (op == IndexOp.INSERT);
         treeIndexHelper = (TreeIndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
-                opDesc, ctx, partition, false);
+                opDesc, ctx, opCallbackProvider, partition, createIfNotExists);
         this.recordDescProvider = recordDescProvider;
         this.op = op;
         tuple.setFieldPermutation(fieldPermutation);
@@ -64,7 +66,7 @@
         transactionProvider = (TransactionProvider) ctx.getJobletContext().getApplicationContext()
                 .getApplicationObject();
     }
-
+
     public void initializeTransactionSupport() {
         TransactionalResourceRepository.registerTransactionalResourceManager(TreeResourceManager.ID,
                 TreeResourceManager.getInstance());
@@ -75,6 +77,7 @@
         bTreeLogger = TreeLoggerRepository.getTreeLogger(resourceId);
     }
 
+
     @Override
     public void open() throws HyracksDataException {
         AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexHelper
@@ -85,8 +88,8 @@
         writer.open();
         try {
             treeIndexHelper.init();
-            treeIndexHelper.getIndex().open(treeIndexHelper.getIndexFileId());
-            treeIndexAccessor = ((ITreeIndex) treeIndexHelper.getIndex()).createAccessor();
+            ITreeIndex treeIndex = (ITreeIndex) treeIndexHelper.getIndex();
+            indexAccessor = treeIndex.createAccessor();
             initializeTransactionSupport();
         } catch (Exception e) {
             // cleanup in case of failure
@@ -94,7 +97,7 @@
             throw new HyracksDataException(e);
         }
     }
-
+
     @Override
     public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
         final IIndex treeIndex = treeIndexHelper.getIndex();
@@ -109,18 +112,18 @@
                     case INSERT: {
                         lockManager.lock(txnContext, resourceId,
                                 TransactionManagementConstants.LockManagerConstants.LockMode.EXCLUSIVE);
-                        treeIndexAccessor.insert(tuple);
+                        indexAccessor.insert(tuple);
                         bTreeLogger.generateLogRecord(transactionProvider, txnContext, op, tuple);
-                    }
                         break;
+                    }
 
                     case DELETE: {
                         lockManager.lock(txnContext, resourceId,
                                 TransactionManagementConstants.LockManagerConstants.LockMode.EXCLUSIVE);
-                        treeIndexAccessor.delete(tuple);
+                        indexAccessor.delete(tuple);
                         bTreeLogger.generateLogRecord(transactionProvider, txnContext, op, tuple);
-                    }
                         break;
+                    }
 
                     default: {
                         throw new HyracksDataException("Unsupported operation " + op
diff --git a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeResourceManager.java b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeResourceManager.java
index 5b812fc..4309618 100644
--- a/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeResourceManager.java
+++ b/asterix-hyracks-glue/src/main/java/edu/uci/ics/asterix/runtime/transaction/TreeResourceManager.java
@@ -20,8 +20,8 @@
 import edu.uci.ics.asterix.transaction.management.service.logging.ILogRecordHelper;
 import edu.uci.ics.asterix.transaction.management.service.logging.LogicalLogLocator;
 import edu.uci.ics.asterix.transaction.management.service.transaction.IResourceManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 
 public class TreeResourceManager implements IResourceManager {
@@ -63,7 +63,7 @@
         tupleReference.setFieldCount(tupleReference.getFieldCount());
         tupleReference.resetByTupleOffset(logLocator.getBuffer().getByteBuffer(), tupleBeginPos);
         byte operation = logBufferContent[operationOffset];
-        ITreeIndexAccessor treeIndexAccessor = treeIndex.createAccessor();
+        IIndexAccessor treeIndexAccessor = treeIndex.createAccessor();
         try {
             switch (operation) {
                 case TreeLogger.BTreeOperationCodes.INSERT:
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
index eb73938..243fc61 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataNode.java
@@ -796,7 +796,7 @@
             searchCmps[i] = comparatorFactories[i].createBinaryComparator();
         }
         MultiComparator searchCmp = new MultiComparator(searchCmps);
-        RangePredicate rangePred = new RangePredicate(true, searchKey, searchKey, true, true, searchCmp, searchCmp);
+        RangePredicate rangePred = new RangePredicate(searchKey, searchKey, true, true, searchCmp, searchCmp);
         indexAccessor.search(rangeCursor, rangePred);
 
         try {
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 84fb721..b2bfaf5 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -47,7 +47,6 @@
 import edu.uci.ics.asterix.transaction.management.resource.TransactionalResourceRepository;
 import edu.uci.ics.asterix.transaction.management.service.logging.DataUtil;
 import edu.uci.ics.asterix.transaction.management.service.transaction.TransactionManagementConstants.LockManagerConstants.LockMode;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
@@ -63,7 +62,7 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
@@ -302,50 +301,30 @@
 
     public static void createIndex(IMetadataIndex dataset) throws Exception {
         int fileId = dataset.getFileId();
-        int numberOfKeyField = dataset.getKeyFieldCount();
         ITypeTraits[] typeTraits = dataset.getTypeTraits();
         IBinaryComparatorFactory[] comparatorFactories = dataset.getKeyBinaryComparatorFactory();
-
-        IBinaryComparator[] cmps = new IBinaryComparator[numberOfKeyField];
-        for (int i = 0; i < numberOfKeyField; i++)
-            cmps[i] = comparatorFactories[i].createBinaryComparator();
-
-        MultiComparator cmp = new MultiComparator(cmps);
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaDataFrameFactory);
-        BTree btree = new BTree(bufferCache, typeTraits.length, cmp, freePageManager, interiorFrameFactory,
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaDataFrameFactory);
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, typeTraits.length, comparatorFactories, freePageManager, interiorFrameFactory,
                 leafFrameFactory);
         btree.create(fileId);
         btreeRegistry.register(fileId, btree);
     }
 
     public static void enlistMetadataDataset(IMetadataIndex dataset) throws Exception {
-
         int fileId = dataset.getFileId();
-        int numberOfKeyField = dataset.getKeyFieldCount();
         ITypeTraits[] typeTraits = dataset.getTypeTraits();
         IBinaryComparatorFactory[] comparatorFactories = dataset.getKeyBinaryComparatorFactory();
-
-        IBinaryComparator[] cmps = new IBinaryComparator[numberOfKeyField];
-        for (int i = 0; i < numberOfKeyField; i++)
-            cmps[i] = comparatorFactories[i].createBinaryComparator();
-
-        MultiComparator cmp = new MultiComparator(cmps);
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-
         ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaDataFrameFactory);
-
-        BTree btree = new BTree(bufferCache, typeTraits.length, cmp, freePageManager, interiorFrameFactory,
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaDataFrameFactory);
+        BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, typeTraits.length, comparatorFactories, freePageManager, interiorFrameFactory,
                 leafFrameFactory);
-
         btreeRegistry.register(fileId, btree);
     }
 
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
index 889de2e..ec74e42 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlCompiledMetadataDeclarations.java
@@ -255,7 +255,7 @@
         return evalFactories;
     }
 
-    public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
+	public Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraintsForInternalOrFeedDataset(
             String datasetName, String targetIdxName) throws AlgebricksException, MetadataException {
         FileSplit[] splits = splitsForInternalOrFeedDataset(datasetName, targetIdxName);
         IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index f99d4ff..dc0ffdd 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -17,7 +17,6 @@
 
 import java.io.File;
 import java.util.List;
-import java.util.Map;
 
 import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
 import edu.uci.ics.asterix.common.config.GlobalConfig;
@@ -63,7 +62,6 @@
 import edu.uci.ics.hyracks.algebricks.core.api.exceptions.AlgebricksException;
 import edu.uci.ics.hyracks.algebricks.core.utils.Pair;
 import edu.uci.ics.hyracks.algebricks.core.utils.Triple;
-import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -82,13 +80,11 @@
 import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
 
 public class AqlMetadataProvider implements
 		IMetadataProvider<AqlSourceId, String> {
@@ -294,7 +290,6 @@
 		}
 
 		ARecordType rt = (ARecordType) itemType;
-		Map<String, NodeControllerInfo> ncInfo = null;
 		try {
 			adapter.configure(decl.getProperties(), itemType);
 		} catch (Exception e) {
@@ -427,9 +422,6 @@
 			++i;
 		}
 
-		ITreeIndexFrameFactory interiorFrameFactory = createBTreeNSMInteriorFrameFactory(typeTraits);
-		ITreeIndexFrameFactory leafFrameFactory = createBTreeNSMLeafFrameFactory(typeTraits);
-
 		IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context
 				.getAppContext();
 		RecordDescriptor recDesc = new RecordDescriptor(recordFields);
@@ -446,11 +438,10 @@
 		BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(
 				jobSpec, recDesc, appContext.getStorageManagerInterface(),
 				appContext.getTreeRegisterProvider(), spPc.first,
-				interiorFrameFactory, leafFrameFactory, typeTraits,
-				comparatorFactories, true, lowKeyFields, highKeyFields,
+				typeTraits, comparatorFactories,
+				lowKeyFields, highKeyFields,
 				lowKeyInclusive, highKeyInclusive,
-				new BTreeDataflowHelperFactory());
-
+				new BTreeDataflowHelperFactory(), NoOpOperationCallbackProvider.INSTANCE);
 		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(
 				btreeSearchOp, spPc.second);
 	}
@@ -553,19 +544,6 @@
 			++i;
 		}
 
-		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				new RTreeTypeAwareTupleWriterFactory(typeTraits),
-				valueProviderFactories);
-		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				new RTreeTypeAwareTupleWriterFactory(typeTraits),
-				valueProviderFactories);
-		/*
-		 * ITreeIndexFrameFactory interiorFrameFactory =
-		 * JobGenHelper.createRTreeNSMInteriorFrameFactory(typeTraits,
-		 * numNestedSecondaryKeyFields); ITreeIndexFrameFactory leafFrameFactory
-		 * = JobGenHelper.createRTreeNSMLeafFrameFactory(typeTraits,
-		 * numNestedSecondaryKeyFields);
-		 */
 		IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context
 				.getAppContext();
 		RecordDescriptor recDesc = new RecordDescriptor(recordFields);
@@ -582,9 +560,9 @@
 		RTreeSearchOperatorDescriptor rtreeSearchOp = new RTreeSearchOperatorDescriptor(
 				jobSpec, recDesc, appContext.getStorageManagerInterface(),
 				appContext.getTreeRegisterProvider(), spPc.first,
-				interiorFrameFactory, leafFrameFactory, typeTraits,
+				typeTraits,
 				comparatorFactories, keyFields,
-				new RTreeDataflowHelperFactory());
+				new RTreeDataflowHelperFactory(valueProviderFactories), NoOpOperationCallbackProvider.INSTANCE);
 
 		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(
 				rtreeSearchOp, spPc.second);
@@ -699,8 +677,6 @@
 		ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(
 				compiledDatasetDecl, metadata);
 
-		ITreeIndexFrameFactory interiorFrameFactory = createBTreeNSMInteriorFrameFactory(typeTraits);
-		ITreeIndexFrameFactory leafFrameFactory = createBTreeNSMLeafFrameFactory(typeTraits);
 		IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context
 				.getAppContext();
 
@@ -718,12 +694,12 @@
 		}
 
 		TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(
-				spec, appContext.getStorageManagerInterface(), appContext
-						.getTreeRegisterProvider(), splitsAndConstraint.first,
-				interiorFrameFactory, leafFrameFactory, typeTraits,
-				comparatorFactories, fieldPermutation,
-				GlobalConfig.DEFAULT_BTREE_FILL_FACTOR,
-				new BTreeDataflowHelperFactory());
+				spec, appContext.getStorageManagerInterface(),
+				appContext.getTreeRegisterProvider(),
+				splitsAndConstraint.first, typeTraits, comparatorFactories,
+				fieldPermutation, GlobalConfig.DEFAULT_BTREE_FILL_FACTOR,
+				new BTreeDataflowHelperFactory(),
+				NoOpOperationCallbackProvider.INSTANCE);
 		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(
 				btreeBulkLoad, splitsAndConstraint.second);
 	}
@@ -759,8 +735,6 @@
 		ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(
 				compiledDatasetDecl, metadata);
 
-		ITreeIndexFrameFactory interiorFrameFactory = createBTreeNSMInteriorFrameFactory(typeTraits);
-		ITreeIndexFrameFactory leafFrameFactory = createBTreeNSMLeafFrameFactory(typeTraits);
 		IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context
 				.getAppContext();
 
@@ -780,10 +754,10 @@
 		TreeIndexInsertUpdateDeleteOperatorDescriptor btreeBulkLoad = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
 				spec, recordDesc, appContext.getStorageManagerInterface(),
 				appContext.getTreeRegisterProvider(),
-				splitsAndConstraint.first, interiorFrameFactory,
-				leafFrameFactory, typeTraits, comparatorFactories,
-				new BTreeDataflowHelperFactory(), fieldPermutation,
-				IndexOp.INSERT, txnId);
+				splitsAndConstraint.first, typeTraits, comparatorFactories,
+				fieldPermutation, IndexOp.INSERT,
+				new BTreeDataflowHelperFactory(),
+				NoOpOperationCallbackProvider.INSTANCE, txnId);
 		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(
 				btreeBulkLoad, splitsAndConstraint.second);
 	}
@@ -819,8 +793,6 @@
 		ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(
 				compiledDatasetDecl, metadata);
 
-		ITreeIndexFrameFactory interiorFrameFactory = createBTreeNSMInteriorFrameFactory(typeTraits);
-		ITreeIndexFrameFactory leafFrameFactory = createBTreeNSMLeafFrameFactory(typeTraits);
 		IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context
 				.getAppContext();
 
@@ -840,10 +812,10 @@
 		TreeIndexInsertUpdateDeleteOperatorDescriptor btreeBulkLoad = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
 				spec, recordDesc, appContext.getStorageManagerInterface(),
 				appContext.getTreeRegisterProvider(),
-				splitsAndConstraint.first, interiorFrameFactory,
-				leafFrameFactory, typeTraits, comparatorFactories,
-				new BTreeDataflowHelperFactory(), fieldPermutation,
-				IndexOp.DELETE, txnId);
+				splitsAndConstraint.first, typeTraits, comparatorFactories,
+				fieldPermutation, IndexOp.DELETE,
+				new BTreeDataflowHelperFactory(),
+				NoOpOperationCallbackProvider.INSTANCE, txnId);
 		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(
 				btreeBulkLoad, splitsAndConstraint.second);
 	}
@@ -967,8 +939,6 @@
 			++i;
 		}
 
-		ITreeIndexFrameFactory interiorFrameFactory = createBTreeNSMInteriorFrameFactory(typeTraits);
-		ITreeIndexFrameFactory leafFrameFactory = createBTreeNSMLeafFrameFactory(typeTraits);
 		IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context
 				.getAppContext();
 		Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint;
@@ -982,10 +952,9 @@
 		TreeIndexInsertUpdateDeleteOperatorDescriptor btreeBulkLoad = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
 				spec, recordDesc, appContext.getStorageManagerInterface(),
 				appContext.getTreeRegisterProvider(),
-				splitsAndConstraint.first, interiorFrameFactory,
-				leafFrameFactory, typeTraits, comparatorFactories,
-				new BTreeDataflowHelperFactory(), fieldPermutation, indexOp,
-				txnId);
+				splitsAndConstraint.first, typeTraits, comparatorFactories,
+				fieldPermutation, indexOp, new BTreeDataflowHelperFactory(),
+				NoOpOperationCallbackProvider.INSTANCE, txnId);
 		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(
 				btreeBulkLoad, splitsAndConstraint.second);
 	}
@@ -1056,21 +1025,6 @@
 			++i;
 		}
 
-		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				new RTreeTypeAwareTupleWriterFactory(typeTraits),
-				valueProviderFactories);
-		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				new RTreeTypeAwareTupleWriterFactory(typeTraits),
-				valueProviderFactories);
-
-		/*
-		 * ITreeIndexFrameFactory interiorFrameFactory =
-		 * JobGenHelper.createRTreeNSMInteriorFrameFactory(typeTraits,
-		 * numSecondaryKeys); ITreeIndexFrameFactory leafFrameFactory =
-		 * JobGenHelper.createRTreeNSMLeafFrameFactory(typeTraits,
-		 * numSecondaryKeys);
-		 */
-
 		IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context
 				.getAppContext();
 		Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint;
@@ -1084,10 +1038,10 @@
 		TreeIndexInsertUpdateDeleteOperatorDescriptor rtreeUpdate = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
 				spec, recordDesc, appContext.getStorageManagerInterface(),
 				appContext.getTreeRegisterProvider(),
-				splitsAndConstraint.first, interiorFrameFactory,
-				leafFrameFactory, typeTraits, comparatorFactories,
-				new RTreeDataflowHelperFactory(), fieldPermutation, indexOp,
-				txnId);
+				splitsAndConstraint.first, typeTraits, comparatorFactories,
+				fieldPermutation, indexOp,
+				new RTreeDataflowHelperFactory(valueProviderFactories),
+				NoOpOperationCallbackProvider.INSTANCE, txnId);
 		return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(
 				rtreeUpdate, splitsAndConstraint.second);
 	}