assorted bugfixes

- RyaStatementBindingSetCursorIterator: make locals final and log the
  aggregation pipeline at trace instead of info.
- MongoRyaDirectExample: use the embedded Mongo bind IP (falling back to
  127.0.0.1) instead of the resolved server address.
- RyaClientExample: change authorizations for the configured Accumulo
  user rather than the hard-coded "root".
- CopyTool: normalize the child table name before building import paths
  and reject names that fail to normalize.
- PortUtils: use SecureRandom when probing for a free port.
- RdfController: pass the named graph as a context when loading
  statements (it was collected but never used) and combine the
  query-time and result-count log statements.
- Mongo Spring configs: build the Sail through RyaSailFactory and use
  the current MongoDBRdfConfiguration property names.
diff --git a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
index de5e8b0..727538b 100644
--- a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
+++ b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/iter/RyaStatementBindingSetCursorIterator.java
@@ -48,7 +48,7 @@
 
 public class RyaStatementBindingSetCursorIterator implements CloseableIteration<Entry<RyaStatement, BindingSet>, RyaDAOException> {
     private static final Logger log = Logger.getLogger(RyaStatementBindingSetCursorIterator.class);
-    
+
     private static final int QUERY_BATCH_SIZE = 50;
 
     private final MongoCollection<Document> coll;
@@ -99,38 +99,38 @@
         if (!currentBatchQueryResultCursorIsValid()) {
             submitBatchQuery();
         }
-        
+
         if (currentBatchQueryResultCursorIsValid()) {
             // convert to Rya Statement
             final Document queryResult = batchQueryResultsIterator.next();
             final DBObject dbo = (DBObject) JSON.parse(queryResult.toJson());
             currentResultStatement = strategy.deserializeDBObject(dbo);
-            
+
             // Find all of the queries in the executed RangeMap that this result matches
             // and collect all of those binding sets
-            Set<BindingSet> bsList = new HashSet<>();
-            for (RyaStatement executedQuery : executedRangeMap.keys()) {
+            final Set<BindingSet> bsList = new HashSet<>();
+            for (final RyaStatement executedQuery : executedRangeMap.keys()) {
                 if (isResultForQuery(executedQuery, currentResultStatement)) {
                     bsList.addAll(executedRangeMap.get(executedQuery));
                 }
             }
             currentBindingSetIterator = bsList.iterator();
         }
-        
+
         // Handle case of invalid currentResultStatement or no binding sets returned
         if ((currentBindingSetIterator == null || !currentBindingSetIterator.hasNext()) && (currentBatchQueryResultCursorIsValid() || queryIterator.hasNext())) {
             findNextResult();
         }
     }
-    
-    private static boolean isResultForQuery(RyaStatement query, RyaStatement result) {
+
+    private static boolean isResultForQuery(final RyaStatement query, final RyaStatement result) {
         return isResult(query.getSubject(), result.getSubject()) &&
                 isResult(query.getPredicate(), result.getPredicate()) &&
                 isResult(query.getObject(), result.getObject()) &&
                 isResult(query.getContext(), result.getContext());
     }
-    
-    private static boolean isResult(RyaType query, RyaType result) {
+
+    private static boolean isResult(final RyaType query, final RyaType result) {
         return (query == null) || query.equals(result);
     }
 
@@ -142,7 +142,7 @@
 
         while (queryIterator.hasNext() && count < QUERY_BATCH_SIZE){
             count++;
-            RyaStatement query = queryIterator.next();
+            final RyaStatement query = queryIterator.next();
             executedRangeMap.putAll(query, rangeMap.get(query));
             final DBObject currentQuery = strategy.getQuery(query);
             match.add(currentQuery);
@@ -156,10 +156,10 @@
             batchQueryResultsIterator = Iterators.emptyIterator();
             return;
         }
-        
+
         // Executing redact aggregation to only return documents the user has access to.
         pipeline.addAll(AggregationUtil.createRedactPipeline(auths));
-        log.info(pipeline);
+        log.trace(pipeline);
 
         final AggregateIterable<Document> aggIter = coll.aggregate(pipeline);
         aggIter.batchSize(1000);
diff --git a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
index addbd18..21e5dea 100644
--- a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
+++ b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
@@ -296,8 +296,8 @@
         if (USE_MOCK) {
             final EmbeddedMongoFactory factory = EmbeddedMongoFactory.newFactory();
             final IMongoConfig connectionConfig = factory.getMongoServerDetails();
-            Net net = connectionConfig.net();
-            builder.setMongoHost(net.getServerAddress().getHostAddress())
+            final Net net = connectionConfig.net();
+            builder.setMongoHost(net.getBindIp() == null ? "127.0.0.1" : net.getBindIp())
                    .setMongoPort(net.getPort() + "");
         } else {
             // User name and password must be filled in:
diff --git a/extras/indexingExample/src/main/java/RyaClientExample.java b/extras/indexingExample/src/main/java/RyaClientExample.java
index 278f214..6b39774 100644
--- a/extras/indexingExample/src/main/java/RyaClientExample.java
+++ b/extras/indexingExample/src/main/java/RyaClientExample.java
@@ -99,7 +99,7 @@
 
             // Give the root user the 'U' authorizations.
             final Connector connector = cluster.getConnector(accumuloUsername, accumuloPassword);
-            connector.securityOperations().changeUserAuthorizations("root", new Authorizations("U"));
+            connector.securityOperations().changeUserAuthorizations(accumuloUsername, new Authorizations("U"));
 
             // Setup a Rya Client that is able to interact with the mini cluster.
             final AccumuloConnectionDetails connectionDetails =
diff --git a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java
index 6fc90ec..31edd68 100644
--- a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java
+++ b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java
@@ -629,14 +629,19 @@
      * @throws Exception
      */
     public void importFilesToChildTable(final String childTableName) throws Exception {
+        final String normalizedChildTableName = FilenameUtils.normalize(childTableName);
+        if (normalizedChildTableName == null) {
+            throw new Exception("Invalid child table name: " + childTableName);
+        }
+
         final Configuration childConfig = MergeToolMapper.getChildConfig(conf);
         final AccumuloRdfConfiguration childAccumuloRdfConfiguration = new AccumuloRdfConfiguration(childConfig);
         childAccumuloRdfConfiguration.setTablePrefix(childTablePrefix);
         final Connector childConnector = AccumuloRyaUtils.setupConnector(childAccumuloRdfConfiguration);
         final TableOperations childTableOperations = childConnector.tableOperations();
 
-        final Path localWorkDir = getPath(localCopyFileImportDir, childTableName);
-        final Path hdfsBaseWorkDir = getPath(baseImportDir, childTableName);
+        final Path localWorkDir = getPath(localCopyFileImportDir, normalizedChildTableName);
+        final Path hdfsBaseWorkDir = getPath(baseImportDir, normalizedChildTableName);
 
         final FileSystem fs = FileSystem.get(conf);
         if (fs.exists(hdfsBaseWorkDir)) {
@@ -661,7 +666,7 @@
         }
         fs.mkdirs(failures);
 
-        childTableOperations.importDirectory(childTableName, files.toString(), failures.toString(), false);
+        childTableOperations.importDirectory(normalizedChildTableName, files.toString(), failures.toString(), false);
     }
 
     /**
diff --git a/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java b/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java
index 7dad966..b7fc51f 100644
--- a/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java
+++ b/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java
@@ -16,14 +16,14 @@
 
 import java.io.IOException;
 import java.net.ServerSocket;
-import java.util.Random;
+import java.security.SecureRandom;
 
 public class PortUtils {
 
     private PortUtils() {}
 
     public static int getRandomFreePort() {
-        final Random r = new Random();
+        final SecureRandom r = new SecureRandom();
         int count = 0;
 
         while (count < 13) {
diff --git a/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java b/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java
index 209c2b4..ab539c4 100644
--- a/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java
+++ b/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java
@@ -212,8 +212,9 @@
             final CountingTupleQueryResultHandlerWrapper sparqlWriter = new CountingTupleQueryResultHandlerWrapper(handler);
             final long startTime = System.currentTimeMillis();
             tupleQuery.evaluate(sparqlWriter);
-            log.info(String.format("Query Time = %.3f\n", (System.currentTimeMillis() - startTime) / 1000.));
-            log.info(String.format("Result Count = %s\n", sparqlWriter.getCount()));
+            log.info(String.format("Query Time = %.3f   Result Count = %s\n",
+                                   (System.currentTimeMillis() - startTime) / 1000.,
+                                   sparqlWriter.getCount()));
         }
 
     }
@@ -329,7 +330,6 @@
                         @RequestBody final String body,
                         final HttpServletResponse response)
             throws RepositoryException, IOException, RDFParseException {
-        final List<Resource> authList = new ArrayList<Resource>();
         RDFFormat format_r = RDFFormat.RDFXML;
         if (format != null) {
             format_r = RDFFormat.valueOf(format);
@@ -337,8 +337,11 @@
                 throw new RuntimeException("RDFFormat[" + format + "] not found");
             }
         }
+
+        // add named graph as context (if specified).
+        final List<Resource> contextList = new ArrayList<Resource>();
         if (graph != null) {
-            authList.add(VALUE_FACTORY.createURI(graph));
+            contextList.add(VALUE_FACTORY.createURI(graph));
         }
         SailRepositoryConnection conn = null;
         try {
@@ -349,7 +352,7 @@
                 sailConnection.getConf().set(RdfCloudTripleStoreConfiguration.CONF_CV, cv);
             }
 
-            conn.add(new StringReader(body), "", format_r);
+            conn.add(new StringReader(body), "", format_r, contextList.toArray(new Resource[contextList.size()]));
             conn.commit();
         } finally {
             if (conn != null) {
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb-geo.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb-geo.xml
index 5780314..ba124fb 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb-geo.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb-geo.xml
@@ -36,34 +36,17 @@
 
     <bean id="conf" class="org.apache.rya.mongodb.MongoDBRdfConfiguration">
         <constructor-arg ref="hadoopConf"/>
-        <property name="collectionName" value="${mongo.db.collectionprefix}"/>
+        <property name="ryaInstanceName" value="${mongo.db.collectionprefix}"/>
         <property name="tablePrefix" value="${mongo.db.collectionprefix}"/>
-        <property name="mongoInstance" value="${mongo.db.instance}"/>
+        <property name="mongoHostname" value="${mongo.db.instance}"/>
         <property name="mongoPort" value="${mongo.db.port}"/>
         <property name="mongoDBName" value="${mongo.db.name}"/>
         <property name="displayQueryPlan" value="${rya.displayqueryplan}"/>
         <property name="useStats" value="false"/>
-        <property name="additionalIndexers">
-            <list>
-            <value type="java.lang.Class">org.apache.rya.indexing.mongodb.geo.MongoGeoIndexer</value>
-            </list>
-        </property>
-        <property name="optimizers">
-            <list>
-            <value type="java.lang.Class">org.apache.rya.indexing.GeoEnabledFilterFunctionOptimizer</value>
-            </list>
-        </property>
     </bean>
-
-
-    <bean id="mongoClient" class="org.apache.rya.mongodb.MongoConnectorFactory" factory-method="getMongoClient">
+
+    <bean id="sail" class="org.apache.rya.sail.config.RyaSailFactory" factory-method="getInstance">
         <constructor-arg ref="conf"/>
     </bean>
 
-	<bean id="ryaDAO" class="org.apache.rya.mongodb.MongoDBRyaDAO" init-method="init" destroy-method="destroy">
-        <constructor-arg ref="conf"/>
-        <constructor-arg ref="mongoClient"/>
-    </bean>
-
-
 </beans>
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml
index e99ec5f..ba124fb 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-mongodb.xml
@@ -20,24 +20,33 @@
 -->
 
 <beans xmlns="http://www.springframework.org/schema/beans"
-	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
-	xmlns:context="http://www.springframework.org/schema/context"
-	xmlns:oxm="http://www.springframework.org/schema/oxm"
-	xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
-				http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-2.5.xsd
-				http://www.springframework.org/schema/oxm http://www.springframework.org/schema/oxm/spring-oxm-3.0.xsd">
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"
+    xmlns:context="http://www.springframework.org/schema/context"
+    xmlns:oxm="http://www.springframework.org/schema/oxm"
+    xmlns:hdp="http://www.springframework.org/schema/hadoop"
+    xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
+                http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-2.5.xsd
+                http://www.springframework.org/schema/oxm http://www.springframework.org/schema/oxm/spring-oxm-3.0.xsd
+                http://www.springframework.org/schema/hadoop http://www.springframework.org/schema/hadoop/spring-hadoop.xsd">
 
 
+    <hdp:configuration id="hadoopConf">
+        sc.useMongo=true
+    </hdp:configuration>
+
     <bean id="conf" class="org.apache.rya.mongodb.MongoDBRdfConfiguration">
-        <property name="collectionName" value="${mongo.db.collectionprefix}"/>
-        <property name="mongoInstance" value="${mongo.db.instance}"/>
+        <constructor-arg ref="hadoopConf"/>
+        <property name="ryaInstanceName" value="${mongo.db.collectionprefix}"/>
+        <property name="tablePrefix" value="${mongo.db.collectionprefix}"/>
+        <property name="mongoHostname" value="${mongo.db.instance}"/>
         <property name="mongoPort" value="${mongo.db.port}"/>
         <property name="mongoDBName" value="${mongo.db.name}"/>
         <property name="displayQueryPlan" value="${rya.displayqueryplan}"/>
         <property name="useStats" value="false"/>
     </bean>
-
-	<bean id="ryaDAO" class="org.apache.rya.mongodb.MongoDBRyaDAO" init-method="init" destroy-method="destroy">
+
+    <bean id="sail" class="org.apache.rya.sail.config.RyaSailFactory" factory-method="getInstance">
         <constructor-arg ref="conf"/>
     </bean>
+
 </beans>
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb-geo.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb-geo.xml
index 43fc319..92b3692 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb-geo.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb-geo.xml
@@ -36,18 +36,7 @@
 	<import resource="spring-security.xml"/>	
 	<import resource="spring-mongodb-geo.xml"/>
 
-	<bean id="inferenceEngine" class="org.apache.rya.rdftriplestore.inference.InferenceEngine" init-method="init" destroy-method="destroy">
-        <property name="ryaDAO" ref="ryaDAO"/>
-        <property name="conf" ref="conf"/>
-	</bean>
-    
-	<bean id="rts" class="org.apache.rya.rdftriplestore.RdfCloudTripleStore">
-        <property name="ryaDAO" ref="ryaDAO"/>
-        <property name="inferenceEngine" ref="inferenceEngine"/>
-        <property name="conf" ref="conf"/>
-	</bean>
-
-	<bean id="sailRepo" class="org.apache.rya.rdftriplestore.RyaSailRepository" init-method="initialize" destroy-method="shutDown">
-        <constructor-arg ref="rts"/>
-	</bean>
+    <bean id="sailRepo" class="org.apache.rya.rdftriplestore.RyaSailRepository" destroy-method="shutDown">
+        <constructor-arg ref="sail"/>
+    </bean>
 </beans>
diff --git a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb.xml b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb.xml
index 321890c..9f94aa3 100644
--- a/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb.xml
+++ b/web/web.rya/src/main/webapp/WEB-INF/spring/spring-root-mongodb.xml
@@ -36,18 +36,7 @@
 	<import resource="spring-security.xml"/>	
 	<import resource="spring-mongodb.xml"/>
 
-	<bean id="inferenceEngine" class="org.apache.rya.rdftriplestore.inference.InferenceEngine" init-method="init" destroy-method="destroy">
-        <property name="ryaDAO" ref="ryaDAO"/>
-        <property name="conf" ref="conf"/>
-	</bean>
-    
-	<bean id="rts" class="org.apache.rya.rdftriplestore.RdfCloudTripleStore">
-        <property name="ryaDAO" ref="ryaDAO"/>
-        <property name="inferenceEngine" ref="inferenceEngine"/>
-        <property name="conf" ref="conf"/>
-	</bean>
-
-	<bean id="sailRepo" class="org.apache.rya.rdftriplestore.RyaSailRepository" init-method="initialize" destroy-method="shutDown">
-        <constructor-arg ref="rts"/>
-	</bean>
+    <bean id="sailRepo" class="org.apache.rya.rdftriplestore.RyaSailRepository" destroy-method="shutDown">
+        <constructor-arg ref="sail"/>
+    </bean>
 </beans>