RYA-405 PR Updates
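The hunks below repeat one migration pattern from the deprecated OpenRDF/Sesame API to RDF4J: `ValueFactoryImpl` becomes `SimpleValueFactory`, `URI` becomes `IRI`, and direct construction of `StatementImpl`/`ContextStatementImpl` is replaced by `ValueFactory.createStatement(...)`. A minimal, self-contained sketch of the target API usage follows; the class name and namespace are illustrative, not taken from the PR.

```java
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;

public class Rdf4jMigrationSketch {
    public static void main(String[] args) {
        // RDF4J replacement for the old ValueFactoryImpl singleton.
        ValueFactory vf = SimpleValueFactory.getInstance();

        // URI handles are now IRIs; createIRI(namespace, localName) builds one.
        String ns = "urn:test:litdups#";
        IRI subject = vf.createIRI(ns, "cpu");
        IRI predicate = vf.createIRI(ns, "loadPerc");
        IRI object = vf.createIRI(ns, "uri1");

        // StatementImpl / ContextStatementImpl are replaced by factory methods,
        // with an optional context (named graph) argument.
        Statement inDefaultGraph = vf.createStatement(subject, predicate, object);
        Statement inNamedGraph = vf.createStatement(subject, predicate, object,
                vf.createIRI(ns, "graph1"));

        System.out.println(inDefaultGraph);
        System.out.println(inNamedGraph);
    }
}
```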
diff --git a/README.md b/README.md
index fa79ef9..3daa347 100644
--- a/README.md
+++ b/README.md
@@ -157,7 +157,7 @@
 System.out.println(query);
 TupleQuery tupleQuery = conn.prepareTupleQuery(
         QueryLanguage.SPARQL, query);
-ValueFactory vf = ValueFactoryImpl.getInstance();
+ValueFactory vf = SimpleValueFactory.getInstance();
 
 TupleQueryResultHandler writer = new SPARQLResultsXMLWriter(System.out);
 tupleQuery.evaluate(writer);
diff --git a/common/rya.api/pom.xml b/common/rya.api/pom.xml
index 70bc691..13cd4cf 100644
--- a/common/rya.api/pom.xml
+++ b/common/rya.api/pom.xml
@@ -111,7 +111,6 @@
         <dependency>
             <groupId>org.eclipse.rdf4j</groupId>
             <artifactId>rdf4j-sail-base</artifactId>
-            <version>${org.eclipse.rdf4j.version}</version>
         </dependency>
     </dependencies>
 
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreUtils.java b/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreUtils.java
index acb0b91..91cb74e 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreUtils.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreUtils.java
@@ -42,9 +42,9 @@
 ////        if (value == null)
 ////            return new byte[]{};
 ////        ByteArrayDataOutput dataOut = ByteStreams.newDataOutput();
-////        if (value instanceof URI) {
+////        if (value instanceof IRI) {
 ////            dataOut.writeByte(RdfCloudTripleStoreConstants.URI_MARKER);
-////            writeString(((URI) value).toString(), dataOut);
+////            writeString(((IRI) value).toString(), dataOut);
 ////        } else if (value instanceof BNode) {
 ////            dataOut.writeByte(RdfCloudTripleStoreConstants.BNODE_MARKER);
 ////            writeString(((BNode) value).getID(), dataOut);
@@ -53,7 +53,7 @@
 ////
 ////            String label = lit.getLabel();
 ////            String language = lit.getLanguage();
-////            URI datatype = lit.getDatatype();
+////            IRI datatype = lit.getDatatype();
 ////
 ////            if (datatype != null) {
 ////                dataOut.writeByte(RdfCloudTripleStoreConstants.DATATYPE_LITERAL_MARKER);
@@ -100,7 +100,7 @@
 ////            ret = vf.createLiteral(label, language);
 ////        } else if (valueTypeMarker == RdfCloudTripleStoreConstants.DATATYPE_LITERAL_MARKER) {
 ////            String label = readString(dataIn);
-////            URI datatype = (URI) readValue(dataIn, vf);
+////            IRI datatype = (IRI) readValue(dataIn, vf);
 ////            ret = vf.createLiteral(label, datatype);
 ////        } else {
 ////            throw new InvalidValueTypeMarkerRuntimeException(valueTypeMarker, "Invalid value type marker: "
@@ -148,27 +148,27 @@
 
 //    public static Statement translateStatementFromRow(ByteArrayDataInput input, Text context, TABLE_LAYOUT tble, ValueFactory vf) throws IOException {
 //        Resource subject;
-//        URI predicate;
+//        IRI predicate;
 //        Value object;
 //        if (TABLE_LAYOUT.SPO.equals(tble)) {
 //            subject = (Resource) RdfCloudTripleStoreUtils.readValue(input, vf);
-//            predicate = (URI) RdfCloudTripleStoreUtils.readValue(input, vf);
+//            predicate = (IRI) RdfCloudTripleStoreUtils.readValue(input, vf);
 //            object = RdfCloudTripleStoreUtils.readValue(input, vf);
 //        } else if (TABLE_LAYOUT.OSP.equals(tble)) {
 //            object = RdfCloudTripleStoreUtils.readValue(input, vf);
 //            subject = (Resource) RdfCloudTripleStoreUtils.readValue(input, vf);
-//            predicate = (URI) RdfCloudTripleStoreUtils.readValue(input, vf);
+//            predicate = (IRI) RdfCloudTripleStoreUtils.readValue(input, vf);
 //        } else if (TABLE_LAYOUT.PO.equals(tble)) {
-//            predicate = (URI) RdfCloudTripleStoreUtils.readValue(input, vf);
+//            predicate = (IRI) RdfCloudTripleStoreUtils.readValue(input, vf);
 //            object = RdfCloudTripleStoreUtils.readValue(input, vf);
 //            subject = (Resource) RdfCloudTripleStoreUtils.readValue(input, vf);
 //        } else {
 //            throw new IllegalArgumentException("Table[" + tble + "] is not valid");
 //        }
 //        if (context == null || INFO_TXT.equals(context))
-//            return new StatementImpl(subject, predicate, object); //default graph
+//            return vf.createStatement(subject, predicate, object); //default graph
 //        else
-//            return new ContextStatementImpl(subject, predicate, object, (Resource) readValue(ByteStreams.newDataInput(context.getBytes()), vf)); //TODO: Seems like a perf hog
+//            return vf.createStatement(subject, predicate, object, (Resource) readValue(ByteStreams.newDataInput(context.getBytes()), vf)); //TODO: Seems like a perf hog
 //    }
 
 //    public static byte[] buildRowWith(byte[] bytes_one, byte[] bytes_two, byte[] bytes_three) throws IOException {
@@ -252,7 +252,7 @@
     }
 
     /**
-     * If value is a URI, then return as URI, otherwise return namespace/value as the URI
+     * If value is an IRI, then return it as an IRI; otherwise return namespace/value as the IRI
      *
      * @param namespace
      * @param value
@@ -315,7 +315,7 @@
 //    }
 
 
-//    public static void addTimeIndexUri(Configuration conf, URI timeUri, Class<? extends TtlValueConverter> ttlValueConvClass) {
+//    public static void addTimeIndexUri(Configuration conf, IRI timeUri, Class<? extends TtlValueConverter> ttlValueConvClass) {
 //        String[] timeIndexUris = conf.getStrings(RdfCloudTripleStoreConfiguration.CONF_TIMEINDEXURIS);
 //        if (timeIndexUris == null)
 //            timeIndexUris = new String[0];
@@ -327,7 +327,7 @@
 //        conf.set(timeUri_s, ttlValueConvClass.getName());
 //    }
 
-//    public static Class<? extends TtlValueConverter> getTtlValueConverter(Configuration conf, URI predicate) throws ClassNotFoundException {
+//    public static Class<? extends TtlValueConverter> getTtlValueConverter(Configuration conf, IRI predicate) throws ClassNotFoundException {
 //        if (predicate == null)
 //            return null;
 //
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/domain/Node.java b/common/rya.api/src/main/java/org/apache/rya/api/domain/Node.java
index a1e1468..7ecac9d 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/domain/Node.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/domain/Node.java
@@ -24,15 +24,15 @@
 import org.eclipse.rdf4j.model.impl.SimpleIRI;
 
 /**
- * A Node is an expected node in the global graph. This typing of the URI allows us to dictate the difference between a
- * URI that is just an Attribute on the subject vs. a URI that is another subject Node in the global graph. It does not
+ * A Node is an expected node in the global graph. This typing of the IRI allows us to distinguish between an
+ * IRI that is just an Attribute on the subject and an IRI that is another subject Node in the global graph. It does not
  * guarantee that the subject exists, just that there is an Edge to it.
  */
 public class Node extends SimpleIRI {
     public Node() {
     }
 
-    public Node(String uriString) {
-        super(uriString);
+    public Node(String iriString) {
+        super(iriString);
     }
 }
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/persist/index/RyaSecondaryIndexer.java b/common/rya.api/src/main/java/org/apache/rya/api/persist/index/RyaSecondaryIndexer.java
index 1bdc95b..2696e5b 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/persist/index/RyaSecondaryIndexer.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/persist/index/RyaSecondaryIndexer.java
@@ -31,10 +31,11 @@
 import org.eclipse.rdf4j.model.IRI;
 
 public interface RyaSecondaryIndexer extends Closeable, Flushable, Configurable {
-	/**
-	 * initialize after setting configuration.
-	 */
-	public void init();
+    /**
+     * Initialize after setting configuration.
+     */
+    public void init();
+
     /**
      * Returns the table name if the implementation supports it.
      * Note that some indexers use multiple tables, this only returns one.
diff --git a/common/rya.api/src/test/java/org/apache/rya/api/utils/RdfIOTest.java b/common/rya.api/src/test/java/org/apache/rya/api/utils/RdfIOTest.java
index 1a07184..83c1fd8 100644
--- a/common/rya.api/src/test/java/org/apache/rya/api/utils/RdfIOTest.java
+++ b/common/rya.api/src/test/java/org/apache/rya/api/utils/RdfIOTest.java
@@ -23,7 +23,7 @@
 //import junit.framework.TestCase;
 //import org.apache.rya.api.RdfCloudTripleStoreUtils;
 //import org.eclipse.rdf4j.model.Statement;
-//import org.eclipse.rdf4j.model.impl.StatementImpl;
+//import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
 //
 //import static org.apache.rya.api.RdfCloudTripleStoreConstants.*;
 //
@@ -34,7 +34,7 @@
 // */
 //public class RdfIOTest extends TestCase {
 //
-//    Statement st = new StatementImpl(RTS_SUBJECT, RTS_VERSION_PREDICATE, VERSION);
+//    Statement st = SimpleValueFactory.getInstance().createStatement(RTS_SUBJECT, RTS_VERSION_PREDICATE, VERSION);
 //    int num = 100000;
 //
 //    public void testPerf() throws Exception {
diff --git a/dao/accumulo.rya/pom.xml b/dao/accumulo.rya/pom.xml
index a90c30a..03f4dc7 100644
--- a/dao/accumulo.rya/pom.xml
+++ b/dao/accumulo.rya/pom.xml
@@ -57,7 +57,6 @@
         <dependency>
             <groupId>org.eclipse.rdf4j</groupId>
             <artifactId>rdf4j-sail-api</artifactId>
-            <version>${org.eclipse.rdf4j.version}</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>
diff --git a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactory.java b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactory.java
index 1609f2f..cc3ef43 100644
--- a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactory.java
+++ b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactory.java
@@ -28,7 +28,7 @@
 //import org.apache.hadoop.io.Text;
 //import org.eclipse.rdf4j.model.Value;
 //import org.eclipse.rdf4j.model.ValueFactory;
-//import org.eclipse.rdf4j.model.impl.ValueFactoryImpl;
+//import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
 //
 //import java.io.IOException;
 //import java.util.Map;
@@ -43,7 +43,7 @@
 // */
 //public class DefineTripleQueryRangeFactory {
 //
-//    ValueFactory vf = ValueFactoryImpl.getInstance();
+//    ValueFactory vf = SimpleValueFactory.getInstance();
 //
 //    protected void fillRange(ByteArrayDataOutput startRowOut, ByteArrayDataOutput endRowOut, Value val, boolean empty)
 //            throws IOException {
diff --git a/dao/accumulo.rya/src/test/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactoryTest.java b/dao/accumulo.rya/src/test/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
index cf2ae4f..6119ce7 100644
--- a/dao/accumulo.rya/src/test/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
+++ b/dao/accumulo.rya/src/test/java/org/apache/rya/accumulo/DefineTripleQueryRangeFactoryTest.java
@@ -23,14 +23,12 @@
 //import junit.framework.TestCase;
 //import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 //import org.apache.rya.accumulo.DefineTripleQueryRangeFactory;
-//import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-//import org.apache.rya.accumulo.DefineTripleQueryRangeFactory;
 //import org.apache.rya.api.domain.RangeValue;
 //import org.apache.accumulo.core.data.Range;
 //import org.eclipse.rdf4j.model.IRI;
 //import org.eclipse.rdf4j.model.Value;
 //import org.eclipse.rdf4j.model.ValueFactory;
-//import org.eclipse.rdf4j.model.impl.ValueFactoryImpl;
+//import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
 //
 //import java.util.Map;
 //
@@ -44,15 +42,15 @@
 //    public static final String URI_MARKER_STR = "\u0007";
 //    public static final String RANGE_ENDKEY_SUFFIX = "\u0000";
 //    DefineTripleQueryRangeFactory factory = new DefineTripleQueryRangeFactory();
-//    ValueFactory vf = ValueFactoryImpl.getInstance();
+//    ValueFactory vf = SimpleValueFactory.getInstance();
 //    static String litdupsNS = "urn:test:litdups#";
 //
 //    private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
 //
 //    public void testSPOCases() throws Exception {
-//        URI cpu = vf.createIRI(litdupsNS, "cpu");
-//        URI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
-//        URI obj = vf.createIRI(litdupsNS, "uri1");
+//        IRI cpu = vf.createIRI(litdupsNS, "cpu");
+//        IRI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
+//        IRI obj = vf.createIRI(litdupsNS, "uri1");
 //
 //        //spo
 //        Map.Entry<TABLE_LAYOUT, Range> entry =
@@ -96,12 +94,12 @@
 //    }
 //
 //    public void testSPOCasesWithRanges() throws Exception {
-//        URI subj_start = vf.createIRI(litdupsNS, "subj_start");
-//        URI subj_end = vf.createIRI(litdupsNS, "subj_stop");
-//        URI pred_start = vf.createIRI(litdupsNS, "pred_start");
-//        URI pred_end = vf.createIRI(litdupsNS, "pred_stop");
-//        URI obj_start = vf.createIRI(litdupsNS, "obj_start");
-//        URI obj_end = vf.createIRI(litdupsNS, "obj_stop");
+//        IRI subj_start = vf.createIRI(litdupsNS, "subj_start");
+//        IRI subj_end = vf.createIRI(litdupsNS, "subj_stop");
+//        IRI pred_start = vf.createIRI(litdupsNS, "pred_start");
+//        IRI pred_end = vf.createIRI(litdupsNS, "pred_stop");
+//        IRI obj_start = vf.createIRI(litdupsNS, "obj_start");
+//        IRI obj_end = vf.createIRI(litdupsNS, "obj_stop");
 //
 //        Value subj = new RangeValue(subj_start, subj_end);
 //        Value pred = new RangeValue(pred_start, pred_end);
@@ -146,8 +144,8 @@
 //    }
 //
 //    public void testPOCases() throws Exception {
-//        URI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
-//        URI obj = vf.createIRI(litdupsNS, "uri1");
+//        IRI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
+//        IRI obj = vf.createIRI(litdupsNS, "uri1");
 //
 //        //po
 //        Map.Entry<TABLE_LAYOUT, Range> entry =
@@ -171,10 +169,10 @@
 //    }
 //
 //    public void testPOCasesWithRanges() throws Exception {
-//        URI pred_start = vf.createIRI(litdupsNS, "pred_start");
-//        URI pred_end = vf.createIRI(litdupsNS, "pred_stop");
-//        URI obj_start = vf.createIRI(litdupsNS, "obj_start");
-//        URI obj_end = vf.createIRI(litdupsNS, "obj_stop");
+//        IRI pred_start = vf.createIRI(litdupsNS, "pred_start");
+//        IRI pred_end = vf.createIRI(litdupsNS, "pred_stop");
+//        IRI obj_start = vf.createIRI(litdupsNS, "obj_start");
+//        IRI obj_end = vf.createIRI(litdupsNS, "obj_stop");
 //
 //        Value pred = new RangeValue(pred_start, pred_end);
 //        Value obj = new RangeValue(obj_start, obj_end);
@@ -204,8 +202,8 @@
 //    }
 //
 //    public void testOSPCases() throws Exception {
-//        URI cpu = vf.createIRI(litdupsNS, "cpu");
-//        URI obj = vf.createIRI(litdupsNS, "uri1");
+//        IRI cpu = vf.createIRI(litdupsNS, "cpu");
+//        IRI obj = vf.createIRI(litdupsNS, "uri1");
 //
 //        //so
 //        Map.Entry<TABLE_LAYOUT, Range> entry =
@@ -230,10 +228,10 @@
 //
 //
 //    public void testOSPCasesWithRanges() throws Exception {
-//        URI subj_start = vf.createIRI(litdupsNS, "subj_start");
-//        URI subj_end = vf.createIRI(litdupsNS, "subj_stop");
-//        URI obj_start = vf.createIRI(litdupsNS, "obj_start");
-//        URI obj_end = vf.createIRI(litdupsNS, "obj_stop");
+//        IRI subj_start = vf.createIRI(litdupsNS, "subj_start");
+//        IRI subj_end = vf.createIRI(litdupsNS, "subj_stop");
+//        IRI obj_start = vf.createIRI(litdupsNS, "obj_start");
+//        IRI obj_end = vf.createIRI(litdupsNS, "obj_stop");
 //
 //        Value subj = new RangeValue(subj_start, subj_end);
 //        Value obj = new RangeValue(obj_start, obj_end);
diff --git a/extras/indexingExample/pom.xml b/extras/indexingExample/pom.xml
index 5740f08..abb03c3 100644
--- a/extras/indexingExample/pom.xml
+++ b/extras/indexingExample/pom.xml
@@ -76,7 +76,6 @@
         <dependency>
             <groupId>org.eclipse.rdf4j</groupId>
             <artifactId>rdf4j-queryrender</artifactId>
-            <version>${org.eclipse.rdf4j.version}</version>
         </dependency>
 
         <dependency>
diff --git a/extras/rya.geoindexing/geo.common/src/main/java/org/apache/rya/indexing/geotemporal/GeoTemporalIndexer.java b/extras/rya.geoindexing/geo.common/src/main/java/org/apache/rya/indexing/geotemporal/GeoTemporalIndexer.java
index 9db4bd7..37cb99d 100644
--- a/extras/rya.geoindexing/geo.common/src/main/java/org/apache/rya/indexing/geotemporal/GeoTemporalIndexer.java
+++ b/extras/rya.geoindexing/geo.common/src/main/java/org/apache/rya/indexing/geotemporal/GeoTemporalIndexer.java
@@ -23,7 +23,6 @@
 import org.apache.rya.indexing.geotemporal.storage.EventStorage;
 import org.eclipse.rdf4j.model.IRI;
 import org.eclipse.rdf4j.model.Statement;
-import org.eclipse.rdf4j.model.ValueFactory;
 import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
 
 /**
diff --git a/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/VisibilityBindingSetSerDe.java b/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/VisibilityBindingSetSerDe.java
index 3ba5e06..0dbc880 100644
--- a/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/VisibilityBindingSetSerDe.java
+++ b/extras/rya.indexing.pcj/src/main/java/org/apache/rya/indexing/pcj/storage/accumulo/VisibilityBindingSetSerDe.java
@@ -26,9 +26,7 @@
 
 import org.apache.commons.io.serialization.ValidatingObjectInputStream;
 import org.apache.fluo.api.data.Bytes;
-import org.apache.rya.api.model.BindingSetDecorator;
 import org.apache.rya.api.model.VisibilityBindingSet;
-import org.eclipse.rdf4j.query.AbstractBindingSet;
 
 import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
 import edu.umd.cs.findbugs.annotations.NonNull;
diff --git a/extras/rya.indexing.pcj/src/test/java/org/apache/rya/indexing/pcj/storage/accumulo/PcjTablesIT.java b/extras/rya.indexing.pcj/src/test/java/org/apache/rya/indexing/pcj/storage/accumulo/PcjTablesIT.java
index f142b77..be1a64d 100644
--- a/extras/rya.indexing.pcj/src/test/java/org/apache/rya/indexing/pcj/storage/accumulo/PcjTablesIT.java
+++ b/extras/rya.indexing.pcj/src/test/java/org/apache/rya/indexing/pcj/storage/accumulo/PcjTablesIT.java
@@ -82,7 +82,7 @@
     private static final String CLOUDBASE_INSTANCE = "sc.cloudbase.instancename";
     private static final String CLOUDBASE_USER = "sc.cloudbase.username";
     private static final String CLOUDBASE_PASSWORD = "sc.cloudbase.password";
-    private static final ValueFactory VF =  SimpleValueFactory.getInstance();
+    private static final ValueFactory VF = SimpleValueFactory.getInstance();
 
     private static final AccumuloPcjSerializer converter = new AccumuloPcjSerializer();
 
diff --git a/extras/rya.manual/src/site/markdown/querydata.md b/extras/rya.manual/src/site/markdown/querydata.md
index adc5d12..b18728d 100644
--- a/extras/rya.manual/src/site/markdown/querydata.md
+++ b/extras/rya.manual/src/site/markdown/querydata.md
@@ -110,7 +110,7 @@
 System.out.println(query);
 TupleQuery tupleQuery = conn.prepareTupleQuery(
         QueryLanguage.SPARQL, query);
-ValueFactory vf = ValueFactoryImpl.getInstance();
+ValueFactory vf = SimpleValueFactory.getInstance();
 
 TupleQueryResultHandler writer = new SPARQLResultsXMLWriter(System.out);
 tupleQuery.evaluate(new TupleQueryResultHandler() {
diff --git a/extras/rya.manual/src/site/markdown/sm-addauth.md b/extras/rya.manual/src/site/markdown/sm-addauth.md
index eec0949..077fe45 100644
--- a/extras/rya.manual/src/site/markdown/sm-addauth.md
+++ b/extras/rya.manual/src/site/markdown/sm-addauth.md
@@ -89,9 +89,9 @@
 
 //define and add statement
 String litdupsNS = "urn:test:litdups#";
-URI cpu = vf.createIRI(litdupsNS, "cpu");
-URI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
-URI uri1 = vf.createIRI(litdupsNS, "uri1");
+IRI cpu = vf.createIRI(litdupsNS, "cpu");
+IRI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
+IRI uri1 = vf.createIRI(litdupsNS, "uri1");
 conn.add(cpu, loadPerc, uri1);
 conn.commit();
 
diff --git a/extras/rya.manual/src/site/markdown/sm-infer.md b/extras/rya.manual/src/site/markdown/sm-infer.md
index 8021297..206f4fc 100644
--- a/extras/rya.manual/src/site/markdown/sm-infer.md
+++ b/extras/rya.manual/src/site/markdown/sm-infer.md
@@ -81,11 +81,11 @@
 First the code, which will load the following subclassof relationship: `UndergraduateStudent subclassof Student subclassof Person`. Then we will load into the tables three triples defining `UgradA rdf:type UndergraduateStudent, StudentB rdf:type Student, PersonC rdf:type Person`
 
 ``` JAVA
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "UndergraduateStudent"), RDFS.SUBCLASSOF, vf.createIRI(litdupsNS, "Student")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "Student"), RDFS.SUBCLASSOF, vf.createIRI(litdupsNS, "Person")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "UgradA"), RDF.TYPE, vf.createIRI(litdupsNS, "UndergraduateStudent")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "StudentB"), RDF.TYPE, vf.createIRI(litdupsNS, "Student")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "PersonC"), RDF.TYPE, vf.createIRI(litdupsNS, "Person")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "UndergraduateStudent"), RDFS.SUBCLASSOF, vf.createIRI(litdupsNS, "Student")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "Student"), RDFS.SUBCLASSOF, vf.createIRI(litdupsNS, "Person")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "UgradA"), RDF.TYPE, vf.createIRI(litdupsNS, "UndergraduateStudent")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "StudentB"), RDF.TYPE, vf.createIRI(litdupsNS, "Student")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "PersonC"), RDF.TYPE, vf.createIRI(litdupsNS, "Person")));
 conn.commit();
 ```
 
@@ -140,13 +140,13 @@
 Sample Code:
 
 ``` JAVA
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "degreeFrom")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "degreeFrom")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "memberOf")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "associatedWith")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "UgradA"), vf.createIRI(litdupsNS, "undergradDegreeFrom"), vf.createIRI(litdupsNS, "Harvard")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "GradB"), vf.createIRI(litdupsNS, "gradDegreeFrom"), vf.createIRI(litdupsNS, "Yale")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "ProfessorC"), vf.createIRI(litdupsNS, "memberOf"), vf.createIRI(litdupsNS, "Harvard")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "undergradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "degreeFrom")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "gradDegreeFrom"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "degreeFrom")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "degreeFrom"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "memberOf")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "memberOf"), RDFS.SUBPROPERTYOF, vf.createIRI(litdupsNS, "associatedWith")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "UgradA"), vf.createIRI(litdupsNS, "undergradDegreeFrom"), vf.createIRI(litdupsNS, "Harvard")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "GradB"), vf.createIRI(litdupsNS, "gradDegreeFrom"), vf.createIRI(litdupsNS, "Yale")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "ProfessorC"), vf.createIRI(litdupsNS, "memberOf"), vf.createIRI(litdupsNS, "Harvard")));
 conn.commit();
 ```
 
@@ -197,10 +197,10 @@
 Code:
 
 ``` JAVA
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createIRI(litdupsNS, "hasAlumnus")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "UgradA"), vf.createIRI(litdupsNS, "degreeFrom"), vf.createIRI(litdupsNS, "Harvard")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "GradB"), vf.createIRI(litdupsNS, "degreeFrom"), vf.createIRI(litdupsNS, "Harvard")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "Harvard"), vf.createIRI(litdupsNS, "hasAlumnus"), vf.createIRI(litdupsNS, "AlumC")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "degreeFrom"), OWL.INVERSEOF, vf.createIRI(litdupsNS, "hasAlumnus")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "UgradA"), vf.createIRI(litdupsNS, "degreeFrom"), vf.createIRI(litdupsNS, "Harvard")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "GradB"), vf.createIRI(litdupsNS, "degreeFrom"), vf.createIRI(litdupsNS, "Harvard")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "Harvard"), vf.createIRI(litdupsNS, "hasAlumnus"), vf.createIRI(litdupsNS, "AlumC")));
 conn.commit();
 ```
 
@@ -252,9 +252,9 @@
 Code:
 
 ``` JAVA
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "Bob"), vf.createIRI(litdupsNS, "friendOf"), vf.createIRI(litdupsNS, "Jeff")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "James"), vf.createIRI(litdupsNS, "friendOf"), vf.createIRI(litdupsNS, "Jeff")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "friendOf"), RDF.TYPE, OWL.SYMMETRICPROPERTY));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "Bob"), vf.createIRI(litdupsNS, "friendOf"), vf.createIRI(litdupsNS, "Jeff")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "James"), vf.createIRI(litdupsNS, "friendOf"), vf.createIRI(litdupsNS, "Jeff")));
 conn.commit();
 ```
 
@@ -302,12 +302,12 @@
 Code:
 
 ``` JAVA
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "Queens"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "NYC")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "NYC"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "NY")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "NY"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "US")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "US"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "NorthAmerica")));
-conn.add(new StatementImpl(vf.createIRI(litdupsNS, "NorthAmerica"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "World")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "subRegionOf"), RDF.TYPE, OWL.TRANSITIVEPROPERTY));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "Queens"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "NYC")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "NYC"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "NY")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "NY"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "US")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "US"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "NorthAmerica")));
+conn.add(vf.createStatement(vf.createIRI(litdupsNS, "NorthAmerica"), vf.createIRI(litdupsNS, "subRegionOf"), vf.createIRI(litdupsNS, "World")));
 conn.commit();
 ```
 
diff --git a/extras/rya.manual/src/site/markdown/sm-simpleaqr.md b/extras/rya.manual/src/site/markdown/sm-simpleaqr.md
index e6d03d4..6d0062e 100644
--- a/extras/rya.manual/src/site/markdown/sm-simpleaqr.md
+++ b/extras/rya.manual/src/site/markdown/sm-simpleaqr.md
@@ -53,9 +53,9 @@
 
 //define and add statement
 String litdupsNS = "urn:test:litdups#";
-URI cpu = vf.createIRI(litdupsNS, "cpu");
-URI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
-URI uri1 = vf.createIRI(litdupsNS, "uri1");
+IRI cpu = vf.createIRI(litdupsNS, "cpu");
+IRI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
+IRI uri1 = vf.createIRI(litdupsNS, "uri1");
 conn.add(cpu, loadPerc, uri1);
 conn.commit();
 
diff --git a/extras/rya.manual/src/site/markdown/sm-sparqlquery.md b/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
index ffa537a..5afbfdf 100644
--- a/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
+++ b/extras/rya.manual/src/site/markdown/sm-sparqlquery.md
@@ -53,11 +53,11 @@
 
 //define and add statements
 String litdupsNS = "urn:test:litdups#";
-URI cpu = vf.createIRI(litdupsNS, "cpu");
-URI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
-URI uri1 = vf.createIRI(litdupsNS, "uri1");
-URI pred2 = vf.createIRI(litdupsNS, "pred2");
-URI uri2 = vf.createIRI(litdupsNS, "uri2");
+IRI cpu = vf.createIRI(litdupsNS, "cpu");
+IRI loadPerc = vf.createIRI(litdupsNS, "loadPerc");
+IRI uri1 = vf.createIRI(litdupsNS, "uri1");
+IRI pred2 = vf.createIRI(litdupsNS, "pred2");
+IRI uri2 = vf.createIRI(litdupsNS, "uri2");
 conn.add(cpu, loadPerc, uri1);
 conn.commit();
 
diff --git a/extras/rya.pcj.fluo/pcj.fluo.client/pom.xml b/extras/rya.pcj.fluo/pcj.fluo.client/pom.xml
index 403b4eb..799a839 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.client/pom.xml
+++ b/extras/rya.pcj.fluo/pcj.fluo.client/pom.xml
@@ -74,7 +74,6 @@
         <dependency>
             <groupId>org.eclipse.rdf4j</groupId>
             <artifactId>rdf4j-rio-turtle</artifactId>
-            <version>${org.eclipse.rdf4j.version}</version>
         </dependency>
         
         <!-- Log4j 2 bridge, api, and core. -->        
diff --git a/extras/rya.reasoning/pom.xml b/extras/rya.reasoning/pom.xml
index 44042bc..ec604b8 100644
--- a/extras/rya.reasoning/pom.xml
+++ b/extras/rya.reasoning/pom.xml
@@ -72,7 +72,6 @@
         <dependency>
             <groupId>org.eclipse.rdf4j</groupId>
             <artifactId>rdf4j-rio-turtle</artifactId>
-            <version>${org.eclipse.rdf4j.version}</version>
         </dependency>
 
         <dependency>
diff --git a/extras/vagrantExample/src/main/vagrant/Vagrantfile b/extras/vagrantExample/src/main/vagrant/Vagrantfile
index 6b81abe..2330bd9 100644
--- a/extras/vagrantExample/src/main/vagrant/Vagrantfile
+++ b/extras/vagrantExample/src/main/vagrant/Vagrantfile
@@ -52,10 +52,12 @@
     export ACCUMULO_VERSION=1.6.5
     ###export ACCUMULO_VERSION=1.7.1
     export HADOOP_VERSION=2.7.2
-    export RYA_EXAMPLE_VERSION=3.2.13-SNAPSHOT
+    export RYA_EXAMPLE_VERSION=4.0.0-incubating
     export RDF4J_VERSION=2.3.1
     export ZOOKEEPER_VERSION=3.4.5-cdh4.5.0
-    
+
+    mavenRepoUrl=https://repo1.maven.org/maven2/
+
     echo "Updating host file with permanent ip"
 	sudo sed -i 's/127.0.1.1/192.168.33.10/' /etc/hosts
     cat >> /etc/hosts <<EOF
@@ -295,25 +297,25 @@
     sudo ln --force -s /usr/share/tomcat7/.RDF4J/Server/logs /var/log/tomcat7/rdf4j-server
 	rdf4jwar=/var/lib/tomcat7/webapps/rdf4j-server.war
 	if [[ ! -s $rdf4jwar ]] ; then 
-		echo "Downloading"
-		download --output $rdf4jwar http://repo1.maven.org/maven2/org/eclipse/rdf4j/rdf4j-http-server/${RDF4J_VERSION}/rdf4j-http-server-${RDF4J_VERSION}.war || exit 110
+		echo "Downloading RDF4J Server"
+		download --output $rdf4jwar ${mavenRepoUrl}org/eclipse/rdf4j/rdf4j-http-server/${RDF4J_VERSION}/rdf4j-http-server-${RDF4J_VERSION}.war || exit 110
 	fi
 	echo "RDF4J http server deployed at http://rya-example-box:8080/rdf4j-server"
 	
 	echo "Installing RDF4J Workbench"
 	workbench=/var/lib/tomcat7/webapps/rdf4j-workbench.war
 	if [[ ! -s $workbench ]] ; then 
-		echo "Downloading"
-		download --output $workbench http://repo1.maven.org/maven2/org/eclipse/rdf4j/rdf4j-http-workbench/${RDF4J_VERSION}/rdf4j-http-workbench-${RDF4J_VERSION}.war || exit 111
+		echo "Downloading RDF4J Workbench"
+		download --output $workbench ${mavenRepoUrl}org/eclipse/rdf4j/rdf4j-http-workbench/${RDF4J_VERSION}/rdf4j-http-workbench-${RDF4J_VERSION}.war || exit 111
 	fi
 	echo "RDF4J workbench deployed at http://rya-example-box:8080/rdf4j-workbench"
 
 	echo "Installing Rya"
 	ryaIndexing=rya.indexing.example-${RYA_EXAMPLE_VERSION}-distribution
 	if [[ ! -s ${ryaIndexing}.zip ]] ; then
-		# Right now it's on dropbox, but eventually it'll be on maven...
-		echo "Downloading quietly, this will take some minutes with no output..."
-		download --output ${ryaIndexing}.zip https://dl.dropbox.com/s/7e74yiuq4jmu0od/${ryaIndexing}.zip?raw=1  || exit 112
+		# Eventually it'll be on maven...
+		echo "Downloading ${ryaIndexing} quietly; this will take a few minutes with no output..."
+		download --output ${ryaIndexing}.zip ${mavenRepoUrl}org/apache/rya/rya.indexing.example/${RYA_EXAMPLE_VERSION}/${ryaIndexing}.zip || exit 112
 	fi
 	sudo mkdir --parents ${ryaIndexing}
 	sudo unzip -q -o ${ryaIndexing}.zip -d ${ryaIndexing}
@@ -338,8 +340,8 @@
 	echo "Downloading and installing new templates for RDF4J WorkBench"
 	ryaVagrant=rya.vagrant.example-${RYA_EXAMPLE_VERSION}
 	if [[ ! -s ${ryaVagrant}.jar ]] ; then
-		echo "Downloading"
-		download --output ${ryaVagrant}.jar  https://dl.dropbox.com/s/dgw63m66nubyy4z/${ryaVagrant}.jar?raw=1 || exit 120
+		echo "Downloading ${ryaVagrant}"
+		download --output ${ryaVagrant}.jar ${mavenRepoUrl}org/apache/rya/rya.vagrant.example/${RYA_EXAMPLE_VERSION}/${ryaVagrant}.jar || exit 120
 	fi
 	sudo mkdir --parents ${ryaVagrant}
 	sudo unzip -q -o ${ryaVagrant}.jar -d ${ryaVagrant}
@@ -349,8 +351,8 @@
 	echo "Deploying Rya Web"
 	ryaWar=web.rya-${RYA_EXAMPLE_VERSION}.war
 	if [[ ! -s ${ryaWar} ]] ; then
-		echo "Downloading"
-		download https://dl.dropbox.com/s/332wr4b2f34dp6e/${ryaWar}?raw=1 --output ${ryaWar} || exit 121
+		echo "Downloading ${ryaWar}"
+		download ${mavenRepoUrl}org/apache/rya/web.rya/${RYA_EXAMPLE_VERSION}/${ryaWar} --output ${ryaWar} || exit 121
 	fi
 	sudo cp ${ryaWar} /var/lib/tomcat7/webapps/web.rya.war
 	# Wait for the war to deploy
diff --git a/mapreduce/src/test/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountToolTest.java b/mapreduce/src/test/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountToolTest.java
index 30c6a7a..ae15b65 100644
--- a/mapreduce/src/test/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountToolTest.java
+++ b/mapreduce/src/test/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountToolTest.java
@@ -150,19 +150,19 @@
     }
 
 //    public void testMRObject() throws Exception {
-//        URI pred1 = VF.createIRI(litdupsNS, "pred1");
+//        IRI pred1 = VF.createIRI(litdupsNS, "pred1");
 //        Literal literal = VF.createLiteral(0);
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test0"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test1"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test2"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test3"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test4"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test5"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test6"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test7"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test8"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test9"), pred1, literal));
-//        dao.add(new StatementImpl(VF.createIRI(litdupsNS, "test10"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test0"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test1"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test2"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test3"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test4"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test5"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test6"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test7"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test8"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test9"), pred1, literal));
+//        dao.add(VF.createStatement(VF.createIRI(litdupsNS, "test10"), pred1, literal));
 //        dao.commit();
 //
 //        AccumuloRdfCountTool.main(new String[]{
diff --git a/pom.xml b/pom.xml
index f31873a..05a87f3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -577,6 +577,16 @@
                 <artifactId>rdf4j-repository-api</artifactId>
                 <version>${org.eclipse.rdf4j.version}</version>
             </dependency>
+            <dependency>
+                <groupId>org.eclipse.rdf4j</groupId>
+                <artifactId>rdf4j-sail-api</artifactId>
+                <version>${org.eclipse.rdf4j.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.eclipse.rdf4j</groupId>
+                <artifactId>rdf4j-sail-base</artifactId>
+                <version>${org.eclipse.rdf4j.version}</version>
+            </dependency>
 
             <dependency>
                 <groupId>com.google.guava</groupId>
diff --git a/sail/pom.xml b/sail/pom.xml
index 5fc159c..96c69e7 100644
--- a/sail/pom.xml
+++ b/sail/pom.xml
@@ -38,7 +38,6 @@
         <dependency>
             <groupId>org.eclipse.rdf4j</groupId>
             <artifactId>rdf4j-sail-api</artifactId>
-            <version>${org.eclipse.rdf4j.version}</version>
         </dependency>
 
         <dependency>
diff --git a/sail/src/test/java/org/apache/rya/RdfCloudTripleStoreUtilsTest.java b/sail/src/test/java/org/apache/rya/RdfCloudTripleStoreUtilsTest.java
index b1ba3fc..63d0ade 100644
--- a/sail/src/test/java/org/apache/rya/RdfCloudTripleStoreUtilsTest.java
+++ b/sail/src/test/java/org/apache/rya/RdfCloudTripleStoreUtilsTest.java
@@ -28,7 +28,7 @@
 //import org.eclipse.rdf4j.model.Resource;
 //import org.eclipse.rdf4j.model.IRI;
 //import org.eclipse.rdf4j.model.Value;
-//import org.eclipse.rdf4j.model.impl.ValueFactoryImpl;
+//import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
 //
 //import com.google.common.io.ByteStreams;
 //
@@ -37,8 +37,8 @@
 //public class RdfCloudTripleStoreUtilsTest extends TestCase {
 //
 //	public void testWriteReadURI() throws Exception {
-//		final ValueFactoryImpl vf = SimpleValueFactory.getInstance();
-//		URI uri = vf.createIRI("http://www.example.org/test/rel");
+//		final ValueFactory vf = SimpleValueFactory.getInstance();
+//		IRI uri = vf.createIRI("http://www.example.org/test/rel");
 //		byte[] value = writeValue(uri);
 //
 //		Value readValue = readValue(ByteStreams
@@ -47,7 +47,7 @@
 //	}
 //
 //	public void testWriteReadBNode() throws Exception {
-//		final ValueFactoryImpl vf = SimpleValueFactory.getInstance();
+//		final ValueFactory vf = SimpleValueFactory.getInstance();
 //		Value val = vf.createBNode("bnodeid");
 //		byte[] value = writeValue(val);
 //
@@ -57,7 +57,7 @@
 //	}
 //
 //	public void testWriteReadLiteral() throws Exception {
-//		final ValueFactoryImpl vf = SimpleValueFactory.getInstance();
+//		final ValueFactory vf = SimpleValueFactory.getInstance();
 //		Value val = vf.createLiteral("myliteral");
 //		byte[] value = writeValue(val);
 //
@@ -67,7 +67,7 @@
 //	}
 //
 //	public void testContexts() throws Exception {
-//		final ValueFactoryImpl vf = SimpleValueFactory.getInstance();
+//		final ValueFactory vf = SimpleValueFactory.getInstance();
 //		BNode cont1 = vf.createBNode("cont1");
 //		BNode cont2 = vf.createBNode("cont2");
 //		BNode cont3 = vf.createBNode("cont3");
diff --git a/web/web.rya/src/main/java/org/apache/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java b/web/web.rya/src/main/java/org/apache/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java
index a60440d..3944432 100644
--- a/web/web.rya/src/main/java/org/apache/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java
+++ b/web/web.rya/src/main/java/org/apache/cloud/rdf/web/cloudbase/sail/QueryDataServlet.java
@@ -23,7 +23,7 @@
 //import RdfCloudTripleStoreConstants;
 //import RdfCloudTripleStoreConstants;
 //import org.eclipse.rdf4j.model.ValueFactory;
-//import org.eclipse.rdf4j.model.impl.ValueFactoryImpl;
+//import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
 //import org.eclipse.rdf4j.query.GraphQuery;
 //import org.eclipse.rdf4j.query.QueryLanguage;
 //import org.eclipse.rdf4j.query.TupleQuery;