STANBOL-1450: Updated to use clerezza 1.0 rdf api

git-svn-id: https://svn.apache.org/repos/asf/stanbol/trunk@1744328 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedGraph.java b/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedGraph.java
index c2c4322..0345b23 100644
--- a/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedGraph.java
+++ b/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedGraph.java
@@ -1,75 +1,518 @@
 /*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.stanbol.commons.indexedgraph;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.SortedSet;
+import java.util.TreeSet;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.AbstractGraph;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.AbstractGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
- * {@link Graph} implementation that internally uses a {@link IndexedTripleCollection}
- * to hold the RDF graph.
- * @author rwesten
+ * {@link Graph} implementation that maintains indexes for <ul>
+ * <li> subject, predicate, object [SPO]
+ * <li> predicate, object, subject [POS]
+ * <li> object, subject, predicate [OSP]
+ * </ul>
+ * Indexes are maintained in {@link TreeSet}s with corresponding {@link Comparator}
+ * instances ({@link #spoComparator}, {@link #posComparator},
+ * {@link #ospComparator}). {@link RDFTerm}s are compared first by their
+ * {@link RDFTerm#hashCode()} and, only if the hash codes match, by their
+ * {@link RDFTerm#toString()} representation.<p>
+ * The {@link #filter(BlankNodeOrIRI, IRI, RDFTerm)} implementation is based on
+ * {@link TreeSet#subSet(Object, Object)}. All returned Iterators directly
+ * operate on top of one of the internal indexes.
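+ * <p>
+ * Usage (a minimal sketch; {@code s}, {@code p} and {@code o} stand for
+ * arbitrary {@link BlankNodeOrIRI}, {@link IRI} and {@link RDFTerm}
+ * instances):
+ * <pre>
+ *   Graph g = new IndexedGraph();
+ *   g.add(new TripleImpl(s, p, o));
+ *   Iterator&lt;Triple&gt; it = g.filter(s, null, null); // answered from the SPO index
+ * </pre>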
  *
+ * @author rwesten
  */
 public class IndexedGraph extends AbstractGraph implements Graph {
 
-    private final TripleCollection tripleCollection;
-    
-    /**
-     * Creates a graph with the triples in tripleCollection
-     * 
-     * @param tripleCollection the collection of triples this Graph shall consist of
-     */
-    public IndexedGraph(TripleCollection tripleCollection) {
-        this.tripleCollection = new IndexedTripleCollection(tripleCollection);
+    private static final Logger log = LoggerFactory.getLogger(IndexedGraph.class);
+
+    @Override
+    public ImmutableGraph getImmutableGraph() {
+        return new IndexedImmutableGraph(this);
     }
 
     /**
-     * Create a graph with the triples provided by the Iterator
-     * @param tripleIter the iterator over the triples
+     * This map is used to ensure a constant ordering for {@link BlankNode}s
+     * that have the same hashCode (and therefore also the same
+     * {@link BlankNode#toString()} value).
      */
-    public IndexedGraph(Iterator<Triple> tripleIter) {
-        this.tripleCollection = new IndexedTripleCollection(tripleIter);
+    private final Map<Integer, List<RDFTerm>> hashCodeConflictMap = new HashMap<Integer, List<RDFTerm>>();
+    /**
+     * Compares Triples based on Subject, Predicate, Object
+     */
+    private final Comparator<Triple> spoComparator = new Comparator<Triple>() {
+
+        @Override
+        public int compare(Triple a, Triple b) {
+            int c = IndexedGraph.compare(a.getSubject(), b.getSubject(), hashCodeConflictMap);
+            if (c == 0) {
+                c = IndexedGraph.compare(a.getPredicate(), b.getPredicate(), hashCodeConflictMap);
+                if (c == 0) {
+                    c = IndexedGraph.compare(a.getObject(), b.getObject(), hashCodeConflictMap);
+                }
+            }
+            return c;
+        }
+    };
+    /**
+     * The SPO index
+     */
+    private final NavigableSet<Triple> spo = new TreeSet<Triple>(spoComparator);
+    /**
+     * Compares Triples based on Predicate, Object, Subject
+     */
+    private final Comparator<Triple> posComparator = new Comparator<Triple>() {
+
+        @Override
+        public int compare(Triple a, Triple b) {
+            int c = IndexedGraph.compare(a.getPredicate(), b.getPredicate(), hashCodeConflictMap);
+            if (c == 0) {
+                c = IndexedGraph.compare(a.getObject(), b.getObject(), hashCodeConflictMap);
+                if (c == 0) {
+                    c = IndexedGraph.compare(a.getSubject(), b.getSubject(), hashCodeConflictMap);
+                }
+            }
+            return c;
+        }
+    };
+    /**
+     * The POS index
+     */
+    private final NavigableSet<Triple> pos = new TreeSet<Triple>(posComparator);
+    /**
+     * Compares Triples based on Object, Subject, Predicate
+     */
+    private final Comparator<Triple> ospComparator = new Comparator<Triple>() {
+
+        @Override
+        public int compare(Triple a, Triple b) {
+            int c = IndexedGraph.compare(a.getObject(), b.getObject(), hashCodeConflictMap);
+            if (c == 0) {
+                c = IndexedGraph.compare(a.getSubject(), b.getSubject(), hashCodeConflictMap);
+                if (c == 0) {
+                    c = IndexedGraph.compare(a.getPredicate(), b.getPredicate(), hashCodeConflictMap);
+                }
+            }
+            return c;
+        }
+    };
+    /**
+     * The OSP index
+     */
+    private final NavigableSet<Triple> osp = new TreeSet<Triple>(ospComparator);
+
+    /**
+     * Creates an empty {@link IndexedGraph}
+     */
+    public IndexedGraph() {
+        super();
     }
-//    /**
-//     * Create a read-only {@link Graph} wrapper over the provided 
-//     * {@link TripleCollection}
-//     * @param tripleCollection the indexed triple collection create a read-only
-//     * wrapper around
-//     */
-//    protected IndexedGraph(IndexedTripleCollection tripleCollection){
-//        this.tripleCollection = tripleCollection;
+
+    /**
+     * Creates an {@link IndexedGraph} using the passed iterator; the iterator
+     * is fully consumed before the constructor returns.
+     *
+     * @param iterator the iterator over the triples to add
+     */
+    public IndexedGraph(Iterator<Triple> iterator) {
+        super();
+        while (iterator.hasNext()) {
+            Triple triple = iterator.next();
+            performAdd(triple);
+        }
+    }
+
+    /**
+     * Creates an {@link IndexedGraph} over the specified collection of
+     * triples; subsequent modifications of baseCollection do not affect the
+     * created instance.
+     *
+     * @param baseCollection the collection of triples
+     */
+    public IndexedGraph(Collection<Triple> baseCollection) {
+        super();
+        spo.addAll(baseCollection);
+        //use the internal SPO index to fill the other indexes, as iterating
+        //over the passed collection might be slow
+        pos.addAll(spo);
+        osp.addAll(spo);
+    }
+
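+    /**
+     * Selects the index whose ordering has the bound terms as prefix: SPO
+     * for [S,n,n], [S,P,n] and [S,P,O], POS for [n,P,n] and [n,P,O], and
+     * OSP for [n,n,O] and [S,n,O]; the filter is then answered as a range
+     * query between the {@link #MIN}/{@link #MAX} sentinel bounds.
+     */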
+    @Override
+    protected Iterator<Triple> performFilter(final BlankNodeOrIRI subject, final IRI predicate, final RDFTerm object) {
+        if (subject == null && predicate == null && object == null) { //[n,n,n]
+            return createIterator(spo, spo.iterator());
+        }
+        final Triple low = new TripleImpl(
+                subject == null ? MIN : subject,
+                predicate == null ? MIN : predicate,
+                object == null ? MIN : object);
+        final Triple high = new TripleImpl(
+                subject == null ? MAX : subject,
+                predicate == null ? MAX : predicate,
+                object == null ? MAX : object);
+        if (subject != null && predicate != null && object != null) { // [S,P,O]
+            //NOTE: low.equals(high) in that case!
+            return createIterator(spo, spo.subSet(low, true, low, true).iterator());
+        } else if (subject != null && object == null) { //[S,n,n], [S,P,n] 
+            return createIterator(spo, spo.subSet(low, high).iterator());
+        } else if (predicate != null) { //[n,P,n], [n,P,O]
+            return createIterator(pos, pos.subSet(low, high).iterator());
+        } else { //[n,n,O] , [S,n,O]
+            return createIterator(osp, osp.subSet(low, high).iterator());
+        }
+    }
+
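+    /**
+     * Adds the triple to the SPO index first; the other two indexes are only
+     * updated if the triple was not already present in SPO.
+     */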
+    @Override
+    protected boolean performAdd(Triple triple) {
+        if (spo.add(triple)) {
+            osp.add(triple);
+            return pos.add(triple);
+        }
+        return false;
+    }
+
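+    /**
+     * Removes the triple from the SPO index first; the other two indexes are
+     * only updated if the triple was actually present in SPO.
+     */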
+    @Override
+    protected boolean performRemove(Object t) {
+        if (t instanceof Triple) {
+            Triple triple = (Triple) t;
+            if (spo.remove(triple)) {
+                osp.remove(triple);
+                return pos.remove(triple);
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public int performSize() {
+        return spo.size();
+    }
+//    @Override
+//    public Iterator<Triple> iterator() {
+//        return createIterator(spo, spo.iterator());
 //    }
-    
-    @Override
-    protected Iterator<Triple> performFilter(NonLiteral subject, UriRef predicate, Resource object) {
-        return tripleCollection.filter(subject, predicate, object);
+
+    /**
+     * Returns an Iterator that ensures that calls to {@link Iterator#remove()}
+     * remove items from all three indexes.
+     *
+     * @param index the index the base iterator operates on
+     * @param base the iterator over that index
+     * @return the wrapping iterator
+     */
+    private Iterator<Triple> createIterator(final SortedSet<Triple> index, final Iterator<Triple> base) {
+        return new Iterator<Triple>() {
+            Triple current = null;
+
+            @Override
+            public boolean hasNext() {
+                return base.hasNext();
+            }
+
+            @Override
+            public Triple next() {
+                current = base.next();
+                return current;
+            }
+
+            @Override
+            public void remove() {
+                base.remove();
+                if (current != null) {
+                    if (!(index == spo)) {
+                        spo.remove(current);
+                    }
+                    if (!(index == pos)) {
+                        pos.remove(current);
+                    }
+                    if (!(index == osp)) {
+                        osp.remove(current);
+                    }
+                }
+            }
+        };
+
     }
 
-    
-    @Override
-    public int size() {
-        return tripleCollection.size();
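+    /**
+     * Sentinel IRIs used as lower/upper bounds for the
+     * {@link SortedSet#subSet(Object, Object)} range queries in performFilter:
+     * their hashCodes make {@link #compare(RDFTerm, RDFTerm, Map)} sort them
+     * below/above every other {@link RDFTerm}.
+     */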
+    protected static IRI MIN = new IRI("") {
+        @Override
+        public int hashCode() {
+            return Integer.MIN_VALUE;
+        }
+    ;
+    };
+    protected static IRI MAX = new IRI("") {
+        @Override
+        public int hashCode() {
+            return Integer.MAX_VALUE;
+        }
+    ;
+
+    };
+
+//    /**
+//     * Compares two resources with special support for {@link #MIN} and
+//     * {@link #MAX} to allow building {@link SortedSet#subSet(Object, Object)}
+//     * for <code>null</code> values parsed to 
+//     * {@link #filter(BlankNodeOrIRI, IRI, RDFTerm)}
+//     * @param a
+//     * @param b
+//     * @return
+//     */
+//    protected static int compareHash(RDFTerm a, RDFTerm b, Map<Integer,List<RDFTerm>> confictsMap) {
+//        int hashA = a.hashCode();
+//        int hashB = b.hashCode();
+//        if (hashA != hashB) {
+//            return hashA > hashB ? 1 : -1;
+//        }
+//        //those resources might be equals
+//        //(1) Check for MIN, MAX (used to build sub-sets). Other resources might
+//        //    have a similar hasCode
+//        int state = a == MIN || b == MAX ? -1 :
+//            a == MAX || b == MIN ? 1 : 0;
+//        if(state == 0){
+//            if(a.equals(b)){ //check of the resources are equals
+//                return 0; //return zero
+//            } else if(//we need to care about HashCode conflicts 
+//                a instanceof BlankNode && b instanceof BlankNode){ // of BlankNodes
+//                log.info("HashCode conflict for {} and {}",a,b); //we have a conflict
+//                return resolveBlankNodeHashConflict(a, b, confictsMap);
+//            } else { //same hashCode but not equals
+//                //use the String representation of the Resources to sort them
+//                String as = resource2String(a);
+//                String bs = resource2String(b);
+//                log.info("same hash code {} - compare Strings a: {}, b: {}",
+//                    new Object[]{a.hashCode(),as,bs});
+//                return as.compareTo(bs);
+//            }
+//        }
+//       return state;
+//    }
+
+    /**
+     * Resolves BlankNode hashCode conflicts by storing the correct order for
+     * the affected {@link Integer} in a {@link List} of RDFTerm instances.
+     * @param a the first {@link BlankNode}
+     * @param b the second {@link BlankNode}
+     * @param confictsMap the Map used to store the order of BlankNodes with conflicts
+     * @return the decision taken based on the confictsMap.
+     */
+    private static int resolveBlankNodeHashConflict(RDFTerm a, RDFTerm b,
+            Map<Integer, List<RDFTerm>> confictsMap) {
+        //This is not a bad thing: we just need to ensure a constant ordering,
+        //and as there is nothing else we can use to distinguish the nodes we
+        //keep this information in a list.
+        Integer hash = Integer.valueOf(a.hashCode());
+        List<RDFTerm> resources = confictsMap.get(hash);
+        if (resources == null) { //new conflict ... just add and return
+            resources = new ArrayList<RDFTerm>(2);
+            confictsMap.put(hash, resources);
+            resources.add(a);
+            resources.add(b);
+            return -1;
+        }
+        //already conflicting resource for this hash present
+        int aIndex = -1;
+        int bIndex = -1;
+        for (int i = 0; i < resources.size() && (aIndex < 0 || bIndex < 0); i++) {
+            RDFTerm r = resources.get(i);
+            if (aIndex < 0 && r.equals(a)) {
+                aIndex = i;
+            }
+            if (bIndex < 0 && r.equals(b)) {
+                bIndex = i;
+            }
+        }
+        if (aIndex < 0) { //a not found
+            aIndex = resources.size();
+            resources.add(a);
+        }
+        if (bIndex < 0) { //b not found
+            bIndex = resources.size();
+            resources.add(b);
+        }
+        return aIndex < bIndex ? -1 : 1;
+    }
+
+    /**
+     * Compares RDFTerms to correctly sort them within the index.<p>
+     * Sort criteria are:<ol>
+     * <li> IRIs are sorted by their {@link IRI#getUnicodeString() unicode
+     * string}
+     * <li> Literals
+     * <ol>
+     * <li> sort by the {@link Literal#getLexicalForm() lexical form}
+     * <li> sort by the {@link Literal#getLanguage() language}
+     * (<code>null</code> values first)
+     * <li> sort by the {@link Literal#getDataType() data type}
+     * (<code>null</code> values first)
+     * </ol>
+     * <li> BlankNodes
+     * <ol>
+     * <li> sorted by their {@link Object#hashCode() hashCode}
+     * <li> on hashCode conflicts (same hashCode but not equal) a random order
+     * is chosen and kept in the passed conflictsMap
+     * </ol>
+     * </ol>
+     * <b>NOTEs</b><ul>
+     * <li> passed {@link RDFTerm}s are not required to correctly implement
+     * {@link Object#hashCode() hashCode} and
+     * {@link Object#equals(Object) equals}
+     * <li> passed {@link IRI}, {@link BlankNode} and {@link Literal} instances
+     * MUST NOT extend/implement any of the other interfaces. This means that
+     * an {@link IRI} MUST NOT implement {@link BlankNode} nor {@link Literal}
+     * </ul>
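+     * <p>
+     * Illustrative ordering under these rules (IRIs before Literals before
+     * BlankNodes; {@code PlainLiteralImpl} is the Clerezza
+     * <code>impl.utils</code> implementation):
+     * <pre>
+     *   new IRI("urn:a")  &lt;  new PlainLiteralImpl("urn:a")  &lt;  new BlankNode()
+     * </pre>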
+     *
+     * @param a the first resource to compare
+     * @param b the second resource to compare
+     * @param confictsMap the map used to resolve BlankNodes with hashCode
+     * conflicts
+     * @return the comparison result
+     */
+    protected static int compare(RDFTerm a, RDFTerm b, Map<Integer, List<RDFTerm>> confictsMap) {
+        //Handle special cases for MAX and MIN values
+        if (a == MIN || b == MAX) {
+            return -1;
+        } else if (a == MAX || b == MIN) {
+            return 1;
+        }
+        //sort (0) IRIs < (1) Literals < (2) BlankNodes
+        int at = a instanceof IRI ? 0 : a instanceof Literal ? 1 : 2;
+        int bt = b instanceof IRI ? 0 : b instanceof Literal ? 1 : 2;
+        if (at == bt) { //same type: sort within that type
+            if (at < 2) { //no BlankNode
+                //sort in alphabetic order of the string representation
+                String as = at == 0 ? ((IRI) a).getUnicodeString()
+                        : ((Literal) a).getLexicalForm();
+                String bs = bt == 0 ? ((IRI) b).getUnicodeString()
+                        : ((Literal) b).getLexicalForm();
+                int sc = as.compareTo(bs);
+                if (sc == 0 && at == 1) { //same string value and Literals
+                    //check if the language and types are the same
+                    Language al = a instanceof Literal ? ((Literal) a).getLanguage() : null;
+                    Language bl = b instanceof Literal ? ((Literal) b).getLanguage() : null;
+                    //first try to sort by language
+                    if (al == null) {
+                        sc = bl == null ? 0 : -1;
+                    } else if (bl == null) {
+                        sc = 1;
+                    } else {
+                        sc = al.toString().compareTo(bl.toString());
+                    }
+                    if (sc == 0) {
+                        //if still equals look at the dataType
+                        IRI adt = a instanceof Literal ? ((Literal) a).getDataType() : null;
+                        IRI bdt = b instanceof Literal ? ((Literal) b).getDataType() : null;
+                        if (adt == null) {
+                            sc = bdt == null ? 0 : -1;
+                        } else if (bdt == null) {
+                            sc = 1;
+                        } else {
+                            sc = adt.getUnicodeString().compareTo(bdt.getUnicodeString());
+                        }
+                    }
+                    return sc;
+                } else { //for IRIs return the string compare
+                    return sc;
+                }
+            } else { //handle BlankNodes
+                //sort BlankNodes based on hashCode
+                int ah = a.hashCode();
+                int bh = b.hashCode();
+                if (ah == bh) {
+                    if (!a.equals(b)) {
+                        //if the implementation hashes are the same, but the
+                        //instances are not equal, try to sort them by identity
+                        //hash code
+                        int ash = System.identityHashCode(a);
+                        int bsh = System.identityHashCode(b);
+                        if (ash == bsh) { //if those are still the same, we need
+                            //to resolve the hashCode conflict by memorising
+                            //the decision in the conflictsMap
+                            return resolveBlankNodeHashConflict(a, b, confictsMap);
+                        } else {
+                            return ash < bsh ? -1 : 1;
+                        }
+                    } else { //same hash and equals
+                        return 0;
+                    }
+                } else { //sort by hash
+                    return ah < bh ? -1 : 1;
+                }
+            }
+        } else {
+            return at < bt ? -1 : 1;
+        }
     }
 
 }
diff --git a/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedImmutableGraph.java b/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedImmutableGraph.java
new file mode 100644
index 0000000..c9cf76c
--- /dev/null
+++ b/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedImmutableGraph.java
@@ -0,0 +1,82 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.stanbol.commons.indexedgraph;
+
+import java.util.Iterator;
+
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.AbstractImmutableGraph;
+/**
+ * {@link ImmutableGraph} implementation that internally uses an
+ * {@link IndexedGraph} to hold the RDF graph.
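+ * <p>
+ * Usage (a minimal sketch):
+ * <pre>
+ *   // triples: any existing Collection&lt;Triple&gt;
+ *   ImmutableGraph immutable = new IndexedGraph(triples).getImmutableGraph();
+ * </pre>
+ *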
+ * @author rwesten
+ *
+ */
+public class IndexedImmutableGraph extends AbstractImmutableGraph implements ImmutableGraph {
+
+    private final Graph tripleCollection;
+    
+    /**
+     * Creates a graph with the triples in tripleCollection
+     * 
+     * @param tripleCollection the collection of triples this ImmutableGraph shall consist of
+     */
+    public IndexedImmutableGraph(Graph tripleCollection) {
+        this.tripleCollection = new IndexedGraph(tripleCollection);
+    }
+
+    /**
+     * Create a graph with the triples provided by the Iterator
+     * @param tripleIter the iterator over the triples
+     */
+    public IndexedImmutableGraph(Iterator<Triple> tripleIter) {
+        this.tripleCollection = new IndexedGraph(tripleIter);
+    }
+//    /**
+//     * Create a read-only {@link ImmutableGraph} wrapper over the provided 
+//     * {@link Graph}
+//     * @param tripleCollection the indexed triple collection to create a
+//     * read-only wrapper around
+//     */
+//    protected IndexedImmutableGraph(IndexedGraph tripleCollection){
+//        this.tripleCollection = tripleCollection;
+//    }
+    
+    @Override
+    protected Iterator<Triple> performFilter(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
+        return tripleCollection.filter(subject, predicate, object);
+    }
+
+    
+    @Override
+    public int performSize() {
+        return tripleCollection.size();
+    }
+
+}
diff --git a/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedMGraph.java b/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedMGraph.java
deleted file mode 100644
index 0fe3b52..0000000
--- a/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedMGraph.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.stanbol.commons.indexedgraph;
-
-import java.util.Collection;
-import java.util.Iterator;
-
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-
-public class IndexedMGraph extends IndexedTripleCollection implements MGraph {
-
-    public IndexedMGraph() {
-        super();
-    }
-
-    public IndexedMGraph(Collection<Triple> baseCollection) {
-        super(baseCollection);
-    }
-
-    public IndexedMGraph(Iterator<Triple> iterator) {
-        super(iterator);
-    }
-
-    @Override
-    public Graph getGraph() {
-        return new IndexedGraph(this);
-    }
-
-}
diff --git a/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedTripleCollection.java b/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedTripleCollection.java
deleted file mode 100644
index fbe5fd2..0000000
--- a/commons/indexedgraph/src/main/java/org/apache/stanbol/commons/indexedgraph/IndexedTripleCollection.java
+++ /dev/null
@@ -1,476 +0,0 @@
-/*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.stanbol.commons.indexedgraph;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.Set;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.AbstractTripleCollection;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * {@link TripleCollection} implementation that uses indexes for <ul>
- * <li> subject, predicate, object [SPO]
- * <li> predicate, object, subject [POS]
- * <li> object, subject, predicate [OSP]
- * </ul>
- * Indexes are maintained in {@link TreeSet}s with according {@link Comparator}
- * instances ({@link #spoComparator}, {@link #posComparator} ,
- * {@link #ospComparator}). {@link Resource}s are compared first using the
- * {@link Resource#hashCode()} and only if this matches by using
- * {@link Resource}{@link #toString()}.<p>
- * The {@link #filter(NonLiteral, UriRef, Resource)} implementation is based
- * on {@link TreeSet#subSet(Object, Object)}. All Iterators returned directly
- * operate on top of one of the internal indexes.
- * <p>
- * This class is not public, implementations should use {@link IndexedGraph} or
- * {@link IndexedMGraph}.
- *
- * @author rwesten
- */
-class IndexedTripleCollection extends AbstractTripleCollection implements TripleCollection {
-
-    private static final Logger log = LoggerFactory.getLogger(IndexedTripleCollection.class);
-    
-    /**
-     * This map is used to ensure constant ordering for {@link BNode} that do
-     * have the same hashcode (and therefore result to have the same
-     * {@link BNode#toString()} value.
-     */
-    private final Map<Integer,List<Resource>> hashCodeConflictMap = new HashMap<Integer,List<Resource>>();
-    /**
-     * Compares Triples based on Subject, Predicate, Object
-     */
-    private final Comparator<Triple> spoComparator = new Comparator<Triple>() {
-
-        @Override
-        public int compare(Triple a, Triple b) {
-            int c = IndexedTripleCollection.compare(a.getSubject(), b.getSubject(), hashCodeConflictMap);
-            if(c == 0){
-                c = IndexedTripleCollection.compare(a.getPredicate(), b.getPredicate(), hashCodeConflictMap);
-                if(c == 0){
-                    c =  IndexedTripleCollection.compare(a.getObject(), b.getObject(), hashCodeConflictMap);
-                }
-            }
-            return c;
-        }
-    };
-    /**
-     * The SPO index
-     */
-    private final NavigableSet<Triple> spo = new TreeSet<Triple>(spoComparator);
-    /**
-     * Compares Triples based on Predicate, Object, Subject
-     */
-    private final Comparator<Triple> posComparator = new Comparator<Triple>() {
-
-        @Override
-        public int compare(Triple a, Triple b) {
-            int c = IndexedTripleCollection.compare(a.getPredicate(), b.getPredicate(), hashCodeConflictMap);
-            if(c == 0){
-                c = IndexedTripleCollection.compare(a.getObject(), b.getObject(), hashCodeConflictMap);
-                if(c == 0){
-                    c =  IndexedTripleCollection.compare(a.getSubject(), b.getSubject(), hashCodeConflictMap);
-                }
-            }
-            return c;
-        }
-    };
-    /**
-     * The POS index
-     */
-    private final NavigableSet<Triple> pos = new TreeSet<Triple>(posComparator);
-    /**
-     * Compares Triples based on Object, Subject, Predicate
-     */
-    private final Comparator<Triple> ospComparator = new Comparator<Triple>() {
-
-        @Override
-        public int compare(Triple a, Triple b) {
-            int c = IndexedTripleCollection.compare(a.getObject(), b.getObject(), hashCodeConflictMap);
-            if(c == 0){
-                c = IndexedTripleCollection.compare(a.getSubject(), b.getSubject(), hashCodeConflictMap);
-                if(c == 0){
-                    c =  IndexedTripleCollection.compare(a.getPredicate(), b.getPredicate(), hashCodeConflictMap);
-                }
-            }
-            return c;
-        }
-    };
-    /**
-     * The OSP index
-     */
-    private final NavigableSet<Triple> osp = new TreeSet<Triple>(ospComparator);
-    
-    /**
-     * Creates an empty {@link IndexedTripleCollection}
-     */
-    public IndexedTripleCollection() { 
-        super();
-    }
-
-    /**
-     * Creates a {@link IndexedTripleCollection} using the passed iterator, the iterator 
-     * is consumed before the constructor returns
-     * 
-     * @param iterator
-     */
-    public IndexedTripleCollection(Iterator<Triple> iterator) {
-        super();
-        while (iterator.hasNext()) {
-            Triple triple = iterator.next();
-            performAdd(triple);
-        }
-    }
-
-    /**
-     * Creates a {@link IndexedTripleCollection} for the specified collection of triples,
-     * subsequent modification of baseSet do not affect the created instance.
-     *
-     * @param iterable over triples
-     */
-    public IndexedTripleCollection(Collection<Triple> baseCollection) {
-        super();
-        spo.addAll(baseCollection);
-        //use internal index to fill the other indexes, because the parsed
-        //collection might be slow
-        pos.addAll(spo); 
-        osp.addAll(spo);
-    }
-    
-    @Override
-    protected Iterator<Triple> performFilter(final NonLiteral subject, final UriRef predicate, final Resource object) {
-        if(subject == null && predicate == null && object == null){ //[n,n,n]
-            return createIterator(spo, spo.iterator());
-        }
-        final Triple low = new TripleImpl(
-            subject == null ? MIN : subject, 
-                    predicate == null ? MIN : predicate, 
-                            object == null ? MIN : object);
-        final Triple high = new TripleImpl(
-            subject == null ? MAX : subject, 
-                    predicate == null ? MAX : predicate, 
-                            object == null ? MAX : object);
-        if(subject != null && predicate != null && object != null){ // [S,P,O]
-            //NOTE: low.equals(high) in that case!
-            return createIterator(spo, spo.subSet(low, true, low, true).iterator());
-        } else if(subject != null && object == null){ //[S,n,n], [S,P,n] 
-            return createIterator(spo, spo.subSet(low, high).iterator());
-        } else if (predicate != null) { //[n,P,n], [n,P,O]
-            return createIterator(pos,pos.subSet(low, high).iterator());
-        } else { //[n,n,O] , [S,n,O]
-            return createIterator(osp,osp.subSet(low, high).iterator());
-        }
-    }
-
-    @Override
-    protected boolean performAdd(Triple triple) {
-        if(spo.add(triple)){
-            osp.add(triple);
-            return pos.add(triple);
-        }
-        return false;
-    }
-    
-    @Override
-    protected boolean performRemove(Triple triple) {
-        if(spo.remove(triple)){
-            osp.remove(triple);
-            return pos.remove(triple);
-        } 
-        return false;
-    }
-    
-    @Override
-    public int size() {
-        return spo.size();
-    }
-//    @Override
-//    public Iterator<Triple> iterator() {
-//        return createIterator(spo, spo.iterator());
-//    }
-
-    /**
-     * Returns an Iterator that ensures that calls to {@link Iterator#remove()}
-     * remove items from all three indexes
-     * @param index
-     * @param base
-     * @return
-     */
-    private Iterator<Triple> createIterator(final SortedSet<Triple> index,final Iterator<Triple> base){
-        return new Iterator<Triple>() {
-            Triple current = null;
-            @Override
-            public boolean hasNext() {
-                return base.hasNext();
-            }
-
-            @Override
-            public Triple next() {
-                current = base.next();
-                return current;
-            }
-
-            @Override
-            public void remove() {
-                base.remove();
-                if(current != null){
-                    if(!(index == spo)){
-                        spo.remove(current);
-                    } 
-                    if(!(index == pos)){
-                        pos.remove(current);
-                    }
-                    if(!(index == osp)){
-                        osp.remove(current);
-                    }
-                }
-            }
-        };
-        
-    }
-
-    
-    protected static UriRef MIN = new UriRef("") {
-        @Override
-        public int hashCode() {
-            return Integer.MIN_VALUE;
-        };
-    };
-    protected static UriRef MAX = new UriRef("") {
-        @Override
-        public int hashCode() {
-            return Integer.MAX_VALUE;
-        };
-    };
-
-//    /**
-//     * Compares two resources with special support for {@link #MIN} and
-//     * {@link #MAX} to allow building {@link SortedSet#subSet(Object, Object)}
-//     * for <code>null</code> values parsed to 
-//     * {@link #filter(NonLiteral, UriRef, Resource)}
-//     * @param a
-//     * @param b
-//     * @return
-//     */
-//    protected static int compareHash(Resource a, Resource b, Map<Integer,List<Resource>> confictsMap) {
-//        int hashA = a.hashCode();
-//        int hashB = b.hashCode();
-//        if (hashA != hashB) {
-//            return hashA > hashB ? 1 : -1;
-//        }
-//        //those resources might be equals
-//        //(1) Check for MIN, MAX (used to build sub-sets). Other resources might
-//        //    have a similar hasCode
-//        int state = a == MIN || b == MAX ? -1 :
-//            a == MAX || b == MIN ? 1 : 0;
-//        if(state == 0){
-//            if(a.equals(b)){ //check of the resources are equals
-//                return 0; //return zero
-//            } else if(//we need to care about HashCode conflicts 
-//                a instanceof BNode && b instanceof BNode){ // of BNodes
-//                log.info("HashCode conflict for {} and {}",a,b); //we have a conflict
-//                return resolveBNodeHashConflict(a, b, confictsMap);
-//            } else { //same hashCode but not equals
-//                //use the String representation of the Resources to sort them
-//                String as = resource2String(a);
-//                String bs = resource2String(b);
-//                log.info("same hash code {} - compare Strings a: {}, b: {}",
-//                    new Object[]{a.hashCode(),as,bs});
-//                return as.compareTo(bs);
-//            }
-//        }
-//       return state;
-//    }
-
-    /**
-     * Resolved BNode hasConflics, by storing the correct order for the affected
-     * {@link Integer} in a {@link List} of Resource instances.
-     * @param a the first {@link BNode}
-     * @param b the second {@link BNode}
-     * @param confictsMap the Map used to store the order of BNodes with conflicts
-     * @return the decision taken based on the confictsMap.
-     */
-    private static int resolveBNodeHashConflict(Resource a, Resource b,
-            Map<Integer,List<Resource>> confictsMap) {
-        //This is not a bad thing. We need just to ensure constant ordering
-        //and as there is nothing we can use to distinguish we need to keep
-        //this information in a list.
-        Integer hash = Integer.valueOf(a.hashCode());
-        List<Resource> resources = confictsMap.get(hash);
-        if(resources == null){ //new conflict ... just add and return
-            resources = new ArrayList<Resource>(2);
-            confictsMap.put(hash, resources);
-            resources.add(a);
-            resources.add(b);
-            return -1;
-        }
-        //already conflicting resource for this hash present
-        int aIndex=-1;
-        int bIndex=-1;
-        for(int i = 0; i<resources.size() && (aIndex < 0 || bIndex < 0);i++){
-            Resource r = resources.get(i);
-            if(aIndex < 0 && r.equals(a)){
-                aIndex = i;
-            }
-            if(bIndex < 0 && r.equals(b)){
-                bIndex = i;
-            }
-        }
-        if(aIndex < 0){ //a not found
-            aIndex = resources.size();
-            resources.add(a);
-        }
-        if(bIndex < 0){ //b not found
-            bIndex = resources.size();
-            resources.add(b);
-        }
-        return aIndex < bIndex ? -1 : 1;
-    }
-    /**
-     * Compares Resources to correctly sort them within the index.<p>
-     * Sort criteria are:<ol>
-     * <li> URIs are sorted by the {@link UriRef#getUnicodeString()} unicode string)
-     * <li> Literals 
-     *  <ol>
-     *      <li> sort by the {@link Literal#getLexicalForm() lixical form}
-     *      <li> sort by {@link PlainLiteral#getLanguage() language} (<code>null</code> value first)
-     *      <li> sort by {@link TypedLiteral#getDataType() type} (<code>null</code> value fist
-     *  </ol>
-     * <li> BNode 
-     *  <ol>
-     *      <li> sorted by their {@link System#identityHashCode(Object) Object hasCode}
-     *      <li> on hasCode conflicts (same hasCode but not equals) a random order is chosen
-     *      and kept in the parsed conflictsMap
-     *  </ol> 
-     * </ol>
-     * <b>NOTEs</b><ul>
-     * <li> parsed {@link Resource} are not required to correctly implement 
-     * {@link Object#hashCode() hashCode} and {@link Object#equals(Object) equals}
-     * <li> parsed {@link UriRef} and {@link BNode} and {@link Literal} MUST NOT
-     * extend/implement any of the other classes/interfaces. This means that an
-     * {@link UriRef} MUST NOT implement {@link BNode} nor {@link Literal}
-     * <li> parsed {@link Literal}s MAY implement {@link PlainLiteral} AND
-     * {@link TypedLiteral}. This allows wrappers over frameworks that do not
-     * distinguish between those two literal types to be used with the
-     * {@link IndexedTripleCollection}.
-     * </ul>
-     * 
-     * @param a the first resource to compare
-     * @param b the second resource to compare
-     * @param confictsMap the map used to resolve BNodes with hasCode conflicts 
-     * @return 
-     */
-    protected static int compare(Resource a, Resource b, Map<Integer,List<Resource>> confictsMap){
-        //Handle special cases for MAX and MIN values
-        if(a == MIN || b == MAX) {
-            return -1 ;
-        } else if(a == MAX || b == MIN){
-            return 1;
-        }
-        //sort (0) UriRefs < (1) Literals (PlainLiterals & TypedLiterals) < (3) BNodes
-        int at = a instanceof UriRef ? 0 : a instanceof Literal ? 1 : 2;
-        int bt = b instanceof UriRef ? 0 : b instanceof Literal ? 1 : 2;
-        if(at == bt){ //same type sort the different types
-            if(at < 2){ //no BNode
-                //sort in alphabetic order of the string representation
-                String as = at == 0 ? ((UriRef)a).getUnicodeString() :
-                    ((Literal)a).getLexicalForm();
-                String bs = bt == 0 ? ((UriRef)b).getUnicodeString() :
-                    ((Literal)b).getLexicalForm();
-                int sc = as.compareTo(bs);
-                if(sc == 0 && at == 1){ //same string value and Literals
-                    //check if the language and types are the same
-                    Language al = a instanceof PlainLiteral ? ((PlainLiteral)a).getLanguage() : null;
-                    Language bl = b instanceof PlainLiteral ? ((PlainLiteral)b).getLanguage() : null;
-                    //first try to sort by language
-                    if(al == null){
-                        sc = bl == null ? 0 : -1;
-                    } else if(bl == null){
-                        sc = 1;
-                    } else {
-                        sc = al.toString().compareTo(bl.toString());
-                    }
-                    if(sc == 0){
-                        //if still equals look at the dataType
-                        UriRef adt = a instanceof TypedLiteral ? ((TypedLiteral)a).getDataType() : null;
-                        UriRef bdt = b instanceof TypedLiteral ? ((TypedLiteral)b).getDataType() : null;
-                        if(adt == null){
-                            sc = bdt == null ? 0 : -1;
-                        } else if(bdt == null){
-                            sc = 1;
-                        } else {
-                            sc = adt.getUnicodeString().compareTo(bdt.getUnicodeString());
-                        }
-                    }
-                    return sc;
-                } else { //for UriRefs return the string compare
-                    return sc;
-                }
-            } else { //handle BNodes
-                //sort BNodes based on hashCode
-                int ah = a.hashCode();
-                int bh = b.hashCode();
-                if(ah == bh){
-                    if(!a.equals(b)){
-                        //if implementations hash is the same, but the instances
-                        //are not equals, try to sort them by identity hash code
-                        int ash = System.identityHashCode(a);
-                        int bsh = System.identityHashCode(b);
-                        if(ash == bsh){ //if those are still the same, we need
-                            //to resolve the hashCode conflict by memorise the
-                            //decision in a confilctMap
-                            return resolveBNodeHashConflict(a, b, confictsMap);
-                        } else {
-                            return ash < bsh ? -1 : 1;
-                        }
-                    } else { //same hash and equals
-                        return 0;
-                    }
-                } else { //sort by hash
-                    return ah < bh ? -1 : 1;
-                }
-            }
-        } else {
-            return at < bt ? -1 : 1;
-        }
-    }
-    
-}
diff --git a/commons/indexedgraph/src/test/java/org/apache/stanbol/commons/indexedgraph/IndexedGraphTest.java b/commons/indexedgraph/src/test/java/org/apache/stanbol/commons/indexedgraph/IndexedGraphTest.java
index 2ff0915..3a6cad8 100644
--- a/commons/indexedgraph/src/test/java/org/apache/stanbol/commons/indexedgraph/IndexedGraphTest.java
+++ b/commons/indexedgraph/src/test/java/org/apache/stanbol/commons/indexedgraph/IndexedGraphTest.java
@@ -27,20 +27,19 @@
 import java.util.Random;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.clerezza.rdf.core.test.MGraphTest;
+import org.apache.clerezza.rdf.core.test.GraphTest;
 import org.apache.clerezza.rdf.ontologies.FOAF;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.clerezza.rdf.ontologies.RDFS;
@@ -49,13 +49,13 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class IndexedGraphTest  extends MGraphTest {
+public class IndexedGraphTest  extends GraphTest {
 
     protected static final Logger log = LoggerFactory.getLogger(IndexedGraphTest.class);
     
-    private UriRef uriRef1 = new UriRef("http://example.org/foo");
-    private UriRef uriRef2 = new UriRef("http://example.org/bar");
-    private UriRef uriRef3 = new UriRef("http://example.org/test");
+    private IRI uriRef1 = new IRI("http://example.org/foo");
+    private IRI uriRef2 = new IRI("http://example.org/bar");
+    private IRI uriRef3 = new IRI("http://example.org/test");
     private Triple triple1 = new TripleImpl(uriRef1, uriRef2, uriRef3);
     private Triple triple2 = new TripleImpl(uriRef2, uriRef2, uriRef1);
     private Triple triple3 = new TripleImpl(uriRef3, uriRef1, uriRef3);
@@ -63,13 +63,13 @@
     private Triple triple5 = new TripleImpl(uriRef2, uriRef3, uriRef2);
     
     @Override
-    protected MGraph getEmptyMGraph() {
-        return new IndexedMGraph();
+    protected Graph getEmptyGraph() {
+        return new IndexedGraph();
     }
     @Test
     public void bNodeConsitency() {
-        MGraph mGraph = getEmptyMGraph();
-        final BNode bNode = new BNode() {
+        Graph mGraph = getEmptyGraph();
+        final BlankNode bNode = new BlankNode() {
 
             @Override
             public int hashCode() {
@@ -78,13 +78,13 @@
 
             @Override
             public boolean equals(Object o) {
-                return o instanceof BNode;
+                return o instanceof BlankNode;
             }
             
         
         };
         
-        final BNode bNodeClone = new BNode() {
+        final BlankNode bNodeClone = new BlankNode() {
 
             @Override
             public int hashCode() {
@@ -93,7 +93,7 @@
 
             @Override
             public boolean equals(Object o) {
-                return o instanceof BNode; 
+                return o instanceof BlankNode; 
             }
             
         
@@ -101,15 +101,15 @@
 
         mGraph.add(new TripleImpl(bNode, uriRef1, uriRef2));
         mGraph.add(new TripleImpl(bNodeClone, uriRef2, uriRef3));
-        NonLiteral bNodeBack = mGraph.filter(null, uriRef1, uriRef2).next().getSubject();
+        BlankNodeOrIRI bNodeBack = mGraph.filter(null, uriRef1, uriRef2).next().getSubject();
         Assert.assertEquals("The bnode we get back is not equals to the one we added", bNode, bNodeBack);
-        NonLiteral bNodeBack2 = mGraph.filter(null, uriRef2, uriRef3).next().getSubject();
+        BlankNodeOrIRI bNodeBack2 = mGraph.filter(null, uriRef2, uriRef3).next().getSubject();
         Assert.assertEquals("The returnned bnodes are no longer equals", bNodeBack, bNodeBack2);
         Assert.assertTrue("Not finding a triple when searching with equal bNode", mGraph.filter(bNodeBack, uriRef2, null).hasNext());
     }
     @Test
     public void iteratorRemove() {
-        TripleCollection itc = new IndexedTripleCollection();
+        Graph itc = new IndexedGraph();
         itc.add(triple1);
         itc.add(triple2);
         itc.add(triple3);
@@ -125,13 +125,13 @@
 
     @Test
     public void removeAll() {
-        TripleCollection itc = new IndexedTripleCollection();
+        Graph itc = new IndexedGraph();
         itc.add(triple1);
         itc.add(triple2);
         itc.add(triple3);
         itc.add(triple4);
         itc.add(triple5);
-        TripleCollection itc2 = new IndexedTripleCollection();
+        Graph itc2 = new IndexedGraph();
         itc2.add(triple1);
         itc2.add(triple3);
         itc2.add(triple5);
@@ -141,7 +141,7 @@
     
     @Test
     public void filterIteratorRemove() {
-        TripleCollection itc = new IndexedTripleCollection();
+        Graph itc = new IndexedGraph();
         itc.add(triple1);
         itc.add(triple2);
         itc.add(triple3);
@@ -157,7 +157,7 @@
 
     @Test(expected=ConcurrentModificationException.class)
     public void remove() {
-        TripleCollection itc = new IndexedTripleCollection();
+        Graph itc = new IndexedGraph();
         itc.add(triple1);
         itc.add(triple2);
         itc.add(triple3);
@@ -172,14 +172,14 @@
     }
     /**
      * Holds the test data to perform 
-     * {@link TripleCollection#filter(NonLiteral, UriRef, Resource)}
-     * tests on {@link TripleCollection} implementations
+     * {@link Graph#filter(BlankNodeOrIRI, IRI, RDFTerm)}
+     * tests on {@link Graph} implementations
      * @author rwesten
      */
     public static final class TestCase {
-        public final List<NonLiteral> subjects;
-        public final List<Resource> objects;
-        public final List<UriRef> predicates;
+        public final List<BlankNodeOrIRI> subjects;
+        public final List<RDFTerm> objects;
+        public final List<IRI> predicates;
 
         /**
          * Create a new Test with a maximum number of subjects, predicates and
@@ -189,10 +189,10 @@
          * @param pNum the maximum number of predicates
          * @param oNum the maximum number of objects
          */
-        public TestCase(TripleCollection tc,int sNum, int pNum, int oNum){
-            Set<NonLiteral> subjects = new LinkedHashSet<NonLiteral>();
-            Set<Resource> objects = new LinkedHashSet<Resource>();
-            Set<UriRef> predicates = new LinkedHashSet<UriRef>();
+        public TestCase(Graph tc,int sNum, int pNum, int oNum){
+            Set<BlankNodeOrIRI> subjects = new LinkedHashSet<BlankNodeOrIRI>();
+            Set<RDFTerm> objects = new LinkedHashSet<RDFTerm>();
+            Set<IRI> predicates = new LinkedHashSet<IRI>();
             for(Iterator<Triple> it = tc.iterator();it.hasNext();){
                 Triple t = it.next();
                 if(subjects.size() < 100){
@@ -206,11 +206,11 @@
                 }
             }
             this.subjects = Collections.unmodifiableList(
-                new ArrayList<NonLiteral>(subjects));
+                new ArrayList<BlankNodeOrIRI>(subjects));
             this.predicates = Collections.unmodifiableList(
-                new ArrayList<UriRef>(predicates));
+                new ArrayList<IRI>(predicates));
             this.objects = Collections.unmodifiableList(
-                new ArrayList<Resource>(objects));
+                new ArrayList<RDFTerm>(objects));
         }
     }
     @Test
@@ -224,12 +224,12 @@
         createGraph(graph, graphsize, seed);
         log.info("Load Time ({} triples)", graph.size());
         long start = System.currentTimeMillis();
-        MGraph sg = new SimpleMGraph(graph);
+        Graph sg = new SimpleGraph(graph);
         log.info("  ... {}: {}",sg.getClass().getSimpleName(), System.currentTimeMillis()-start);
         start = System.currentTimeMillis();
-        MGraph ig = new IndexedMGraph(graph);
+        Graph ig = new IndexedGraph(graph);
         log.info("  ... {}: {}",ig.getClass().getSimpleName(), System.currentTimeMillis()-start);
-        //Simple Graph reference test
+        //SimpleGraph reference test
         TestCase testCase = new TestCase(sg, 20, 5, 20); //reduced form 100,5,100
         log.info("Filter Performance Test (graph size {} triples, iterations {})",graphsize,iterations);
         log.info(" --- TEST {} with {} triples ---",sg.getClass().getSimpleName(),sg.size());
@@ -244,7 +244,7 @@
         Assert.assertEquals(sgr, igr); //validate filter implementation
     }
     
-    public List<Long> executeTest(TripleCollection graph, TestCase test, int testCount){
+    public List<Long> executeTest(Graph graph, TestCase test, int testCount){
         List<Long> testResults = new ArrayList<Long>();
         long start;
         long resultCount;
@@ -286,7 +286,7 @@
         return testResults;
     }
 
-    private long testSPO(TripleCollection graph, TestCase test, int testCount) {
+    private long testSPO(Graph graph, TestCase test, int testCount) {
         Iterator<Triple> it;
         long count = 0;
         int si = -1;
@@ -311,7 +311,7 @@
         return count;
     }
     
-    private long testSPn(TripleCollection graph, TestCase test, int testCount) {
+    private long testSPn(Graph graph, TestCase test, int testCount) {
         Iterator<Triple> it;
         long count = 0;
         int si = -1;
@@ -331,7 +331,7 @@
         return count;
     }
     
-    private long testSnO(TripleCollection graph, TestCase test, int testCount) {
+    private long testSnO(Graph graph, TestCase test, int testCount) {
         Iterator<Triple> it;
         long count = 0;
         int si = -1;
@@ -351,7 +351,7 @@
         return count;
     }
     
-    private long testnPO(TripleCollection graph, TestCase test, int testCount) {
+    private long testnPO(Graph graph, TestCase test, int testCount) {
         Iterator<Triple> it;
         long count = 0;
         int pi = -1;
@@ -370,7 +370,7 @@
         }
         return count;
     }
-    private long testSnn(TripleCollection graph, TestCase test, int testCount) {
+    private long testSnn(Graph graph, TestCase test, int testCount) {
         Iterator<Triple> it;
         long count = 0;
         int si = 0;
@@ -384,7 +384,7 @@
         }
         return count;
     }
-    private long testnPn(TripleCollection graph, TestCase test, int testCount) {
+    private long testnPn(Graph graph, TestCase test, int testCount) {
         Iterator<Triple> it;
         long count = 0;
         int pi;
@@ -398,7 +398,7 @@
         }
         return count;
     }
-    private long testnnO(TripleCollection graph, TestCase test, int testCount) {
+    private long testnnO(Graph graph, TestCase test, int testCount) {
         Iterator<Triple> it;
         long count = 0;
         int oi;
@@ -426,9 +426,9 @@
         double b = 2.0;//bNode
         double nb = b - (l * 2 / 3); //create new bNode
         double random;
-        NonLiteral subject = null;
-        UriRef predicate = null;
-        List<UriRef> predicateList = new ArrayList<UriRef>();
+        BlankNodeOrIRI subject = null;
+        IRI predicate = null;
+        List<IRI> predicateList = new ArrayList<IRI>();
         predicateList.add(RDF.first);
         predicateList.add(RDF.rest);
         predicateList.add(RDF.type);
@@ -444,14 +444,14 @@
         String URI_PREFIX = "http://www.test.org/bigGraph/ref";
         Language DE = new Language("de");
         Language EN = new Language("en");
-        Iterator<UriRef> predicates = predicateList.iterator();
-        List<BNode> bNodes = new ArrayList<BNode>();
-        bNodes.add(new BNode());
+        Iterator<IRI> predicates = predicateList.iterator();
+        List<BlankNode> bNodes = new ArrayList<BlankNode>();
+        bNodes.add(new BlankNode());
         for (int count = 0; tc.size() < triples; count++) {
             random = rnd.nextDouble() * 3;
             if (random >= 2.5 || count == 0) {
                 if (random <= 2.75) {
-                    subject = new UriRef(URI_PREFIX + count);
+                    subject = new IRI(URI_PREFIX + count);
                 } else {
                     int rndIndex = (int) ((random - 2.75) * bNodes.size() / (3.0 - 2.75));
                     subject = bNodes.get(rndIndex);
@@ -470,7 +470,7 @@
                 } else if (random <= d) {
                     tc.add(new TripleImpl(subject, predicate, lf.createTypedLiteral(random)));
                 } else {
-                    PlainLiteral text;
+                    Literal text;
                     if (random <= i) {
                         text = new PlainLiteralImpl("Literal for " + count);
                     } else if (random <= d) {
@@ -481,18 +481,18 @@
                     tc.add(new TripleImpl(subject, predicate, text));
                 }
             } else if (random <= b) { //bnode
-                BNode bnode;
+                BlankNode bnode;
                 if (random <= nb) {
-                    bnode = new BNode();
+                    bnode = new BlankNode();
                     bNodes.add(bnode);
                 } else { //>nb <b
                     int rndIndex = (int) ((random - nb) * bNodes.size() / (b - nb));
                     bnode = bNodes.get(rndIndex);
                 }
                 tc.add(new TripleImpl(subject, predicate, bnode));
-            } else { //UriRef
+            } else { //IRI
                 tc.add(new TripleImpl(subject, predicate,
-                        new UriRef(URI_PREFIX + count * random)));
+                        new IRI(URI_PREFIX + count * random)));
             }
         }        
     }
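
For reviewers tracking the rename: the test code above exercises the core type mapping of the Clerezza 1.0 API (TripleCollection/MGraph -> Graph, UriRef -> IRI, BNode -> BlankNode, NonLiteral -> BlankNodeOrIRI, Resource -> RDFTerm). A minimal sketch of triple creation under the new types, with invented example IRIs:

    import org.apache.clerezza.commons.rdf.BlankNode;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    Graph g = new IndexedGraph();                    // mutable graph, was IndexedMGraph
    IRI s = new IRI("http://example.org/subject");   // was UriRef
    IRI p = new IRI("http://example.org/predicate"); // was UriRef
    BlankNode o = new BlankNode();                   // was BNode
    g.add(new TripleImpl(s, p, o));                  // Triple of (BlankNodeOrIRI, IRI, RDFTerm)
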
diff --git a/commons/installer/bundleprovider/src/main/java/org/apache/stanbol/commons/installer/provider/bundle/impl/BundleInstaller.java b/commons/installer/bundleprovider/src/main/java/org/apache/stanbol/commons/installer/provider/bundle/impl/BundleInstaller.java
index 3cc7808..6a5df54 100644
--- a/commons/installer/bundleprovider/src/main/java/org/apache/stanbol/commons/installer/provider/bundle/impl/BundleInstaller.java
+++ b/commons/installer/bundleprovider/src/main/java/org/apache/stanbol/commons/installer/provider/bundle/impl/BundleInstaller.java
@@ -217,7 +217,7 @@
                 while (resources.hasMoreElements()) {
                     URL url = resources.nextElement();
                     if(url != null){
-                        log.debug("  > installable Resource {}",url);
+                        log.debug("  > installable resource {}",url);
                         InstallableResource resource = createInstallableResource(bundle, path, url);
                         if (resource != null) {
                             updated.add(resource);
diff --git a/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/ClerezzaRDFParser.java b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/ClerezzaRDFParser.java
index badfaa1..dd87651 100644
--- a/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/ClerezzaRDFParser.java
+++ b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/ClerezzaRDFParser.java
@@ -3,16 +3,14 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 
 import com.github.jsonldjava.core.JsonLdError;
 import com.github.jsonldjava.core.JsonLdProcessor;
@@ -20,7 +18,7 @@
 import com.github.jsonldjava.core.RDFParser;
 
 /**
- * Converts a Clerezza {@link TripleCollection} to the {@link RDFDataset} used
+ * Converts a Clerezza {@link Graph} to the {@link RDFDataset} used
  * by the {@link JsonLdProcessor}
  * 
  * @author Rupert Westenthaler
@@ -35,10 +33,10 @@
     @Override
     public RDFDataset parse(Object input) throws JsonLdError {
         count = 0;
-        Map<BNode,String> bNodeMap = new HashMap<BNode,String>(1024);
+        Map<BlankNode,String> bNodeMap = new HashMap<BlankNode,String>(1024);
         final RDFDataset result = new RDFDataset();
-        if(input instanceof TripleCollection){
-            for(Triple t : ((TripleCollection)input)){
+        if(input instanceof Graph){
+            for(Triple t : ((Graph)input)){
                 handleStatement(result,t, bNodeMap);
             }
         }
@@ -46,35 +44,27 @@
         return result;
     }
 
-    private void handleStatement(RDFDataset result, Triple t, Map<BNode,String> bNodeMap) {
+    private void handleStatement(RDFDataset result, Triple t, Map<BlankNode,String> bNodeMap) {
         final String subject = getResourceValue(t.getSubject(), bNodeMap);
         final String predicate = getResourceValue(t.getPredicate(), bNodeMap);
-        final Resource object = t.getObject();
+        final RDFTerm object = t.getObject();
         
         if (object instanceof Literal) {
             
             final String value = ((Literal)object).getLexicalForm();
             final String language;
             final String datatype;
-            if(object instanceof TypedLiteral){
+            datatype = getResourceValue(((Literal)object).getDataType(), bNodeMap);
+            Language l = ((Literal)object).getLanguage();
+            if(l == null){
                 language = null;
-                datatype = getResourceValue(((TypedLiteral)object).getDataType(), bNodeMap);
-            } else if(object instanceof PlainLiteral){
-                //we use RDF 1.1 literals so we do set the RDF_LANG_STRING datatype
-                datatype = RDF_LANG_STRING;
-                Language l = ((PlainLiteral)object).getLanguage();
-                if(l == null){
-                    language = null;
-                } else {
-                    language = l.toString();
-                }
             } else {
-                throw new IllegalStateException("Unknown Literal class " + object.getClass().getName());
+                language = l.toString();
             }
             result.addTriple(subject, predicate, value, datatype, language);
             count++;
         } else {
-            result.addTriple(subject, predicate, getResourceValue((NonLiteral) object, bNodeMap));
+            result.addTriple(subject, predicate, getResourceValue((BlankNodeOrIRI) object, bNodeMap));
             count++;
         }
         
@@ -88,20 +78,20 @@
         return count;
     }
     
-    private String getResourceValue(NonLiteral nl, Map<BNode, String> bNodeMap) {
+    private String getResourceValue(BlankNodeOrIRI nl, Map<BlankNode, String> bNodeMap) {
         if (nl == null) {
             return null;
-        } else if (nl instanceof UriRef) {
-            return ((UriRef) nl).getUnicodeString();
-        } else if (nl instanceof BNode) {
+        } else if (nl instanceof IRI) {
+            return ((IRI) nl).getUnicodeString();
+        } else if (nl instanceof BlankNode) {
             String bNodeId = bNodeMap.get(nl);
             if (bNodeId == null) {
                 bNodeId = Integer.toString(bNodeMap.size());
-                bNodeMap.put((BNode) nl, bNodeId);
+                bNodeMap.put((BlankNode) nl, bNodeId);
             }
             return new StringBuilder("_:b").append(bNodeId).toString();
         } else {
-            throw new IllegalStateException("Unknwon NonLiteral type " + nl.getClass().getName() + "!");
+            throw new IllegalStateException("Unknwon BlankNodeOrIRI type " + nl.getClass().getName() + "!");
         }
     }
 }
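
The collapsed branches above follow from the RDF 1.1 literal model in Clerezza 1.0: TypedLiteral and PlainLiteral are merged into a single Literal whose getDataType() is always set (rdf:langString for language-tagged literals) and whose getLanguage() is null unless a tag is present. A minimal sketch, with invented literal values:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TypedLiteralImpl;

    Literal tagged = new PlainLiteralImpl("Hallo", new Language("de"));
    Literal typed = new TypedLiteralImpl("42",
            new IRI("http://www.w3.org/2001/XMLSchema#int"));
    String lex = tagged.getLexicalForm();  // "Hallo"
    IRI datatype = tagged.getDataType();   // rdf:langString, never null
    Language lang = typed.getLanguage();   // null: no language tag
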
diff --git a/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/ClerezzaTripleCallback.java b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/ClerezzaTripleCallback.java
new file mode 100644
index 0000000..cf3def5
--- /dev/null
+++ b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/ClerezzaTripleCallback.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2016 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.stanbol.commons.jsonld.clerezza;
+
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+
+import com.github.jsonldjava.core.JsonLdTripleCallback;
+import com.github.jsonldjava.core.RDFDataset;
+import org.apache.clerezza.commons.rdf.impl.utils.TypedLiteralImpl;
+
+public class ClerezzaTripleCallback implements JsonLdTripleCallback {
+
+    private static final String RDF_LANG_STRING = "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString";
+
+    private Graph mGraph = new SimpleGraph();
+    private Map<String, BlankNode> bNodeMap = new HashMap<String, BlankNode>();
+
+    public void setGraph(Graph mGraph) {
+        this.mGraph = mGraph;
+        bNodeMap = new HashMap<String, BlankNode>();
+    }
+
+    public Graph getGraph() {
+        return mGraph;
+    }
+
+    private void triple(String s, String p, String o, String graph) {
+        if (s == null || p == null || o == null) {
+            // TODO: decide how to handle triples with a null subject, predicate or object
+            return;
+        }
+
+        final BlankNodeOrIRI subject = getBlankNodeOrIRI(s);
+        final IRI predicate = new IRI(p);
+        final BlankNodeOrIRI object = getBlankNodeOrIRI(o);
+        mGraph.add(new TripleImpl(subject, predicate, object));
+    }
+
+    private void triple(String s, String p, String value, String datatype, String language,
+            String graph) {
+        final BlankNodeOrIRI subject = getBlankNodeOrIRI(s);
+        final IRI predicate = new IRI(p);
+        RDFTerm object;
+        if (language != null) {
+            object = new PlainLiteralImpl(value, new Language(language));
+        } else if (datatype == null || RDF_LANG_STRING.equals(datatype)) {
+            object = new PlainLiteralImpl(value);
+        } else {
+            object = new TypedLiteralImpl(value, new IRI(datatype));
+        }
+
+        mGraph.add(new TripleImpl(subject, predicate, object));
+    }
+
+    private BlankNodeOrIRI getBlankNodeOrIRI(String s) {
+        if (s.startsWith("_:")) {
+            return getBlankNode(s);
+        } else {
+            return new IRI(s);
+        }
+    }
+
+    private BlankNode getBlankNode(String s) {
+        if (bNodeMap.containsKey(s)) {
+            return bNodeMap.get(s);
+        } else {
+            final BlankNode result = new BlankNode();
+            bNodeMap.put(s, result);
+            return result;
+        }
+    }
+
+    @Override
+    public Object call(RDFDataset dataset) {
+        for (String graphName : dataset.graphNames()) {
+            final List<RDFDataset.Quad> quads = dataset.getQuads(graphName);
+            if ("@default".equals(graphName)) {
+                graphName = null;
+            }
+            for (final RDFDataset.Quad quad : quads) {
+                if (quad.getObject().isLiteral()) {
+                    triple(quad.getSubject().getValue(), quad.getPredicate().getValue(), quad
+                            .getObject().getValue(), quad.getObject().getDatatype(), quad
+                            .getObject().getLanguage(), graphName);
+                } else {
+                    triple(quad.getSubject().getValue(), quad.getPredicate().getValue(), quad
+                            .getObject().getValue(), graphName);
+                }
+            }
+        }
+
+        return getGraph();
+    }
+
+}
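
For context, a JsonLdTripleCallback like this is driven by JsonLdProcessor.toRDF, as the JsonLdParsingProvider change below also shows. A minimal usage sketch (exception handling omitted; 'in' stands for a JSON-LD InputStream):

    import java.io.InputStream;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import com.github.jsonldjava.core.JsonLdProcessor;
    import com.github.jsonldjava.utils.JsonUtils;

    Graph target = new SimpleGraph();
    ClerezzaTripleCallback ctc = new ClerezzaTripleCallback();
    ctc.setGraph(target);                         // parsed triples land here
    Object input = JsonUtils.fromInputStream(in); // parse the JSON-LD document
    JsonLdProcessor.toRDF(input, ctc);            // invokes call(RDFDataset)
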
diff --git a/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdParsingProvider.java b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdParsingProvider.java
index f01317c..d776302 100644
--- a/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdParsingProvider.java
+++ b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdParsingProvider.java
@@ -19,8 +19,8 @@
 import java.io.IOException;
 import java.io.InputStream;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.ParsingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.felix.scr.annotations.Component;
@@ -29,7 +29,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.github.jsonldjava.clerezza.ClerezzaTripleCallback;
 import com.github.jsonldjava.core.JsonLdError;
 import com.github.jsonldjava.core.JsonLdProcessor;
 import com.github.jsonldjava.utils.JsonUtils;
@@ -49,10 +48,10 @@
     private final Logger logger = LoggerFactory.getLogger(getClass());
 
     @Override
-    public void parse(MGraph target, InputStream serializedGraph, String formatIdentifier, UriRef baseUri) {
-        //The callback will add parsed triples to the target MGraph
+    public void parse(Graph target, InputStream serializedGraph, String formatIdentifier, IRI baseUri) {
+        //The callback will add parsed triples to the target Graph
         ClerezzaTripleCallback ctc = new ClerezzaTripleCallback();
-        ctc.setMGraph(target);
+        ctc.setGraph(target);
         Object input;
         int startSize = 0;
         if(logger.isDebugEnabled()){
diff --git a/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdSerializingProvider.java b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdSerializingProvider.java
index 24e7745..ee6d6a6 100644
--- a/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdSerializingProvider.java
+++ b/commons/jsonld/src/main/java/org/apache/stanbol/commons/jsonld/clerezza/JsonLdSerializingProvider.java
@@ -27,7 +27,7 @@
 import java.util.LinkedHashMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.SerializingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.felix.scr.annotations.Activate;
@@ -120,7 +120,7 @@
     private boolean prettyPrint;
     
     @Override
-    public void serialize(OutputStream serializedGraph, TripleCollection tc,  String formatIdentifier) {
+    public void serialize(OutputStream serializedGraph, Graph tc,  String formatIdentifier) {
         ClerezzaRDFParser serializer = new ClerezzaRDFParser();
         try {
             long start = System.currentTimeMillis();
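
Callers now pass any org.apache.clerezza.commons.rdf.Graph to serialize(). A minimal sketch, assuming direct instantiation outside OSGi, a pre-existing 'graph', and the JSON-LD MIME type as format identifier (encoding exceptions omitted):

    import java.io.ByteArrayOutputStream;
    import org.apache.clerezza.commons.rdf.Graph;

    JsonLdSerializingProvider provider = new JsonLdSerializingProvider();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    provider.serialize(out, graph, "application/ld+json"); // 'graph' is any Graph
    String jsonLd = out.toString("UTF-8");
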
diff --git a/commons/ldpath/clerezza/src/main/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackend.java b/commons/ldpath/clerezza/src/main/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackend.java
index 71a8c5d..2f72e06 100644
--- a/commons/ldpath/clerezza/src/main/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackend.java
+++ b/commons/ldpath/clerezza/src/main/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackend.java
@@ -27,20 +27,17 @@
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TypedLiteralImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.access.LockableMGraph;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TypedLiteralImpl;
 import org.apache.commons.collections.BidiMap;
 import org.apache.commons.collections.bidimap.DualHashBidiMap;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
@@ -50,17 +47,17 @@
 
 /**
  * Clerezza based implementation of {@link RDFBackend} interface. This implementation uses the
- * {@link Resource} objects of Clerezza as processing unit RDFBackend.<p>
+ * {@link RDFTerm} objects of Clerezza as the processing units of the RDFBackend.<p>
  * 
- * For type conversions of {@link TypedLiteral}s the {@link LiteralFactory}
- * of Clerezza is used. In case parsed nodes are not {@link TypedLiteral} the
+ * For type conversions of {@link Literal}s the {@link LiteralFactory}
+ * of Clerezza is used. In case parsed nodes are not {@link Literal}s, the
  * super implementations of {@link AbstractBackend} are called, as these also
  * support converting values based on their string representation.
  * 
  * @author anil.sinaci
  * @author Rupert Westenthaler
  */
-public class ClerezzaBackend extends AbstractBackend<Resource> implements RDFBackend<Resource> {
+public class ClerezzaBackend extends AbstractBackend<RDFTerm> implements RDFBackend<RDFTerm> {
 
     private static final Logger logger = LoggerFactory.getLogger(ClerezzaBackend.class);
 
@@ -69,7 +66,7 @@
      * <li> local name
      * <li> uri string
      * <li> {@link URI}
-     * <li> {@link UriRef}
+     * <li> {@link IRI}
      * </ul>
      * {@link #toString()} returns the uri.
      */
@@ -80,7 +77,7 @@
         String localName;
         String uriString;
         URI uri;
-        UriRef uriRef;
+        IRI uriRef;
         /**
          * uses <code>{@link #name()}{@link String#toLowerCase() .toLowerCase()}
          * </code> to generate the {@link #getLocalName()}
@@ -100,7 +97,7 @@
             this.localName = localName != null ? localName : name().toLowerCase();
             this.uriString = namespace+this.localName;
             this.uri = URI.create(uriString);
-            this.uriRef = new UriRef(uriString);
+            this.uriRef = new IRI(uriString);
         }
         public String getLocalName(){
             return localName;
@@ -111,65 +108,65 @@
         public URI getURI(){
             return uri;
         }
-        public UriRef getUriRef(){
+        public IRI getIRI(){
             return uriRef;
         }
         @Override
         public String toString() {
             return uriString;
         }
-        private static BidiMap xsdURI2UriRef = new DualHashBidiMap();
+        private static BidiMap xsdURI2IRI = new DualHashBidiMap();
         
         static {
             for(XSD type : XSD.values()){
-                xsdURI2UriRef.put(type.getURI(), type.getUriRef());
+                xsdURI2IRI.put(type.getURI(), type.getIRI());
             }
         }
-        public static URI getXsdURI(UriRef uri){
-            return (URI)xsdURI2UriRef.getKey(uri);
+        public static URI getXsdURI(IRI uri){
+            return (URI)xsdURI2IRI.getKey(uri);
         }
-        public static UriRef getXsdUriRef(URI uri){
-            return (UriRef)xsdURI2UriRef.get(uri);
+        public static IRI getXsdIRI(URI uri){
+            return (IRI)xsdURI2IRI.get(uri);
         }
     }
     
-    private TripleCollection graph;
+    private Graph graph;
     
     private static LiteralFactory lf = LiteralFactory.getInstance();
 
     /**
      * Allows sub-classes to create a instance and setting the {@link #graph}
-     * later on by using {@link #setGraph(TripleCollection)}.
+     * later on by using {@link #setGraph(Graph)}.
      */
     protected ClerezzaBackend() {
     }
     /**
-     * Constructs a Clerezza {@link RDFBackend} by using the parsed {@link TripleCollection}
-     * @param graph the {@link TripleCollection}
+     * Constructs a Clerezza {@link RDFBackend} by using the parsed {@link Graph}
+     * @param graph the {@link Graph}
      * @throws IllegalArgumentException if <code>null</code> is parsed as graph.
      */
-    public ClerezzaBackend(TripleCollection graph) {
+    public ClerezzaBackend(Graph graph) {
         if(graph == null){
-            throw new IllegalArgumentException("The parsed Graph MUST NOT be NULL!");
+            throw new IllegalArgumentException("The parsed ImmutableGraph MUST NOT be NULL!");
         }
         this.graph = graph;
     }
     
-    protected final TripleCollection getGraph(){
+    protected final Graph getGraph(){
         return this.graph;
     }
 
-    protected final void setGraph(TripleCollection graph){
+    protected final void setGraph(Graph graph){
         this.graph = graph;
     }
     
     @Override
-    public Resource createLiteral(String content) {
+    public RDFTerm createLiteral(String content) {
         return createLiteral(content,null,null);
     }
 
     @Override
-    public Resource createLiteral(String content, Locale language, URI type) {
+    public RDFTerm createLiteral(String content, Locale language, URI type) {
         logger.debug("creating literal with content \"{}\", language {}, datatype {}",
             new Object[] {content, language, type});
         if (type == null) {
@@ -179,38 +176,38 @@
                 return new PlainLiteralImpl(content, new Language(language.getLanguage()));
             }
         } else {
-            return new TypedLiteralImpl(content, XSD.getXsdUriRef(type));
+            return new TypedLiteralImpl(content, XSD.getXsdIRI(type));
         }
     }
 
     @Override
-    public Resource createURI(String uriref) {
-        return new UriRef(uriref);
+    public RDFTerm createURI(String uriref) {
+        return new IRI(uriref);
     }
 
     @Override
-    public Double doubleValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return LiteralFactory.getInstance().createObject(Double.class, (TypedLiteral) resource);
+    public Double doubleValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return LiteralFactory.getInstance().createObject(Double.class, (Literal) resource);
         } else {
             return super.doubleValue(resource);
         }
     }
 
     @Override
-    public Locale getLiteralLanguage(Resource resource) {
-        if (resource instanceof PlainLiteral) {
-            Language lang = ((PlainLiteral) resource).getLanguage();
+    public Locale getLiteralLanguage(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            Language lang = ((Literal) resource).getLanguage();
             return lang != null ? new Locale(lang.toString()) : null;
         } else {
-            throw new IllegalArgumentException("Resource " + resource.toString() + " is not a PlainLiteral");
+            throw new IllegalArgumentException("RDFTerm " + resource.toString() + " is not a PlainLiteral");
         }
     }
 
     @Override
-    public URI getLiteralType(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            UriRef type = ((TypedLiteral) resource).getDataType();
+    public URI getLiteralType(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            IRI type = ((Literal) resource).getDataType();
             return type != null ? XSD.getXsdURI(type) : null;
         } else {
             throw new IllegalArgumentException("Value " + resource.toString() + " is not a literal");
@@ -218,31 +215,31 @@
     }
 
     @Override
-    public boolean isBlank(Resource resource) {
-        return resource instanceof BNode;
+    public boolean isBlank(RDFTerm resource) {
+        return resource instanceof BlankNode;
     }
 
     @Override
-    public boolean isLiteral(Resource resource) {
+    public boolean isLiteral(RDFTerm resource) {
         return resource instanceof Literal;
     }
 
     @Override
-    public boolean isURI(Resource resource) {
-        return resource instanceof UriRef;
+    public boolean isURI(RDFTerm resource) {
+        return resource instanceof IRI;
     }
 
     @Override
-    public Collection<Resource> listObjects(Resource subject, Resource property) {
-        if (!(property instanceof UriRef) || 
-                !(subject instanceof NonLiteral)) {
+    public Collection<RDFTerm> listObjects(RDFTerm subject, RDFTerm property) {
+        if (!(property instanceof IRI) || 
+                !(subject instanceof BlankNodeOrIRI)) {
             throw new IllegalArgumentException("Subject needs to be a URI or blank node, property a URI node");
         }
 
-        Collection<Resource> result = new ArrayList<Resource>();
+        Collection<RDFTerm> result = new ArrayList<RDFTerm>();
         Lock readLock = readLockGraph();
         try {
-            Iterator<Triple> triples = graph.filter((NonLiteral) subject, (UriRef) property, null);
+            Iterator<Triple> triples = graph.filter((BlankNodeOrIRI) subject, (IRI) property, null);
             while (triples.hasNext()) {
                 result.add(triples.next().getObject());
             }
@@ -256,15 +253,15 @@
     }
 
     @Override
-    public Collection<Resource> listSubjects(Resource property, Resource object) {
-        if (!(property instanceof UriRef)) {
+    public Collection<RDFTerm> listSubjects(RDFTerm property, RDFTerm object) {
+        if (!(property instanceof IRI)) {
             throw new IllegalArgumentException("Property needs to be a URI node");
         }
 
-        Collection<Resource> result = new ArrayList<Resource>();
+        Collection<RDFTerm> result = new ArrayList<RDFTerm>();
         Lock readLock = readLockGraph();
         try {
-            Iterator<Triple> triples = graph.filter(null, (UriRef) property, object);
+            Iterator<Triple> triples = graph.filter(null, (IRI) property, object);
             while (triples.hasNext()) {
                 result.add(triples.next().getSubject());
             }
@@ -277,90 +274,90 @@
     }
 
     @Override
-    public Long longValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(Long.class, (TypedLiteral) resource);
+    public Long longValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(Long.class, (Literal) resource);
         } else {
             return super.longValue(resource);
         }
     }
 
     @Override
-    public String stringValue(Resource resource) {
-        if (resource instanceof UriRef) {
-            return ((UriRef) resource).getUnicodeString();
+    public String stringValue(RDFTerm resource) {
+        if (resource instanceof IRI) {
+            return ((IRI) resource).getUnicodeString();
         } else if (resource instanceof Literal) {
             return ((Literal) resource).getLexicalForm();
-        } else { //BNode
+        } else { //BlankNode
             return resource.toString();
         }
     }
 
     @Override
-    public Boolean booleanValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(Boolean.class, (TypedLiteral) resource);
+    public Boolean booleanValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(Boolean.class, (Literal) resource);
         } else {
             return super.booleanValue(resource);
         }
     }
 
     @Override
-    public Date dateTimeValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(Date.class, (TypedLiteral) resource);
+    public Date dateTimeValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(Date.class, (Literal) resource);
         } else {
             return super.dateTimeValue(resource);
         }
     }
 
     @Override
-    public Date dateValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(Date.class, (TypedLiteral) resource);
+    public Date dateValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(Date.class, (Literal) resource);
         } else {
             return super.dateValue(resource);
         }
     }
 
     @Override
-    public Date timeValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(Date.class, (TypedLiteral) resource);
+    public Date timeValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(Date.class, (Literal) resource);
         } else {
             return super.timeValue(resource);
         }
     }
 
     @Override
-    public Float floatValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(Float.class, (TypedLiteral) resource);
+    public Float floatValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(Float.class, (Literal) resource);
         } else {
             return super.floatValue(resource);
         }
     }
 
     @Override
-    public Integer intValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(Integer.class, (TypedLiteral) resource);
+    public Integer intValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(Integer.class, (Literal) resource);
         } else {
             return super.intValue(resource);
         }
     }
 
     @Override
-    public BigInteger integerValue(Resource resource) {
-        if (resource instanceof TypedLiteral) {
-            return lf.createObject(BigInteger.class, (TypedLiteral) resource);
+    public BigInteger integerValue(RDFTerm resource) {
+        if (resource instanceof Literal) {
+            return lf.createObject(BigInteger.class, (Literal) resource);
         } else {
             return super.integerValue(resource);
         }
     }
 
     @Override
-    public BigDecimal decimalValue(Resource resource) {
+    public BigDecimal decimalValue(RDFTerm resource) {
         //currently there is no converter for BigDecimal in clerezza
         //so as a workaround use the lexical form (as provided by the super
         //implementation
@@ -381,12 +378,8 @@
      */
     private Lock readLockGraph() {
         final Lock readLock;
-        if(graph instanceof LockableMGraph){
-            readLock = ((LockableMGraph)graph).getLock().readLock();
-            readLock.lock();
-        } else {
-            readLock = null;
-        }
+        readLock = graph.getLock().readLock();
+        readLock.lock();
         return readLock;
     }
 
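
The readLockGraph() simplification is possible because in Clerezza 1.0 every Graph exposes a ReadWriteLock through getLock(); the old LockableMGraph marker interface is gone. The resulting idiom, sketched:

    import java.util.concurrent.locks.Lock;
    import org.apache.clerezza.commons.rdf.Graph;

    Lock readLock = graph.getLock().readLock(); // available on every Graph
    readLock.lock();
    try {
        // filter(...) / iterate the graph consistently
    } finally {
        readLock.unlock();
    }
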
diff --git a/commons/ldpath/clerezza/src/test/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackendTest.java b/commons/ldpath/clerezza/src/test/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackendTest.java
index 7b49c01..0986b98 100644
--- a/commons/ldpath/clerezza/src/test/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackendTest.java
+++ b/commons/ldpath/clerezza/src/test/java/org/apache/stanbol/commons/ldpath/clerezza/ClerezzaBackendTest.java
@@ -39,16 +39,16 @@
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.ParsingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
 import org.apache.marmotta.ldpath.LDPath;
 import org.apache.marmotta.ldpath.exception.LDPathParseException;
 import org.apache.marmotta.ldpath.parser.Configuration;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -74,17 +74,17 @@
     private static final String NS_SKOS = "http://www.w3.org/2004/02/skos/core#";
     private static final String NS_DBP = "http://dbpedia.org/property/";
     private static final String NS_DBO = "http://dbpedia.org/ontology/";
-    //private static final UriRef SKOS_CONCEPT = new UriRef(NS_SKOS+"Concept");
+    //private static final IRI SKOS_CONCEPT = new IRI(NS_SKOS+"Concept");
     
-    private static MGraph graph;
+    private static Graph graph;
     
     private ClerezzaBackend backend;
-    private LDPath<Resource> ldpath;
+    private LDPath<RDFTerm> ldpath;
     @BeforeClass
     public static void readTestData() throws IOException {
         ParsingProvider parser = new JenaParserProvider();
         //NOTE(rw): the new third parameter is the base URI used to resolve relative paths
-        graph = new IndexedMGraph();
+        graph = new IndexedGraph();
         InputStream in = ClerezzaBackendTest.class.getClassLoader().getResourceAsStream("testdata.rdf.zip");
         assertNotNull(in);
         ZipInputStream zipIn = new ZipInputStream(new BufferedInputStream(in));
@@ -106,36 +106,36 @@
             backend = new ClerezzaBackend(graph);
         }
         if(ldpath == null){
-            Configuration<Resource> config = new Configuration<Resource>();
+            Configuration<RDFTerm> config = new Configuration<RDFTerm>();
             config.addNamespace("dbp-prop", NS_DBP);
             config.addNamespace("dbp-ont", NS_DBO);
-            ldpath = new LDPath<Resource>(backend);
+            ldpath = new LDPath<RDFTerm>(backend);
         }
     }
     
     @Test
     public void testUriAndListImplementation() throws LDPathParseException {
-        UriRef nationalChampionship = new UriRef("http://cv.iptc.org/newscodes/subjectcode/15073031");
+        IRI nationalChampionship = new IRI("http://cv.iptc.org/newscodes/subjectcode/15073031");
         //this program tests:
-        // * UriRef transformers
+        // * IRI transformers
         // * #listSubjects(..) implementation
         // * #listObjects(..)  implementation
         Map<String,Collection<?>> results = ldpath.programQuery(nationalChampionship, 
             getReader("skos:broaderTransitive = (skos:broaderTransitive | ^skos:narrowerTransitive)+;"));
-        Set<Resource> expected = new HashSet<Resource>(Arrays.asList(
-            new UriRef("http://cv.iptc.org/newscodes/subjectcode/15000000"),
-            new UriRef("http://cv.iptc.org/newscodes/subjectcode/15073000")));
+        Set<RDFTerm> expected = new HashSet<RDFTerm>(Arrays.asList(
+            new IRI("http://cv.iptc.org/newscodes/subjectcode/15000000"),
+            new IRI("http://cv.iptc.org/newscodes/subjectcode/15073000")));
         Collection<?> broaderTransitive = results.get(NS_SKOS+"broaderTransitive");
         for(Object concept : broaderTransitive){
             assertNotNull(concept);
-            assertTrue(concept instanceof UriRef);
+            assertTrue(concept instanceof IRI);
             assertTrue(expected.remove(concept));
         }
         assertTrue("missing: "+expected,expected.isEmpty());
     }
     @Test
     public void testStringTransformer() throws LDPathParseException {
-        UriRef nationalChampionship = new UriRef("http://cv.iptc.org/newscodes/subjectcode/15073031");
+        IRI nationalChampionship = new IRI("http://cv.iptc.org/newscodes/subjectcode/15073031");
         Map<String,Collection<?>> results = ldpath.programQuery(nationalChampionship, 
             getReader("label = skos:prefLabel[@en-GB] :: xsd:string;"));
         Set<String> expected = new HashSet<String>(Arrays.asList(
@@ -151,7 +151,7 @@
     }
     @Test
     public void testDataTypes() throws LDPathParseException {
-        UriRef hallein = new UriRef("http://dbpedia.org/resource/Hallein");        
+        IRI hallein = new IRI("http://dbpedia.org/resource/Hallein");        
 
         StringBuilder program = new StringBuilder();
         program.append("@prefix dbp-prop : <").append(NS_DBP).append(">;");
diff --git a/commons/ldpathtemplate/src/main/java/org/apache/stanbol/commons/ldpathtemplate/LdRenderer.java b/commons/ldpathtemplate/src/main/java/org/apache/stanbol/commons/ldpathtemplate/LdRenderer.java
index 43d9e61..ad4a6d4 100644
--- a/commons/ldpathtemplate/src/main/java/org/apache/stanbol/commons/ldpathtemplate/LdRenderer.java
+++ b/commons/ldpathtemplate/src/main/java/org/apache/stanbol/commons/ldpathtemplate/LdRenderer.java
@@ -22,7 +22,7 @@
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 import org.apache.clerezza.rdf.utils.GraphNode;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Reference;
@@ -60,9 +60,9 @@
 	public void render(GraphNode node, final String templatePath, Writer out) {	
 		//A GraphNode backend could be graph unspecific, so the same engine could be
		//reused, possibly being significantly more performant (caching, etc.)
-		RDFBackend<Resource> backend = new ClerezzaBackend(node.getGraph());
-		Resource context = node.getNode();
-		TemplateEngine<Resource> engine = new TemplateEngine<Resource>(backend);
+		RDFBackend<RDFTerm> backend = new ClerezzaBackend(node.getGraph());
+		RDFTerm context = node.getNode();
+		TemplateEngine<RDFTerm> engine = new TemplateEngine<RDFTerm>(backend);
 		engine.setTemplateLoader(templateLoader);
 		try {
 			engine.processFileTemplate(context, templatePath, null, out);
diff --git a/commons/opennlp/src/test/java/org/apache/commons/opennlp/ClasspathDataFileProvider.java b/commons/opennlp/src/test/java/org/apache/commons/opennlp/ClasspathDataFileProvider.java
index 91576c1..ad4afbd 100644
--- a/commons/opennlp/src/test/java/org/apache/commons/opennlp/ClasspathDataFileProvider.java
+++ b/commons/opennlp/src/test/java/org/apache/commons/opennlp/ClasspathDataFileProvider.java
@@ -71,7 +71,7 @@
         final String resourcePath = RESOURCE_BASE_PATH + filename;
         //final InputStream in = getClass().getClassLoader().getResourceAsStream(resourcePath);
         URL dataFile = getClass().getClassLoader().getResource(resourcePath);
-        //log.debug("Resource {} found: {}", (dataFile == null ? "NOT" : ""), resourcePath);
+        //log.debug("RDFTerm {} found: {}", (dataFile == null ? "NOT" : ""), resourcePath);
         return dataFile;
     }
     @Override
diff --git a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/OntologyLookaheadMGraph.java b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/OntologyLookaheadGraph.java
similarity index 81%
rename from commons/owl/src/main/java/org/apache/stanbol/commons/owl/OntologyLookaheadMGraph.java
rename to commons/owl/src/main/java/org/apache/stanbol/commons/owl/OntologyLookaheadGraph.java
index 0072a9d..580a6cc 100644
--- a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/OntologyLookaheadMGraph.java
+++ b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/OntologyLookaheadGraph.java
@@ -18,11 +18,11 @@
 
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.stanbol.commons.owl.util.OWL2Constants;
@@ -35,27 +35,27 @@
  * @author alexdma
  * 
  */
-public class OntologyLookaheadMGraph extends SimpleMGraph {
+public class OntologyLookaheadGraph extends SimpleGraph {
 
     private Logger log = LoggerFactory.getLogger(getClass());
 
-    private UriRef ontologyIRI = null, versionIRI = null;
+    private IRI ontologyIRI = null, versionIRI = null;
 
     private int tripleCount = 0, foundIndex = -1;
 
     private int maxTriples, offset = 10;
 
-    private UriRef versionIriProperty = new UriRef(OWL2Constants.OWL_VERSION_IRI);
+    private IRI versionIriProperty = new IRI(OWL2Constants.OWL_VERSION_IRI);
 
-    public OntologyLookaheadMGraph() {
+    public OntologyLookaheadGraph() {
         this(-1, -1);
     }
 
-    public OntologyLookaheadMGraph(int maxTriples) {
+    public OntologyLookaheadGraph(int maxTriples) {
         this(maxTriples, Math.max(10, maxTriples / 10));
     }
 
-    public OntologyLookaheadMGraph(int maxTriples, int offset) {
+    public OntologyLookaheadGraph(int maxTriples, int offset) {
         if (maxTriples > 0 && offset > maxTriples) throw new IllegalArgumentException(
                 "Offset cannot be greater than the maximum triples to scan.");
         this.maxTriples = maxTriples;
@@ -64,9 +64,9 @@
 
     protected void checkOntologyId() {
         for (Iterator<Triple> it = this.filter(null, RDF.type, OWL.Ontology); it.hasNext();) {
-            NonLiteral s = it.next().getSubject();
-            if (s instanceof UriRef) {
-                ontologyIRI = (UriRef) s;
+            BlankNodeOrIRI s = it.next().getSubject();
+            if (s instanceof IRI) {
+                ontologyIRI = (IRI) s;
                 if (foundIndex <= 0) foundIndex = tripleCount;
                 break;
             }
@@ -75,9 +75,9 @@
          * TODO be more tolerant with versionIRI triples with no owl:Ontology typing?
          */
         for (Iterator<Triple> it = this.filter(null, versionIriProperty, null); it.hasNext();) {
-            Resource o = it.next().getObject();
-            if (o instanceof UriRef) {
-                versionIRI = (UriRef) o;
+            RDFTerm o = it.next().getObject();
+            if (o instanceof IRI) {
+                versionIRI = (IRI) o;
                 if (foundIndex <= 0) foundIndex = tripleCount;
                 break;
             }
@@ -94,7 +94,7 @@
         return offset;
     }
 
-    public UriRef getOntologyIRI() {
+    public IRI getOntologyIRI() {
         return ontologyIRI;
     }
 
@@ -102,7 +102,7 @@
         return tripleCount;
     }
 
-    public UriRef getVersionIRI() {
+    public IRI getVersionIRI() {
         return versionIRI;
     }
 
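
Usage stays the same apart from the rename, as the OWLUtils hunk further below shows: parse into the lookahead graph, then read the detected IDs. A minimal sketch (limit and offset values invented; 'in' is an RDF/XML InputStream):

    import java.io.InputStream;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.core.serializedform.Parser;

    OntologyLookaheadGraph graph = new OntologyLookaheadGraph(1000, 100);
    Parser.getInstance().parse(graph, in, "application/rdf+xml");
    IRI ontologyIri = graph.getOntologyIRI(); // null if no owl:Ontology found
    IRI versionIri = graph.getVersionIRI();   // null if no owl:versionIRI found
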
diff --git a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverter.java b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverter.java
index ebf2521..91598e0 100644
--- a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverter.java
+++ b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverter.java
@@ -22,9 +22,8 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.ParsingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SerializingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
@@ -40,9 +39,9 @@
  * 
  * <ul>
  * <li> a Jena Model (see {@link Model}) to a list of Clerezza triples (see {@link Triple})
- * <li> a Jena Model to a Clerezza MGraph (see {@link MGraph})
- * <li> a Clerezza MGraph a Jena Model
- * <li> a Clerezza MGraph a Jena Graph (see {@link Graph}}
+ * <li> a Jena Model to a Clerezza Graph (see {@link Graph})
+ * <li> a Clerezza Graph to a Jena Model
+ * <li> a Clerezza Graph to a Jena Graph (see {@link com.hp.hpl.jena.graph.Graph})
  * </ul>
  * 
  * 
@@ -68,7 +67,7 @@
 		
 		ArrayList<Triple> clerezzaTriples = new ArrayList<Triple>();
 		
-		MGraph mGraph = jenaModelToClerezzaMGraph(model);
+		org.apache.clerezza.commons.rdf.Graph mGraph = jenaModelToClerezzaGraph(model);
 		
 		Iterator<Triple> tripleIterator = mGraph.iterator();
 		while(tripleIterator.hasNext()){
@@ -82,13 +81,13 @@
 	
 	/**
 	 * 
-	 * Converts a Jena {@link Model} to Clerezza {@link MGraph}.
+	 * Converts a Jena {@link Model} to a Clerezza {@link Graph}.
 	 * 
 	 * @param model {@link Model}
-	 * @return the equivalent Clerezza {@link MGraph}.
+	 * @return the equivalent Clerezza {@link Graph}.
 	 */
 	
-	public static MGraph jenaModelToClerezzaMGraph(Model model){
+	public static org.apache.clerezza.commons.rdf.Graph jenaModelToClerezzaGraph(Model model){
 		
 		ByteArrayOutputStream out = new ByteArrayOutputStream();
 		model.write(out);
@@ -97,7 +96,7 @@
 		
 		ParsingProvider parser = new JenaParserProvider();		
 		
-		MGraph mGraph = new SimpleMGraph();
+		org.apache.clerezza.commons.rdf.Graph mGraph = new SimpleGraph();
 		parser.parse(mGraph,in, SupportedFormat.RDF_XML, null);
 		
 		return mGraph;
@@ -106,12 +105,12 @@
 	
 	
 	/**
-	 * Converts a Clerezza {@link MGraph} to a Jena {@link Model}.
+	 * Converts a Clerezza {@link Graph} to a Jena {@link Model}.
 	 * 
-	 * @param mGraph {@link MGraph}
+	 * @param mGraph {@link Graph}
 	 * @return the equivalent Jena {@link Model}.
 	 */
-	public static Model clerezzaMGraphToJenaModel(MGraph mGraph){
+	public static Model clerezzaGraphToJenaModel(org.apache.clerezza.commons.rdf.Graph mGraph){
 		
 		ByteArrayOutputStream out = new ByteArrayOutputStream();
 		
@@ -131,14 +130,14 @@
 	
 	
 	/**
-	 * Converts a Clerezza {@link MGraph} to a Jena {@link Graph}.
+	 * Converts a Clerezza {@link Graph} to a Jena {@link com.hp.hpl.jena.graph.Graph}.
 	 * 
-	 * @param mGraph {@link MGraph}
-	 * @return the equivalent Jena {@link Graph}.
+	 * @param mGraph {@link Graph}
+	 * @return the equivalent Jena {@link com.hp.hpl.jena.graph.Graph}.
 	 */
-	public static com.hp.hpl.jena.graph.Graph clerezzaMGraphToJenaGraph(MGraph mGraph){
+	public static com.hp.hpl.jena.graph.Graph clerezzaGraphToJenaGraph(org.apache.clerezza.commons.rdf.Graph mGraph){
 		
-		Model jenaModel = clerezzaMGraphToJenaModel(mGraph);
+		Model jenaModel = clerezzaGraphToJenaModel(mGraph);
 		if(jenaModel != null){
 			return jenaModel.getGraph();
 		}
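
A round trip through the renamed converter methods, as a minimal sketch (example resources invented):

    import org.apache.clerezza.commons.rdf.Graph;
    import com.hp.hpl.jena.rdf.model.Model;
    import com.hp.hpl.jena.rdf.model.ModelFactory;

    Model model = ModelFactory.createDefaultModel();
    model.createResource("http://example.org/s")
         .addProperty(model.createProperty("http://example.org/p"), "value");
    Graph graph = JenaToClerezzaConverter.jenaModelToClerezzaGraph(model);
    Model back = JenaToClerezzaConverter.clerezzaGraphToJenaModel(graph);
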
diff --git a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverter.java b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverter.java
index 8a75c0b..3a983f7 100644
--- a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverter.java
+++ b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverter.java
@@ -23,10 +23,9 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.ParsingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SerializingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
@@ -52,9 +51,9 @@
  * 
  * <ul>
  * <li>a Jena Model (see {@link Model}) to a list of Clerezza triples (see {@link Triple})
- * <li>a Jena Model to a Clerezza MGraph (see {@link MGraph})
- * <li>a Clerezza MGraph a Jena Model
- * <li>a Clerezza MGraph a Jena Graph (see {@link Graph}
+ * <li>a Jena Model to a Clerezza Graph (see {@link Graph})
+ * <li>a Clerezza Graph to a Jena Model
+ * <li>a Clerezza Graph to a Jena Graph (see {@link com.hp.hpl.jena.graph.Graph})
  * </ul>
  * 
  * 
@@ -82,7 +81,7 @@
      */
     public static List<Triple> owlOntologyToClerezzaTriples(OWLOntology ontology) {
         ArrayList<Triple> clerezzaTriples = new ArrayList<Triple>();
-        TripleCollection mGraph = owlOntologyToClerezzaMGraph(ontology);
+        org.apache.clerezza.commons.rdf.Graph mGraph = owlOntologyToClerezzaGraph(ontology);
         Iterator<Triple> tripleIterator = mGraph.iterator();
         while (tripleIterator.hasNext()) {
             Triple triple = tripleIterator.next();
@@ -93,22 +92,22 @@
 
     /**
      * 
-     * Converts a OWL API {@link OWLOntology} to Clerezza {@link MGraph}.
+     * Converts an OWL API {@link OWLOntology} to a Clerezza {@link Graph}.
      * 
      * @param ontology
      *            {@link OWLOntology}
-     * @return the equivalent Clerezza {@link MGraph}.
+     * @return the equivalent Clerezza {@link Graph}.
      */
 
-    public static TripleCollection owlOntologyToClerezzaMGraph(OWLOntology ontology) {
-        MGraph mGraph = null;
+    public static org.apache.clerezza.commons.rdf.Graph owlOntologyToClerezzaGraph(OWLOntology ontology) {
+        org.apache.clerezza.commons.rdf.Graph mGraph = null;
         ByteArrayOutputStream out = new ByteArrayOutputStream();
         OWLOntologyManager manager = ontology.getOWLOntologyManager();
         try {
             manager.saveOntology(ontology, new RDFXMLOntologyFormat(), out);
             ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
             ParsingProvider parser = new JenaParserProvider();
-            mGraph = new SimpleMGraph();
+            mGraph = new SimpleGraph();
             parser.parse(mGraph, in, SupportedFormat.RDF_XML, null);
         } catch (OWLOntologyStorageException e) {
             log.error("Failed to serialize OWL Ontology " + ontology + "for conversion", e);
@@ -118,20 +117,20 @@
     }
 
     /**
-     * Converts a Clerezza {@link MGraph} to an OWL API {@link OWLOntology}.
+     * Converts a Clerezza {@link Graph} to an OWL API {@link OWLOntology}.
      * 
      * @param mGraph
-     *            {@link MGraph}
+     *            {@link org.apache.clerezza.commons.rdf.Graph}
      * @return the equivalent OWL API {@link OWLOntology}.
      */
-    public static OWLOntology clerezzaGraphToOWLOntology(TripleCollection graph) {
+    public static OWLOntology clerezzaGraphToOWLOntology(org.apache.clerezza.commons.rdf.Graph graph) {
         OWLOntologyManager mgr = OWLManager.createOWLOntologyManager();
         // Never try to import
         mgr.addIRIMapper(new PhonyIRIMapper(Collections.<IRI> emptySet()));
         return clerezzaGraphToOWLOntology(graph, mgr);
     }
 
-    public static OWLOntology clerezzaGraphToOWLOntology(TripleCollection graph,
+    public static OWLOntology clerezzaGraphToOWLOntology(org.apache.clerezza.commons.rdf.Graph graph,
                                                          OWLOntologyManager ontologyManager) {
 
         /*
@@ -145,7 +144,7 @@
          * Alternatively, construct the whole reverse imports stack, then traverse it again, get the
          * OWLOntology version for each (with the phony mapper set) and add it to the merge pool
          * 
-         * If it works, just add all the triples to a TripleCollection, but no, we don't want to store that
+         * If it works, just add all the triples to a Graph, but no, we don't want to store that
          * change.
          */
 
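For reference, a minimal sketch of a round trip through the renamed converter methods, assuming the commons-owl module and the OWL API are on the classpath (the ontology IRI is illustrative):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.stanbol.commons.owl.transformation.OWLAPIToClerezzaConverter;
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.IRI;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.model.OWLOntologyCreationException;

    public class ConverterRoundTrip {
        public static void main(String[] args) throws OWLOntologyCreationException {
            OWLOntology ontology = OWLManager.createOWLOntologyManager()
                    .createOntology(IRI.create("http://example.org/test"));
            // OWL API -> Clerezza (formerly owlOntologyToClerezzaMGraph)
            Graph graph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology);
            // Clerezza -> OWL API (the signature now takes the renamed Graph type)
            OWLOntology copy = OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(graph);
            System.out.println(graph.size() + " triples, id: " + copy.getOntologyID());
        }
    }
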
diff --git a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/OWLUtils.java b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/OWLUtils.java
index 5e8b53a..566d807 100644
--- a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/OWLUtils.java
+++ b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/OWLUtils.java
@@ -21,14 +21,13 @@
 import java.io.InputStream;
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
-import org.apache.stanbol.commons.owl.OntologyLookaheadMGraph;
+import org.apache.stanbol.commons.owl.OntologyLookaheadGraph;
 import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyID;
@@ -83,20 +82,20 @@
      *            the RDF graph
      * @return the OWL ontology ID of the supplied graph, or null if it denotes an anonymous ontology.
      */
-    public static OWLOntologyID extractOntologyID(TripleCollection graph) {
+    public static OWLOntologyID extractOntologyID(Graph graph) {
         IRI ontologyIri = null, versionIri = null;
         Iterator<Triple> it = graph.filter(null, RDF.type, OWL.Ontology);
         if (it.hasNext()) {
-            NonLiteral subj = it.next().getSubject();
+            BlankNodeOrIRI subj = it.next().getSubject();
             if (it.hasNext()) {
                 log.warn("Multiple OWL ontology definitions found.");
                 log.warn("Ignoring all but {}", subj);
             }
-            if (subj instanceof UriRef) {
-                ontologyIri = IRI.create(((UriRef) subj).getUnicodeString());
-                Iterator<Triple> it2 = graph.filter(subj, new UriRef(OWL2Constants.OWL_VERSION_IRI),
+            if (subj instanceof org.apache.clerezza.commons.rdf.IRI) {
+                ontologyIri = IRI.create(((org.apache.clerezza.commons.rdf.IRI) subj).getUnicodeString());
+                Iterator<Triple> it2 = graph.filter(subj, new org.apache.clerezza.commons.rdf.IRI(OWL2Constants.OWL_VERSION_IRI),
                     null);
-                if (it2.hasNext()) versionIri = IRI.create(((UriRef) it2.next().getObject())
+                if (it2.hasNext()) versionIri = IRI.create(((org.apache.clerezza.commons.rdf.IRI) it2.next().getObject())
                         .getUnicodeString());
             }
         }
@@ -136,7 +135,7 @@
             versionIriOffset);
         BufferedInputStream bIn = new BufferedInputStream(content);
         bIn.mark(limit * 512); // set an appropriate limit
-        OntologyLookaheadMGraph graph = new OntologyLookaheadMGraph(limit, versionIriOffset);
+        OntologyLookaheadGraph graph = new OntologyLookaheadGraph(limit, versionIriOffset);
         try {
             parser.parse(graph, bIn, format);
         } catch (RuntimeException e) {
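A usage sketch for the migrated extractOntologyID(Graph), parsing into the new SimpleGraph the same way the tests below do; the resource path is illustrative, and a null result still denotes an anonymous ontology:

    import java.io.InputStream;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
    import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
    import org.apache.stanbol.commons.owl.util.OWLUtils;
    import org.semanticweb.owlapi.model.OWLOntologyID;

    public class ExtractIdExample {
        public static void main(String[] args) {
            Graph graph = new SimpleGraph();
            // illustrative resource path; any RDF/XML ontology works
            InputStream in = ExtractIdExample.class.getResourceAsStream("/owl/maincharacters.owl");
            new JenaParserProvider().parse(graph, in, SupportedFormat.RDF_XML, null);
            OWLOntologyID id = OWLUtils.extractOntologyID(graph);
            System.out.println(id != null ? id : "anonymous ontology");
        }
    }
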
diff --git a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/URIUtils.java b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/URIUtils.java
index c1d0eb1..10773c7 100644
--- a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/URIUtils.java
+++ b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/util/URIUtils.java
@@ -18,7 +18,6 @@
 
 import java.net.URI;
 
-import org.apache.clerezza.rdf.core.UriRef;
 import org.semanticweb.owlapi.model.IRI;
 
 /**
@@ -35,25 +34,25 @@
     private URIUtils() {}
 
     /**
-     * Converts a UriRef to an IRI.
+     * Converts a Clerezza IRI to an OWL API IRI.
      * 
      * @param uri
-     *            the UriRef to convert
-     * @return the IRI form of the UriRef
+     *            the Clerezza IRI to convert
+     * @return the OWL API IRI form of the Clerezza IRI
      */
-    public static IRI createIRI(UriRef uri) {
+    public static IRI createIRI(org.apache.clerezza.commons.rdf.IRI uri) {
         return IRI.create(uri.getUnicodeString());
     }
 
     /**
-     * Converts an IRI to a UriRef.
+     * Converts an OWL API IRI to a Clerezza IRI.
      * 
      * @param uri
      *            the IRI to convert
-     * @return the UriRef form of the IRI
+     * @return the Clerezza IRI form of the OWL API IRI
      */
-    public static UriRef createUriRef(IRI uri) {
-        return new UriRef(uri.toString());
+    public static org.apache.clerezza.commons.rdf.IRI createIRI(IRI uri) {
+        return new org.apache.clerezza.commons.rdf.IRI(uri.toString());
     }
 
     /**
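The former createIRI/createUriRef pair collapses into two createIRI overloads that differ only in parameter type; a minimal sketch using fully qualified names to keep the two IRI classes apart:

    public class IriConversions {
        public static void main(String[] args) {
            org.apache.clerezza.commons.rdf.IRI clerezzaIri =
                    new org.apache.clerezza.commons.rdf.IRI("http://example.org/x");
            // Clerezza -> OWL API
            org.semanticweb.owlapi.model.IRI owlApiIri =
                    org.apache.stanbol.commons.owl.util.URIUtils.createIRI(clerezzaIri);
            // OWL API -> Clerezza (formerly createUriRef)
            org.apache.clerezza.commons.rdf.IRI back =
                    org.apache.stanbol.commons.owl.util.URIUtils.createIRI(owlApiIri);
            System.out.println(clerezzaIri.equals(back)); // true
        }
    }
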
diff --git a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/web/OWLOntologyWriter.java b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/web/OWLOntologyWriter.java
index 504e68a..424a9a3 100644
--- a/commons/owl/src/main/java/org/apache/stanbol/commons/owl/web/OWLOntologyWriter.java
+++ b/commons/owl/src/main/java/org/apache/stanbol/commons/owl/web/OWLOntologyWriter.java
@@ -50,7 +50,7 @@
 import javax.ws.rs.ext.MessageBodyWriter;
 import javax.ws.rs.ext.Provider;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SerializingProvider;
 import org.apache.clerezza.rdf.jena.serializer.JenaSerializerProvider;
@@ -134,7 +134,7 @@
             // Non-native formats that require a conversion to Clerezza
             if (RDF_JSON_TYPE.equals(mediaType) || N3_TYPE.equals(mediaType)
                 || TEXT_PLAIN.equals(mediaType.toString()) || N_TRIPLE_TYPE.equals(mediaType)) {
-                TripleCollection mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(ontology);
+                Graph mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology);
                 SerializingProvider serializer = null;
                 if (RDF_JSON_TYPE.equals(mediaType)) serializer = new RdfJsonSerializingProvider();
                 else if (N3_TYPE.equals(mediaType) || N_TRIPLE_TYPE.equals(mediaType)
diff --git a/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverterTest.java b/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverterTest.java
index 4cc5013..d075a6e 100644
--- a/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverterTest.java
+++ b/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/JenaToClerezzaConverterTest.java
@@ -19,19 +19,18 @@
 import java.util.Collection;
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.commons.owl.transformation.JenaToClerezzaConverter;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.hp.hpl.jena.graph.Graph;
 import com.hp.hpl.jena.rdf.model.Model;
 import com.hp.hpl.jena.rdf.model.ModelFactory;
 import com.hp.hpl.jena.rdf.model.Property;
@@ -52,7 +51,7 @@
 public class JenaToClerezzaConverterTest {
 
 	private static Model model;
-	private static MGraph mGraph;
+	private static Graph mGraph;
 	private static String ns = "http://incubator.apache.org/stanbol/owl#";
 	private static String foaf = "http://xmlns.com/foaf/0.1/";
 	
@@ -85,14 +84,14 @@
 		 * 	EnricoDaga isA Person
 		 *  AndreaNuzzolese knows EnricoDaga
 		 */
-		mGraph = new SimpleMGraph();
+		mGraph = new SimpleGraph();
 		
-		UriRef knowsInClerezza = new UriRef(ns+"knows");
-		UriRef rdfType = new UriRef(RDF.getURI()+"type");
-		UriRef foafPersonInClerezza = new UriRef(foaf+"Person");
+		IRI knowsInClerezza = new IRI(ns+"knows");
+		IRI rdfType = new IRI(RDF.getURI()+"type");
+		IRI foafPersonInClerezza = new IRI(foaf+"Person");
 		
-		NonLiteral andreaNuzzoleseInClerezza = new UriRef(ns+"AndreaNuzzolese");		
-		NonLiteral enricoDagaInClerezza = new UriRef(ns+"EnricoDaga");
+		BlankNodeOrIRI andreaNuzzoleseInClerezza = new IRI(ns+"AndreaNuzzolese");		
+		BlankNodeOrIRI enricoDagaInClerezza = new IRI(ns+"EnricoDaga");
 		
 		Triple triple = new TripleImpl(andreaNuzzoleseInClerezza, rdfType, foafPersonInClerezza);
 		mGraph.add(triple);
@@ -105,14 +104,14 @@
 	}
 	
 	@Test
-	public void testMGraphToJenaGraph(){
+	public void testGraphToJenaGraph(){
 		/*
-		 * Convert the MGraph to a Jena Graph.
+		 * Convert the Clerezza Graph to a Jena Graph.
 		 */
-		Graph jGraph = JenaToClerezzaConverter.clerezzaMGraphToJenaGraph(mGraph);
+		com.hp.hpl.jena.graph.Graph jGraph = JenaToClerezzaConverter.clerezzaGraphToJenaGraph(mGraph);
 		
 		/*
-		 * Print all the triples contained in the Jena Graph.
+		 * Print all the triples contained in the converted Jena Graph.
 		 */
 		ExtendedIterator<com.hp.hpl.jena.graph.Triple> tripleIt = jGraph.find(null, null, null);
 		while(tripleIt.hasNext()){
@@ -122,11 +121,11 @@
 	}
 	
 	@Test
-	public void testMGraphToJenaModel(){
+	public void testGraphToJenaModel(){
 		/*
-		 * Convert the MGraph to a Jena Model.
+		 * Convert the Graph to a Jena Model.
 		 */
-		Model model = JenaToClerezzaConverter.clerezzaMGraphToJenaModel(mGraph);
+		Model model = JenaToClerezzaConverter.clerezzaGraphToJenaModel(mGraph);
 		
 		/*
 		 * Print all the triples contained in the Jena Model.
@@ -140,14 +139,14 @@
 	}
 	
 	@Test
-	public void testModelToMGraph(){
+	public void testModelToGraph(){
 		/*
-		 * Convert the Jena Model to a Clerezza MGraph.
+		 * Convert the Jena Model to a Clerezza Graph.
 		 */
-		MGraph mGraph = JenaToClerezzaConverter.jenaModelToClerezzaMGraph(model);
+		Graph mGraph = JenaToClerezzaConverter.jenaModelToClerezzaGraph(model);
 		
 		/*
-		 * Print all the triples contained in the Clerezza MGraph.
+		 * Print all the triples contained in the Clerezza Graph.
 		 */
 		Iterator<Triple> tripleIt = mGraph.iterator();
 		while(tripleIt.hasNext()){
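Condensing the setup above into a standalone sketch of the renamed construction API (IRI for UriRef, BlankNodeOrIRI for NonLiteral, SimpleGraph and TripleImpl from the impl.utils packages):

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class NewApiTriples {
        public static void main(String[] args) {
            String ns = "http://incubator.apache.org/stanbol/owl#";
            Graph graph = new SimpleGraph();
            IRI knows = new IRI("http://xmlns.com/foaf/0.1/knows");
            BlankNodeOrIRI andrea = new IRI(ns + "AndreaNuzzolese");
            BlankNodeOrIRI enrico = new IRI(ns + "EnricoDaga");
            graph.add(new TripleImpl(andrea, knows, enrico));
            System.out.println(graph.size()); // 1
        }
    }
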
diff --git a/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverterTest.java b/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverterTest.java
index 32bd873..d7d11a2 100644
--- a/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverterTest.java
+++ b/commons/owl/src/test/java/org/apache/stanbol/commons/owl/transformation/OWLAPIToClerezzaConverterTest.java
@@ -20,18 +20,16 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.commons.owl.transformation.OWLAPIToClerezzaConverter;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.semanticweb.owlapi.apibinding.OWLManager;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLAxiom;
 import org.semanticweb.owlapi.model.OWLClass;
 import org.semanticweb.owlapi.model.OWLDataFactory;
@@ -56,7 +55,7 @@
 public class OWLAPIToClerezzaConverterTest {
 
     private static OWLOntology ontology;
-    private static MGraph mGraph;
+    private static Graph mGraph;
     private static String ns = "http://incubator.apache.org/stanbol/owl#";
     private static String foaf = "http://xmlns.com/foaf/0.1/";
 
@@ -73,17 +72,17 @@
         OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
         OWLDataFactory factory = manager.getOWLDataFactory();
         try {
-            ontology = manager.createOntology(IRI.create(ns + "testOntology"));
+            ontology = manager.createOntology(org.semanticweb.owlapi.model.IRI.create(ns + "testOntology"));
         } catch (OWLOntologyCreationException e) {
             log.error(e.getMessage());
         }
 
         if (ontology != null) {
-            OWLClass personClass = factory.getOWLClass(IRI.create(foaf + "Person"));
-            OWLNamedIndividual andreaNuzzoleseOWL = factory.getOWLNamedIndividual(IRI
+            OWLClass personClass = factory.getOWLClass(org.semanticweb.owlapi.model.IRI.create(foaf + "Person"));
+            OWLNamedIndividual andreaNuzzoleseOWL = factory.getOWLNamedIndividual(org.semanticweb.owlapi.model.IRI
                     .create(ns + "AndreaNuzzolese"));
-            OWLNamedIndividual enricoDagaOWL = factory.getOWLNamedIndividual(IRI.create(ns + "EnricoDaga"));
-            OWLObjectProperty knowsOWL = factory.getOWLObjectProperty(IRI.create(foaf + "knows"));
+            OWLNamedIndividual enricoDagaOWL = factory.getOWLNamedIndividual(org.semanticweb.owlapi.model.IRI.create(ns + "EnricoDaga"));
+            OWLObjectProperty knowsOWL = factory.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI.create(foaf + "knows"));
 
             OWLAxiom axiom = factory.getOWLClassAssertionAxiom(personClass, andreaNuzzoleseOWL);
             manager.addAxiom(ontology, axiom);
@@ -99,14 +98,14 @@
          * Set-up the Clerezza model for the test. As before simply add the triples: AndreaNuzzolese isA
          * Person EnricoDaga isA Person AndreaNuzzolese knows EnricoDaga
          */
-        mGraph = new SimpleMGraph();
+        mGraph = new SimpleGraph();
 
-        UriRef knowsInClerezza = new UriRef(ns + "knows");
-        UriRef rdfType = new UriRef(RDF.getURI() + "type");
-        UriRef foafPersonInClerezza = new UriRef(foaf + "Person");
+        IRI knowsInClerezza = new IRI(ns + "knows");
+        IRI rdfType = new IRI(RDF.getURI() + "type");
+        IRI foafPersonInClerezza = new IRI(foaf + "Person");
 
-        NonLiteral andreaNuzzoleseInClerezza = new UriRef(ns + "AndreaNuzzolese");
-        NonLiteral enricoDagaInClerezza = new UriRef(ns + "EnricoDaga");
+        BlankNodeOrIRI andreaNuzzoleseInClerezza = new IRI(ns + "AndreaNuzzolese");
+        BlankNodeOrIRI enricoDagaInClerezza = new IRI(ns + "EnricoDaga");
 
         Triple triple = new TripleImpl(andreaNuzzoleseInClerezza, rdfType, foafPersonInClerezza);
         mGraph.add(triple);
@@ -117,9 +116,9 @@
     }
 
     @Test
-    public void testMGraphToOWLOntology() {
+    public void testGraphToOWLOntology() {
         /*
-         * Transform the Clerezza MGraph to an OWLOntology.
+         * Transform the Clerezza Graph to an OWLOntology.
          */
         OWLOntology ontology = OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(mGraph);
 
@@ -139,15 +138,15 @@
     }
 
     @Test
-    public void testOWLOntologyToMGraph() {
+    public void testOWLOntologyToGraph() {
 
         /*
-         * Transform the OWLOntology into a Clerezza MGraph.
+         * Transform the OWLOntology into a Clerezza Graph.
          */
-        TripleCollection mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(ontology);
+        Graph mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology);
 
         /*
-         * Print all the triples contained in the Clerezza MGraph.
+         * Print all the triples contained in the Clerezza Graph.
          */
         Iterator<Triple> tripleIt = mGraph.iterator();
         while (tripleIt.hasNext()) {
diff --git a/commons/owl/src/test/java/org/apache/stanbol/commons/owl/util/TestOWLUtils.java b/commons/owl/src/test/java/org/apache/stanbol/commons/owl/util/TestOWLUtils.java
index 17f00fd..b00dc4c 100644
--- a/commons/owl/src/test/java/org/apache/stanbol/commons/owl/util/TestOWLUtils.java
+++ b/commons/owl/src/test/java/org/apache/stanbol/commons/owl/util/TestOWLUtils.java
@@ -26,8 +26,8 @@
 
 import java.io.InputStream;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
@@ -35,7 +35,6 @@
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntologyID;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -62,9 +61,9 @@
 
     private Logger log = LoggerFactory.getLogger(getClass());
 
-    private IRI ontologyIri = IRI.create("http://stanbol.apache.org/ontologies/test");
+    private org.semanticweb.owlapi.model.IRI ontologyIri = org.semanticweb.owlapi.model.IRI.create("http://stanbol.apache.org/ontologies/test");
 
-    private UriRef uri = new UriRef("ontonet:http://stanbol.apache.org/prova");
+    private IRI uri = new IRI("ontonet:http://stanbol.apache.org/prova");
 
     /*
      * Guessing the ID of a named ontology whose IRI is near the end of the graph.
@@ -111,7 +110,7 @@
         // Minimum offset required is 2 because of an owl:versionInfo triple in-between.
         String location = "/owl/versioned.owl";
         log.info("Testing lookahead for location {}", location);
-        IRI incubatedVersion = IRI
+        org.semanticweb.owlapi.model.IRI incubatedVersion = org.semanticweb.owlapi.model.IRI
                 .create("http://svn.apache.org/repos/asf/incubator/stanbol/trunk/commons/owl/src/test/resources/owl/versioned.owl");
         OWLOntologyID expectedOntId = new OWLOntologyID(ontologyIri, incubatedVersion);
 
@@ -151,7 +150,7 @@
         // Actual distance is 102
         String location = "/owl/versioned_distance-100.owl";
         log.info("Testing lookahead for location {}", location);
-        IRI incubatedVersion = IRI
+        org.semanticweb.owlapi.model.IRI incubatedVersion = org.semanticweb.owlapi.model.IRI
                 .create("http://svn.apache.org/repos/asf/incubator/stanbol/trunk/commons/owl/src/test/resources/owl/versioned_distance-100.owl");
         OWLOntologyID expectedOntId = new OWLOntologyID(ontologyIri, incubatedVersion);
 
@@ -176,7 +175,7 @@
         // Actual distance is 102
         String location = "/owl/versioned_distance-100-reversed.owl";
         log.info("Testing lookahead for location {}", location);
-        IRI incubatedVersion = IRI
+        org.semanticweb.owlapi.model.IRI incubatedVersion = org.semanticweb.owlapi.model.IRI
                 .create("http://svn.apache.org/repos/asf/incubator/stanbol/trunk/commons/owl/src/test/resources/owl/versioned_distance-100-reversed.owl");
         OWLOntologyID expectedOntId = new OWLOntologyID(ontologyIri, incubatedVersion);
 
@@ -200,7 +199,7 @@
     public void lookaheadVersionedImmediate() throws Exception {
         String location = "/owl/versioned_immediate.owl";
         log.info("Testing lookahead for location {}", location);
-        IRI incubatedVersion = IRI
+        org.semanticweb.owlapi.model.IRI incubatedVersion = org.semanticweb.owlapi.model.IRI
                 .create("http://svn.apache.org/repos/asf/incubator/stanbol/trunk/commons/owl/src/test/resources/owl/versioned_immediate.owl");
         OWLOntologyID expectedOntId = new OWLOntologyID(ontologyIri, incubatedVersion);
 
@@ -215,27 +214,27 @@
      * Extracting the OWL ontology identifier on a *whole* ontology.
      */
     @Test
-    public void namedUriRef() throws Exception {
+    public void namedIRI() throws Exception {
         InputStream inputStream = getClass().getResourceAsStream("/owl/maincharacters.owl");
-        MGraph mg = TcManager.getInstance().createMGraph(uri);
+        Graph mg = TcManager.getInstance().createGraph(uri);
         parser.parse(mg, inputStream, "application/rdf+xml", uri);
-        assertNotNull(OWLUtils.extractOntologyID(mg.getGraph()));
+        assertNotNull(OWLUtils.extractOntologyID(mg.getImmutableGraph()));
     }
 
     /*
      * Extracting the OWL ontology identifier on a *whole* nameless ontology must return a null value.
      */
     @Test
-    public void namelessUriRef() throws Exception {
+    public void namelessIRI() throws Exception {
         InputStream inputStream = getClass().getResourceAsStream("/owl/nameless.owl");
-        MGraph mg = TcManager.getInstance().createMGraph(uri);
+        Graph mg = TcManager.getInstance().createGraph(uri);
         parser.parse(mg, inputStream, "application/rdf+xml", uri);
-        assertNull(OWLUtils.extractOntologyID(mg.getGraph()));
+        assertNull(OWLUtils.extractOntologyID(mg.getImmutableGraph()));
     }
 
     @After
     public void reset() throws Exception {
-        if (TcManager.getInstance().listTripleCollections().contains(uri)) TcManager.getInstance()
-                .deleteTripleCollection(uri);
+        if (TcManager.getInstance().listGraphs().contains(uri)) TcManager.getInstance()
+                .deleteGraph(uri);
     }
 }
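TcManager follows the same renaming seen in these tests: createMGraph, listTripleCollections and deleteTripleCollection become createGraph, listGraphs and deleteGraph, and a mutable Graph exposes a snapshot via getImmutableGraph(). A lifecycle sketch, assuming an in-memory provider is available:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.rdf.core.access.TcManager;

    public class TcManagerLifecycle {
        public static void main(String[] args) {
            TcManager tc = TcManager.getInstance();
            IRI name = new IRI("urn:x-example:graph");
            Graph graph = tc.createGraph(name);                  // was createMGraph
            ImmutableGraph snapshot = graph.getImmutableGraph(); // was getGraph
            if (tc.listGraphs().contains(name)) {                // was listTripleCollections
                tc.deleteGraph(name);                            // was deleteTripleCollection
            }
            System.out.println(snapshot.size());
        }
    }
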
diff --git a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/PermissionDefinitions.java b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/PermissionDefinitions.java
index c130ec0..921ab88 100644
--- a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/PermissionDefinitions.java
+++ b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/PermissionDefinitions.java
@@ -22,11 +22,11 @@
 import java.util.Iterator;
 import java.util.List;
 import org.osgi.service.permissionadmin.PermissionInfo;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.ontologies.OSGI;
 import org.apache.clerezza.rdf.ontologies.PERMISSION;
 import org.apache.clerezza.rdf.ontologies.SIOC;
@@ -38,9 +38,9 @@
  */
 class PermissionDefinitions {
 
-	private MGraph systemGraph;
+	private Graph systemGraph;
 
-	PermissionDefinitions(MGraph systeGraph) {
+	PermissionDefinitions(Graph systeGraph) {
 		this.systemGraph = systeGraph;
 	}
 
@@ -55,10 +55,10 @@
 		List<PermissionInfo> permInfoList = new ArrayList<PermissionInfo>();
 
 		Iterator<Triple> ownerTriples =
-				systemGraph.filter(new UriRef(location), OSGI.owner, null);
+				systemGraph.filter(new IRI(location), OSGI.owner, null);
 
 		if (ownerTriples.hasNext()) {
-			NonLiteral user = (NonLiteral) ownerTriples.next().getObject();
+			BlankNodeOrIRI user = (BlankNodeOrIRI) ownerTriples.next().getObject();
 			lookForPermissions(user, permInfoList);
 		}
 
@@ -73,16 +73,16 @@
 	 * And if the role has another role, then execute this function recursively,
 	 * until all permissions are found.
 	 * 
-	 * @param role	a <code>NonLiteral</code> which is either a user or a role
+	 * @param role	a <code>BlankNodeOrIRI</code> which is either a user or a role
 	 * @param permInfoList	a list with all the added permissions of this bundle
 	 */
-	private void lookForPermissions(NonLiteral role, List<PermissionInfo> permInfoList) {
+	private void lookForPermissions(BlankNodeOrIRI role, List<PermissionInfo> permInfoList) {
 		Iterator<Triple> permissionTriples =
 				systemGraph.filter(role, PERMISSION.hasPermission, null);
 
 		while (permissionTriples.hasNext()) {
 
-			NonLiteral permission = (NonLiteral) permissionTriples.next().getObject();
+			BlankNodeOrIRI permission = (BlankNodeOrIRI) permissionTriples.next().getObject();
 
 			Iterator<Triple> javaPermissionTriples =
 					systemGraph.filter(permission, PERMISSION.javaPermissionEntry, null);
@@ -100,7 +100,7 @@
 				systemGraph.filter(role, SIOC.has_function, null);
 
 		while (roleTriples.hasNext()) {
-			NonLiteral anotherRole = (NonLiteral) roleTriples.next().getObject();
+			BlankNodeOrIRI anotherRole = (BlankNodeOrIRI) roleTriples.next().getObject();
 			this.lookForPermissions(anotherRole, permInfoList);
 		}
 	}
diff --git a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/UserAwarePolicy.java b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/UserAwarePolicy.java
index 9a5fd74..ec57b54 100644
--- a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/UserAwarePolicy.java
+++ b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/UserAwarePolicy.java
@@ -41,12 +41,13 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.clerezza.rdf.ontologies.PERMISSION;
 import org.apache.clerezza.rdf.ontologies.PLATFORM;
 import org.apache.clerezza.rdf.ontologies.RDF;
@@ -73,7 +74,7 @@
 	final Logger logger = LoggerFactory.getLogger(UserAwarePolicy.class);
 	
 	@Reference(target=SystemConfig.SYSTEM_GRAPH_FILTER)
-	private MGraph systemGraph;
+	private Graph systemGraph;
 	
 	/**
 	 * Stores the mapping between a String describing the permission and the
@@ -175,25 +176,25 @@
 	private List<String> getAllPermissionsOfAUserByName(String userName)
 			throws UserUnregisteredException {
 
-		NonLiteral user = getUserByName(userName);
+		BlankNodeOrIRI user = getUserByName(userName);
 		
 		List<String> result = getPermissionEntriesOfAUser(user, userName);
 		Iterator<Triple> roleTriples = systemGraph.filter(user,
 				SIOC.has_function, null);
 
 		while (roleTriples.hasNext()) {
-			NonLiteral anotherRole = (NonLiteral) roleTriples.next()
+			BlankNodeOrIRI anotherRole = (BlankNodeOrIRI) roleTriples.next()
 					.getObject();
 			result.addAll(getPermissionEntriesOfARole(anotherRole, userName, user));
 		}
-		Iterator<NonLiteral> baseRoles = getResourcesOfType(PERMISSION.BaseRole);
+		Iterator<BlankNodeOrIRI> baseRoles = getResourcesOfType(PERMISSION.BaseRole);
 		while(baseRoles.hasNext()) {
 			result.addAll(getPermissionEntriesOfARole(baseRoles.next(), userName, user));
 		}
 		return result;
 	}
 
-	private NonLiteral getUserByName(String userName)
+	private BlankNodeOrIRI getUserByName(String userName)
 			throws UserUnregisteredException {
 		Iterator<Triple> triples = systemGraph.filter(null, PLATFORM.userName,
 				new PlainLiteralImpl(userName));
@@ -204,29 +205,29 @@
 		throw new UserUnregisteredException(userName);
 	}
 
-	private List<String> getPermissionEntriesOfAUser(NonLiteral user, String userName) {
+	private List<String> getPermissionEntriesOfAUser(BlankNodeOrIRI user, String userName) {
 		List<String> result = getPermissionEntriesOfARole(user, userName, user);
-		if (user instanceof UriRef) {
+		if (user instanceof IRI) {
 			synchronized(permissionProviders) {
 				for (WebIdBasedPermissionProvider p : permissionProviders) {
-					result.addAll(p.getPermissions((UriRef)user));
+					result.addAll(p.getPermissions((IRI)user));
 				}
 			}
 		}
 		return result;
 	}
 	//note that users are roles too
-	private List<String> getPermissionEntriesOfARole(NonLiteral role, String userName, NonLiteral user) {
+	private List<String> getPermissionEntriesOfARole(BlankNodeOrIRI role, String userName, BlankNodeOrIRI user) {
 		List<String> result = new ArrayList<String>();
 		Iterator<Triple> permsForRole = systemGraph.filter(role,
 				PERMISSION.hasPermission, null);
 
 		while (permsForRole.hasNext()) {
 			Iterator<Triple> javaPermForRole = systemGraph.filter(
-					(BNode) permsForRole.next().getObject(),
+					(BlankNode) permsForRole.next().getObject(),
 					PERMISSION.javaPermissionEntry, null);
 			if (javaPermForRole.hasNext()) {
-				PlainLiteralImpl permissionEntry = (PlainLiteralImpl) javaPermForRole
+				Literal permissionEntry = (Literal) javaPermForRole
 						.next().getObject();
 				String permission = permissionEntry.getLexicalForm();
 				if(permission.contains("{username}")) {
@@ -238,10 +239,10 @@
 		return result;
 	}
 	
-	private Iterator<NonLiteral> getResourcesOfType(UriRef type) {
+	private Iterator<BlankNodeOrIRI> getResourcesOfType(IRI type) {
 		final Iterator<Triple> triples =
 				systemGraph.filter(null, RDF.type, type);
-		return new Iterator<NonLiteral>() {
+		return new Iterator<BlankNodeOrIRI>() {
 
 			@Override
 			public boolean hasNext() {
@@ -249,7 +250,7 @@
 			}
 
 			@Override
-			public NonLiteral next() {
+			public BlankNodeOrIRI next() {
 				return triples.next().getSubject();
 			}
 
diff --git a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/WebIdBasedPermissionProvider.java b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/WebIdBasedPermissionProvider.java
index 9f77ab2..056d63a 100644
--- a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/WebIdBasedPermissionProvider.java
+++ b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/WebIdBasedPermissionProvider.java
@@ -18,7 +18,7 @@
 package org.apache.stanbol.commons.security;
 
 import java.util.Collection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Services implementing this interface provide additional permissions
@@ -37,5 +37,5 @@
 	 * @param webId the uri identifying the user (aka Web-Id)
 	 * @return the string descriptions of the permissions
 	 */
-	Collection<String> getPermissions(UriRef webId);
+	Collection<String> getPermissions(IRI webId);
 }
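A hypothetical implementor of the migrated interface; the class name and the blanket grant are illustrative, with the permission-string syntax taken from the javaPermissionEntry literals used elsewhere in this commit:

    import java.util.Collection;
    import java.util.Collections;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.commons.security.WebIdBasedPermissionProvider;

    public class AllowAllPermissionProvider implements WebIdBasedPermissionProvider {
        @Override
        public Collection<String> getPermissions(IRI webId) {
            // grants everything regardless of the Web-Id; for illustration only
            return Collections.singletonList("(java.security.AllPermission \"\" \"\")");
        }
    }
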
diff --git a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/auth/AuthenticationCheckerImpl.java b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/auth/AuthenticationCheckerImpl.java
index 3845e27..3c16a33 100644
--- a/commons/security/core/src/main/java/org/apache/stanbol/commons/security/auth/AuthenticationCheckerImpl.java
+++ b/commons/security/core/src/main/java/org/apache/stanbol/commons/security/auth/AuthenticationCheckerImpl.java
@@ -25,16 +25,16 @@
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
 import org.apache.stanbol.commons.security.PasswordUtil;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.clerezza.rdf.ontologies.PERMISSION;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.clerezza.platform.config.SystemConfig;
-import org.apache.clerezza.rdf.core.access.LockableMGraph;
 import org.apache.clerezza.rdf.ontologies.PLATFORM;
 
 /**
@@ -50,7 +50,7 @@
 	private final static Logger logger = LoggerFactory.getLogger(AuthenticationCheckerImpl.class);
 
 	@Reference(target=SystemConfig.SYSTEM_GRAPH_FILTER)
-	private LockableMGraph systemGraph;
+	private Graph systemGraph;
 
 	/**
 	 * Checks if the provided username and password matches a username and
@@ -68,7 +68,7 @@
 		if (security != null) {
 			AccessController.checkPermission(new CheckAuthenticationPermission());
 		}
-		NonLiteral agent = getAgentFromGraph(userName);
+		BlankNodeOrIRI agent = getAgentFromGraph(userName);
 		String storedPassword = getPasswordOfAgent(agent);
 		if (storedPassword.equals(PasswordUtil.convertPassword(password))) {
 			logger.debug("user {} successfully authenticated", userName);
@@ -79,8 +79,8 @@
 		}
 	}
 
-	private NonLiteral getAgentFromGraph(String userName) throws NoSuchAgent {
-		NonLiteral agent;
+	private BlankNodeOrIRI getAgentFromGraph(String userName) throws NoSuchAgent {
+		BlankNodeOrIRI agent;
 		Lock l = systemGraph.getLock().readLock();
 		l.lock();
 		try {
@@ -97,7 +97,7 @@
 		return agent;
 	}
 
-	private String getPasswordOfAgent(NonLiteral agent) {
+	private String getPasswordOfAgent(BlankNodeOrIRI agent) {
 		String storedPassword = "";
 		Lock l = systemGraph.getLock().readLock();
 		l.lock();
diff --git a/commons/security/core/src/test/java/org/apache/stanbol/commons/security/PermissionDefinitionsTest.java b/commons/security/core/src/test/java/org/apache/stanbol/commons/security/PermissionDefinitionsTest.java
index c81a13a..09fa125 100644
--- a/commons/security/core/src/test/java/org/apache/stanbol/commons/security/PermissionDefinitionsTest.java
+++ b/commons/security/core/src/test/java/org/apache/stanbol/commons/security/PermissionDefinitionsTest.java
@@ -23,8 +23,8 @@
 
 import org.junit.*;
 import org.osgi.service.permissionadmin.PermissionInfo;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.stanbol.commons.security.PermissionDefinitions;
 
@@ -52,11 +52,11 @@
 	@Before
 	public void setUp() {
 
-		final Graph graph = Parser.getInstance()
+		final ImmutableGraph graph = Parser.getInstance()
 				.parse(getClass().getResourceAsStream("systemgraph.nt"),
 						"text/rdf+n3");		
 		this.permissionDefinitions = new PermissionDefinitions(
-				new SimpleMGraph(graph.iterator()));
+				new SimpleGraph(graph.iterator()));
 
 		this.allPermissions = new PermissionInfo[] {
 				new PermissionInfo(
diff --git a/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/Ontology.java b/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/Ontology.java
index 781b3f4..cc2e477 100644
--- a/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/Ontology.java
+++ b/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/Ontology.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.commons.usermanagement;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public final class Ontology {
 
@@ -28,19 +28,19 @@
    public final static String userManagementBase = "http://stanbol.apache.org/ontologies/usermanagement#";
     
 	
-	public final static UriRef EditableUser = 
-			new UriRef("EditableUser");
+	public final static IRI EditableUser = 
+			new IRI("EditableUser");
 	
-	public final static UriRef Change = 
-			new UriRef(userManagementBase + "Change");
+	public final static IRI Change = 
+			new IRI(userManagementBase + "Change");
 	
-	public final static UriRef predicate = 
-			new UriRef(userManagementBase + "predicate");
+	public final static IRI predicate = 
+			new IRI(userManagementBase + "predicate");
 	
-	public final static UriRef oldValue = 
-			new UriRef(userManagementBase + "oldValue");
+	public final static IRI oldValue = 
+			new IRI(userManagementBase + "oldValue");
 	
-	public final static UriRef newValue = 
-			new UriRef(userManagementBase + "newValue");
+	public final static IRI newValue = 
+			new IRI(userManagementBase + "newValue");
 
 }
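These constants form the change-request vocabulary consumed by UserResource.changeUser() further down. A hedged sketch of the graph a client would assemble (emailChange is an invented helper; oldValue is optional, matching the hasNext() guard in changeUser):

    import org.apache.clerezza.commons.rdf.BlankNode;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.ontologies.FOAF;
    import org.apache.clerezza.rdf.ontologies.PLATFORM;
    import org.apache.stanbol.commons.usermanagement.Ontology;

    public class ChangeRequestSketch {
        public static Graph emailChange(String userName, String newEmail) {
            Graph g = new SimpleGraph();
            BlankNode change = new BlankNode();
            g.add(new TripleImpl(change, PLATFORM.userName, new PlainLiteralImpl(userName)));
            g.add(new TripleImpl(change, Ontology.predicate, FOAF.mbox));
            g.add(new TripleImpl(change, Ontology.newValue, new IRI("mailto:" + newEmail)));
            return g; // serialize as Turtle and POST to the change-user endpoint
        }
    }
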
diff --git a/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/resource/UserResource.java b/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/resource/UserResource.java
index a62038f..1ce13ff 100644
--- a/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/resource/UserResource.java
+++ b/commons/security/usermanagement/src/main/java/org/apache/stanbol/commons/usermanagement/resource/UserResource.java
@@ -16,10 +16,7 @@
  */
 package org.apache.stanbol.commons.usermanagement.resource;
 
-import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
 import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.security.Policy;
@@ -31,7 +28,6 @@
 import java.util.concurrent.locks.Lock;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
 import javax.ws.rs.FormParam;
 import javax.ws.rs.GET;
 import javax.ws.rs.POST;
@@ -40,7 +36,6 @@
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
-import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.CacheControl;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
@@ -48,21 +43,17 @@
 import javax.ws.rs.core.UriBuilder;
 import javax.ws.rs.core.UriInfo;
 import org.apache.clerezza.platform.config.SystemConfig;
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.access.LockableMGraph;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleGraph;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
@@ -74,8 +65,6 @@
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.clerezza.rdf.ontologies.SIOC;
 import org.apache.clerezza.rdf.utils.GraphNode;
-import org.apache.clerezza.rdf.utils.MGraphUtils;
-import org.apache.clerezza.rdf.utils.MGraphUtils.NoSuchSubGraphException;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
@@ -98,7 +87,7 @@
 
     private static Logger log = LoggerFactory.getLogger(UserResource.class);
     @Reference(target = SystemConfig.SYSTEM_GRAPH_FILTER)
-    private LockableMGraph systemGraph;
+    private Graph systemGraph;
     @Reference
     private Serializer serializer;
     @Reference
@@ -106,7 +95,7 @@
     private static GraphNode dummyNode;
 
     static {
-        dummyNode = new GraphNode(new BNode(), new SimpleMGraph());
+        dummyNode = new GraphNode(new BlankNode(), new SimpleGraph());
         dummyNode.addProperty(RDF.type, FOAF.Agent);
     }
     // **********************************
@@ -128,11 +117,11 @@
      */
     @GET
     @Path("users/{username}")
-    public TripleCollection getUserContext(@PathParam("username") String userName)
+    public Graph getUserContext(@PathParam("username") String userName)
             throws UnsupportedEncodingException {
         GraphNode userNode = getUser(userName);
         if (userNode == null) { // a kludge
-            return new SimpleMGraph();
+            return new SimpleGraph();
         }
         return userNode.getNodeContext();
     }
@@ -180,9 +169,9 @@
     @GET
     @Path("roles/{username}")
     @Produces(SupportedFormat.TURTLE)
-    public TripleCollection getUserRoles(@PathParam("username") String userName)
+    public Graph getUserRoles(@PathParam("username") String userName)
             throws UnsupportedEncodingException {
-        MGraph rolesGraph = getUserRolesGraph(userName);
+        Graph rolesGraph = getUserRolesGraph(userName);
 
         // case of no roles not handled - what best to return : empty graph or
         // 404?
@@ -238,7 +227,7 @@
     @POST
     @Consumes(SupportedFormat.TURTLE)
     @Path("change-user")
-    public Response changeUser(Graph inputGraph) {
+    public Response changeUser(ImmutableGraph inputGraph) {
 
         Lock readLock = systemGraph.getLock().readLock();
         readLock.lock();
@@ -252,7 +241,7 @@
         if (changes.hasNext()) {
             Triple changeTriple = changes.next();
 
-            NonLiteral changeNode = changeTriple.getSubject();
+            BlankNodeOrIRI changeNode = changeTriple.getSubject();
 
             Literal userName = (Literal) inputGraph
                     .filter(changeNode, PLATFORM.userName, null).next()
@@ -262,10 +251,10 @@
                     .filter(null, PLATFORM.userName, userName);
 
             //     if (userTriples.hasNext()) {
-            NonLiteral userNode = userTriples.next()
+            BlankNodeOrIRI userNode = userTriples.next()
                     .getSubject();
 
-            UriRef predicateUriRef = (UriRef) inputGraph
+            IRI predicateIRI = (IRI) inputGraph
                     .filter(changeNode, Ontology.predicate, null).next()
                     .getObject();
 
@@ -273,25 +262,25 @@
             Iterator<Triple> iterator = inputGraph.filter(changeNode,
                     Ontology.oldValue, null);
 
-            Resource oldValue = null;
+            RDFTerm oldValue = null;
 
             if (iterator.hasNext()) {
 
                 oldValue = iterator.next().getObject();
-                // Triple oldTriple = systemGraph.filter(null, predicateUriRef,
+                // Triple oldTriple = systemGraph.filter(null, predicateIRI,
                 // oldValue).next();
                 Iterator<Triple> oldTriples = systemGraph.filter(userNode,
-                        predicateUriRef, oldValue);
+                        predicateIRI, oldValue);
                 if (oldTriples.hasNext()) {
                     oldTriple = oldTriples.next();
                 }
             }
 
-            Resource newValue = inputGraph
+            RDFTerm newValue = inputGraph
                     .filter(changeNode, Ontology.newValue, null).next()
                     .getObject();
 
-            newTriple = new TripleImpl(userNode, predicateUriRef,
+            newTriple = new TripleImpl(userNode, predicateIRI,
                     newValue);
             // }
         }
@@ -346,7 +335,7 @@
             readLock.unlock();
         }
 
-        MGraph rolesGraph = getUserRolesGraph(userName);
+        Graph rolesGraph = getUserRolesGraph(userName);
 
         ArrayList<String> userRoleNames = new ArrayList<String>();
 
@@ -400,7 +389,7 @@
     @PUT
     @Path("users/{username}")
     @Consumes(SupportedFormat.TURTLE)
-    public Response createUser(@Context UriInfo uriInfo, @PathParam("username") String userName, Graph inputGraph) {
+    public Response createUser(@Context UriInfo uriInfo, @PathParam("username") String userName, ImmutableGraph inputGraph) {
         Lock writeLock = systemGraph.getLock().writeLock();
         writeLock.lock();
         systemGraph.addAll(inputGraph);
@@ -439,11 +428,11 @@
     @POST
     @Consumes(SupportedFormat.TURTLE)
     @Path("add-user")
-    public Response addUser(@Context UriInfo uriInfo, Graph inputGraph) {
+    public Response addUser(@Context UriInfo uriInfo, ImmutableGraph inputGraph) {
 
         Iterator<Triple> agents = inputGraph.filter(null, null, FOAF.Agent);
 
-        NonLiteral userNode = agents.next().getSubject();
+        BlankNodeOrIRI userNode = agents.next().getSubject();
 
         Iterator<Triple> userTriples = inputGraph.filter(userNode, null, null);
 
@@ -492,8 +481,8 @@
      * @param userName
      */
     private void remove(String userName) {
-        Resource userResource = getNamedUser(userName).getNode();
-        Iterator<Triple> userTriples = systemGraph.filter((NonLiteral) userResource, null, null);
+        RDFTerm userResource = getNamedUser(userName).getNode();
+        Iterator<Triple> userTriples = systemGraph.filter((BlankNodeOrIRI) userResource, null, null);
 
         ArrayList<Triple> buffer = new ArrayList<Triple>();
 
@@ -508,7 +497,7 @@
             readLock.unlock();
         }
 
-        // Graph context = getNamedUser(userName).getNodeContext();
+        // ImmutableGraph context = getNamedUser(userName).getNodeContext();
         Lock writeLock = systemGraph.getLock().writeLock();
         writeLock.lock();
         try {
@@ -544,7 +533,7 @@
     @POST
     @Consumes(SupportedFormat.TURTLE)
     @Path("delete-user")
-    public Response deleteUser(Graph inputGraph) {
+    public Response deleteUser(ImmutableGraph inputGraph) {
 
         Iterator<Triple> userNameTriples = inputGraph.filter(null,
                 PLATFORM.userName, null);
@@ -662,7 +651,7 @@
 
         try {
             while (roleIterator.hasNext()) {
-                NonLiteral role = roleIterator.next().getSubject();
+                BlankNodeOrIRI role = roleIterator.next().getSubject();
                 Iterator<Triple> roleNameTriples = systemGraph.filter(role, DC.title,
                         null);
                 while (roleNameTriples.hasNext()) {
@@ -706,8 +695,8 @@
      * @param userName
      */
     private void deleteRole(String roleName) {
-        Resource roleResource = getNamedRole(roleName).getNode();
-        Iterator<Triple> roleTriples = systemGraph.filter((NonLiteral) roleResource, null, null);
+        RDFTerm roleResource = getNamedRole(roleName).getNode();
+        Iterator<Triple> roleTriples = systemGraph.filter((BlankNodeOrIRI) roleResource, null, null);
 
         ArrayList<Triple> buffer = new ArrayList<Triple>();
 
@@ -766,7 +755,7 @@
      * @return user node in system graph
      */
     private GraphNode createRole(String newRoleName, String comment) {
-        BNode subject = new BNode();
+        BlankNode subject = new BlankNode();
         GraphNode roleNode = new GraphNode(subject, systemGraph);
         roleNode.addProperty(RDF.type, PERMISSION.Role);
         roleNode.addProperty(DC.title, new PlainLiteralImpl(newRoleName));
@@ -779,7 +768,7 @@
             String comment,
             List<String> permissions) {
 
-        NonLiteral roleResource = (NonLiteral) roleNode.getNode();
+        BlankNodeOrIRI roleResource = (BlankNodeOrIRI) roleNode.getNode();
 
         if (permissions != null) {
             clearPermissions(roleResource);
@@ -899,10 +888,10 @@
             changeLiteral(userNode, PERMISSION.passwordSha1, passwordSha1);
         }
         if (email != null && !email.equals("")) {
-            changeResource(userNode, FOAF.mbox, new UriRef("mailto:" + email));
+            changeResource(userNode, FOAF.mbox, new IRI("mailto:" + email));
         }
 
-        NonLiteral userResource = (NonLiteral) userNode.getNode();
+        BlankNodeOrIRI userResource = (BlankNodeOrIRI) userNode.getNode();
 
         if (roles != null) {
             clearRoles(userResource);
@@ -964,7 +953,7 @@
         try {
             while (permissionTriples.hasNext()) {
                 Triple triple = permissionTriples.next();
-                Resource permissionResource = triple.getObject();
+                RDFTerm permissionResource = triple.getObject();
                 buffer.add(new GraphNode(permissionResource, systemGraph));
             }
         } finally {
@@ -988,13 +977,13 @@
      * @param userName
      * @return roles graph
      */
-    private MGraph getUserRolesGraph(String userName) {
+    private Graph getUserRolesGraph(String userName) {
         GraphNode userNode = getUser(userName);
 
-        Iterator<Resource> functionIterator = userNode
+        Iterator<RDFTerm> functionIterator = userNode
                 .getObjects(SIOC.has_function);
 
-        SimpleMGraph rolesGraph = new SimpleMGraph();
+        SimpleGraph rolesGraph = new SimpleGraph();
 
         while (functionIterator.hasNext()) {
 
@@ -1002,14 +991,14 @@
                     systemGraph);
 
             Iterator<Triple> roleIterator = systemGraph.filter(
-                    (NonLiteral) functionNode.getNode(), RDF.type,
+                    (BlankNodeOrIRI) functionNode.getNode(), RDF.type,
                     PERMISSION.Role);
 
             // needs lock?
             while (roleIterator.hasNext()) {
                 Triple roleTriple = roleIterator.next();
                 // rolesGraph.add(roleTriple);
-                NonLiteral roleNode = roleTriple.getSubject();
+                BlankNodeOrIRI roleNode = roleTriple.getSubject();
                 SimpleGraph detailsGraph = new SimpleGraph(systemGraph.filter(
                         roleNode, null, null));
                 rolesGraph.addAll(detailsGraph);
@@ -1025,7 +1014,7 @@
      * @return user node in system graph
      */
     private GraphNode createUser(String newUserName) {
-        BNode subject = new BNode();
+        BlankNode subject = new BlankNode();
 
         GraphNode userNode = new GraphNode(subject, systemGraph);
         userNode.addProperty(RDF.type, FOAF.Agent);
@@ -1036,7 +1025,7 @@
     // move later?
     public final static String rolesBase = "urn:x-localhost/role/";
 
-    private void clearRoles(NonLiteral userResource) {
+    private void clearRoles(BlankNodeOrIRI userResource) {
         systemGraph.removeAll(filterToArray(userResource, SIOC.has_function, null));
     }
 
@@ -1048,7 +1037,7 @@
      * @param object
      * @return
      */
-    private List<Triple> filterToArray(NonLiteral subject, UriRef predicate, Resource object) {
+    private List<Triple> filterToArray(BlankNodeOrIRI subject, IRI predicate, RDFTerm object) {
         Iterator<Triple> triples = systemGraph.filter(subject, predicate, object);
         ArrayList<Triple> buffer = new ArrayList<Triple>();
         Lock readLock = systemGraph.getLock().readLock();
@@ -1077,12 +1066,12 @@
 
         // otherwise make a new one as a named node
         if (roleNode == null) {
-            UriRef roleUriRef = new UriRef(rolesBase + roleName);
+            IRI roleIRI = new IRI(rolesBase + roleName);
 
-            roleNode = new GraphNode(roleUriRef, systemGraph);
+            roleNode = new GraphNode(roleIRI, systemGraph);
             roleNode.addProperty(RDF.type, PERMISSION.Role);
             roleNode.addProperty(DC.title, new PlainLiteralImpl(roleName));
-            userNode.addProperty(SIOC.has_function, roleUriRef);
+            userNode.addProperty(SIOC.has_function, roleIRI);
         } else {
             userNode.addProperty(SIOC.has_function, roleNode.getNode());
         }
@@ -1095,7 +1084,7 @@
         if (hasPermission(subjectNode, permissionString)) {
             return subjectNode;
         }
-        GraphNode permissionNode = new GraphNode(new BNode(), systemGraph);
+        GraphNode permissionNode = new GraphNode(new BlankNode(), systemGraph);
         permissionNode.addProperty(RDF.type, PERMISSION.Permission);
         // permissionNode.addProperty(DC.title, new PlainLiteralImpl(permissionName));
         subjectNode.addProperty(PERMISSION.hasPermission, permissionNode.getNode());
@@ -1105,12 +1094,12 @@
 
     private boolean hasPermission(GraphNode userNode, String permissionString) {
         boolean has = false;
-        Iterator<Triple> existingPermissions = systemGraph.filter((NonLiteral) userNode.getNode(), PERMISSION.hasPermission, null);
+        Iterator<Triple> existingPermissions = systemGraph.filter((BlankNodeOrIRI) userNode.getNode(), PERMISSION.hasPermission, null);
         Lock readLock = systemGraph.getLock().readLock();
         readLock.lock();
         try { // check to see if the user already has this permission
             while (existingPermissions.hasNext()) {
-                NonLiteral permissionNode = (NonLiteral) existingPermissions.next().getObject();
+                BlankNodeOrIRI permissionNode = (BlankNodeOrIRI) existingPermissions.next().getObject();
                 Iterator<Triple> permissionTriples = systemGraph.filter(permissionNode, PERMISSION.javaPermissionEntry, null);
                 while (permissionTriples.hasNext()) {
                     Literal permission = (Literal) permissionTriples.next().getObject();
@@ -1132,7 +1121,7 @@
 //                <http://clerezza.org/2008/10/permission#javaPermissionEntry>
 //                        "(java.security.AllPermission \"\" \"\")"
 //              ] ;
-    private void clearPermissions(NonLiteral subject) {
+    private void clearPermissions(BlankNodeOrIRI subject) {
         ArrayList<Triple> buffer = new ArrayList<Triple>();
 
         Lock readLock = systemGraph.getLock().readLock();
@@ -1142,7 +1131,7 @@
             while (permissions.hasNext()) {
                 Triple permissionTriple = permissions.next();
                 buffer.add(permissionTriple);
-                NonLiteral permissionNode = (NonLiteral) permissionTriple.getObject();
+                BlankNodeOrIRI permissionNode = (BlankNodeOrIRI) permissionTriple.getObject();
                 Iterator<Triple> permissionTriples = systemGraph.filter(permissionNode, null, null);
                 while (permissionTriples.hasNext()) {
                     buffer.add(permissionTriples.next());
@@ -1160,7 +1149,7 @@
     private GraphNode getTitleNode(String title) {
         Iterator<Triple> triples = systemGraph.filter(null, DC.title, new PlainLiteralImpl(title));
         if (triples.hasNext()) {
-            Resource resource = triples.next().getSubject();
+            RDFTerm resource = triples.next().getSubject();
             return new GraphNode(resource, systemGraph);
         }
         return null;
@@ -1176,15 +1165,15 @@
      * @param predicate property of the triple to change
      * @param newValue new value for given predicate
      */
-    private void changeLiteral(GraphNode userNode, UriRef predicate,
+    private void changeLiteral(GraphNode userNode, IRI predicate,
             String newValue) {
 
         Iterator<Triple> oldTriples = systemGraph.filter(
-                (NonLiteral) userNode.getNode(), predicate, null);
+                (BlankNodeOrIRI) userNode.getNode(), predicate, null);
 
         ArrayList<Triple> oldBuffer = new ArrayList<Triple>();
 
-        Resource oldObject = null;
+        RDFTerm oldObject = null;
 
         Lock readLock = systemGraph.getLock().readLock();
         readLock.lock();
@@ -1201,7 +1190,7 @@
         // filter appears to see plain literals and xsd:strings as different,
         // so we do not simply call
         // userNode.addPropertyValue(predicate, newValue);
-        PlainLiteral newObject = new PlainLiteralImpl(newValue);
+        Literal newObject = new PlainLiteralImpl(newValue);
         userNode.addProperty(predicate, newObject);
 
         if (newObject.equals(oldObject)) {
@@ -1218,11 +1207,11 @@
      * @param predicate property of the triple to change
      * @param newValue new value for given predicate
      */
-    private void changeResource(GraphNode userNode, UriRef predicate,
-            UriRef newValue) {
+    private void changeResource(GraphNode userNode, IRI predicate,
+            IRI newValue) {
 
         Iterator<Triple> oldTriples = systemGraph.filter(
-                (NonLiteral) userNode.getNode(), predicate, null);
+                (BlankNodeOrIRI) userNode.getNode(), predicate, null);
 
         ArrayList<Triple> oldBuffer = new ArrayList<Triple>();
 
@@ -1232,7 +1221,7 @@
             while (oldTriples.hasNext()) {
                 Triple triple = oldTriples.next();
 
-                Resource oldValue = triple.getObject();
+                RDFTerm oldValue = triple.getObject();
                 if (newValue.equals(oldValue)) {
                     return;
                 }
@@ -1272,7 +1261,7 @@
         return getResourcesOfType(FOAF.Agent);
     }
 
-    private Set<GraphNode> getResourcesOfType(UriRef type) {
+    private Set<GraphNode> getResourcesOfType(IRI type) {
         Lock readLock = systemGraph.getLock().readLock();
         readLock.lock();
         try {
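
The hunks above show the cast pattern that recurs throughout this migration: Resource becomes RDFTerm, and any node used in a subject position is narrowed to BlankNodeOrIRI, because triple subjects are never literals. A minimal, self-contained sketch of the filter idiom under the Clerezza 1.0 API follows; the graph contents and IRIs are illustrative only, not taken from the Stanbol codebase.

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class SubjectFilterSketch {
        public static void main(String[] args) {
            Graph graph = new SimpleGraph();
            IRI alice = new IRI("http://example.org/alice");
            IRI knows = new IRI("http://xmlns.com/foaf/0.1/knows");
            graph.add(new TripleImpl(alice, knows, new IRI("http://example.org/bob")));

            // GraphNode.getNode() hands back an RDFTerm; before it can be used
            // in the subject position of filter() it has to be narrowed,
            // because triple subjects are never literals.
            RDFTerm node = alice;
            Iterator<Triple> matches = graph.filter((BlankNodeOrIRI) node, knows, null);
            while (matches.hasNext()) {
                System.out.println(matches.next().getObject());
            }
        }
    }

The unchecked cast is safe in the code above because the nodes in question are created as IRIs or blank nodes in the first place.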
diff --git a/commons/solr/core/src/main/java/org/apache/stanbol/commons/solr/utils/StanbolResourceLoader.java b/commons/solr/core/src/main/java/org/apache/stanbol/commons/solr/utils/StanbolResourceLoader.java
index 8bf8dc9..27d7c6c 100644
--- a/commons/solr/core/src/main/java/org/apache/stanbol/commons/solr/utils/StanbolResourceLoader.java
+++ b/commons/solr/core/src/main/java/org/apache/stanbol/commons/solr/utils/StanbolResourceLoader.java
@@ -98,7 +98,7 @@
             in = classloader.getResourceAsStream(resource);
         }
         if(in == null){
-            throw new IOException("Unable to load Resource '"+resource+"' from "
+            throw new IOException("Unable to load RDFTerm '"+resource+"' from "
                 + (parent != null ? ("parent (message: "+parentMessage+") and from") : "")
                 + "classpath!");
         }
diff --git a/commons/solr/install/src/main/java/org/apache/stanbol/commons/solr/install/impl/SolrIndexInstaller.java b/commons/solr/install/src/main/java/org/apache/stanbol/commons/solr/install/impl/SolrIndexInstaller.java
index 6155cde..a8e4f6b 100644
--- a/commons/solr/install/src/main/java/org/apache/stanbol/commons/solr/install/impl/SolrIndexInstaller.java
+++ b/commons/solr/install/src/main/java/org/apache/stanbol/commons/solr/install/impl/SolrIndexInstaller.java
@@ -262,7 +262,7 @@
         String extension = FilenameUtils.getExtension(filePath);
         String archiveFormat = SUPPORTED_SOLR_ARCHIVE_FORMAT.get(extension);
         if (archiveFormat == null) {
-            log.error("Unable to process Solr Index Archive from Resource " + registeredResource.getURL()
+            log.error("Unable to process Solr Index Archive from RDFTerm " + registeredResource.getURL()
                       + "because of unsupported archive format \"" + extension + "\" (supported are "
                       + SUPPORTED_SOLR_ARCHIVE_FORMAT.keySet() + ")");
             return null;
diff --git a/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/impl/ManagedSolrServerImpl.java b/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/impl/ManagedSolrServerImpl.java
index 73365c6..56202ab 100644
--- a/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/impl/ManagedSolrServerImpl.java
+++ b/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/impl/ManagedSolrServerImpl.java
@@ -985,7 +985,7 @@
                                        //not available
                                         ais = null;
                                     } catch (ArchiveException e) {
-                                        log.error("Unable to open ArchiveInputStream for Resource '"+
+                                        log.error("Unable to open ArchiveInputStream for RDFTerm '"+
                                             archive+"'!",e);
                                         ais = null;
                                     }
@@ -1027,7 +1027,7 @@
             try {
                 ais = ManagementUtils.getArchiveInputStream(resourceName, is);
             } catch (ArchiveException e) {
-                log.error("Unable to open ArchiveInputStream for Resource '"+
+                log.error("Unable to open ArchiveInputStream for RDFTerm '"+
                     resourceName+"'!",e);
                 ais = null;
             }
diff --git a/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/standalone/ClassPathDataFileProvider.java b/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/standalone/ClassPathDataFileProvider.java
index 89a30a7..38efa18 100644
--- a/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/standalone/ClassPathDataFileProvider.java
+++ b/commons/solr/managed/src/main/java/org/apache/stanbol/commons/solr/managed/standalone/ClassPathDataFileProvider.java
@@ -109,7 +109,7 @@
         // load default OpenNLP models from classpath (embedded in the defaultdata bundle)
         final String resourcePath = path + filename;
         final URL dataFile = getClass().getClassLoader().getResource(resourcePath);
-        //log.debug("Resource {} found: {}", (in == null ? "NOT" : ""), resourcePath);
+        //log.debug("RDFTerm {} found: {}", (in == null ? "NOT" : ""), resourcePath);
         return dataFile;
     }
 
diff --git a/commons/stanboltools/bundledatafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/bundle/impl/BundleDataFileProvider.java b/commons/stanboltools/bundledatafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/bundle/impl/BundleDataFileProvider.java
index 71876d9..07a734d 100644
--- a/commons/stanboltools/bundledatafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/bundle/impl/BundleDataFileProvider.java
+++ b/commons/stanboltools/bundledatafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/bundle/impl/BundleDataFileProvider.java
@@ -98,7 +98,7 @@
     public InputStream getInputStream(String bundleSymbolicName,
             String filename, Map<String, String> comments) throws IOException {
         URL resource = getDataFile(bundleSymbolicName, filename);
-        log.debug("Resource {} found: {}", (resource == null ? "NOT" : ""), filename);
+        log.debug("RDFTerm {} found: {}", (resource == null ? "NOT" : ""), filename);
         return resource != null ? resource.openStream() : null;
     }
 
diff --git a/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/DataFileProvider.java b/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/DataFileProvider.java
index ef7537c..e0c24e0 100644
--- a/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/DataFileProvider.java
+++ b/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/DataFileProvider.java
@@ -62,7 +62,7 @@
      * @param filename name of the file to open
      * @param comments Optional - how to get a more complete version
      *        of the data file, licensing information, etc.
-     * @return <code>true</code> if the requested Resource is available.
+     * @return <code>true</code> if the requested Resource is available.
      * Otherwise <code>false</code>
      */
     boolean isAvailable(String bundleSymbolicName,
diff --git a/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/DataFileTrackerImpl.java b/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/DataFileTrackerImpl.java
index 8040514..330bef5 100644
--- a/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/DataFileTrackerImpl.java
+++ b/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/DataFileTrackerImpl.java
@@ -224,7 +224,7 @@
                 trackedResources.put(r, trackingState);
             }
             trackingState.addListener(resourceListener);
-            if(!trackedResources.isEmpty()){ //maybe this was the first added Resource
+            if(!trackedResources.isEmpty()){ //maybe this was the first added Resource
                 startTracking(); //so we might want to start tracking
             }
         }
diff --git a/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/TrackingState.java b/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/TrackingState.java
index 15fd15c..d5f26e2 100644
--- a/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/TrackingState.java
+++ b/commons/stanboltools/datafileprovider/src/main/java/org/apache/stanbol/commons/stanboltools/datafileprovider/impl/tracking/TrackingState.java
@@ -32,12 +32,12 @@
  * Internally used to manage {@link DataFileListener} and the state of
  * tracked DataFiles.<p>
  * Note that different {@link DataFileListener}s may have different {@link STATE}
- * for the same Resource (e.g. if a new {@link DataFileListener} is registered
+ * for the same Resource (e.g. if a new {@link DataFileListener} is registered
  * for a resource it will start with {@link STATE#UNKNOWN} while all the other
  * Listeners will be in the state of the resource (either {@link STATE#AVAILABLE}
  * or {@link STATE#UNAVAILABLE}). Only after the next tracking the newly added
  * {@link DataFileListener} will get fired and be updated to the current state
- * of the Resource.<p>
+ * of the Resource.<p>
  * This model would also allow introducing an ERROR state that could be used
  * to record that some {@link DataFileListener}s were not able to consume a
  * current version of a data file.
diff --git a/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/GraphWriter.java b/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/GraphWriter.java
index ad4a877..336edae 100644
--- a/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/GraphWriter.java
+++ b/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/GraphWriter.java
@@ -43,7 +43,7 @@
 import javax.ws.rs.ext.MessageBodyWriter;
 import javax.ws.rs.ext.Provider;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
@@ -58,7 +58,7 @@
 @Property(name="javax.ws.rs", boolValue=true)
 @Provider
 // @Produces({TEXT_PLAIN, N3, N_TRIPLE, RDF_XML, TURTLE, X_TURTLE, RDF_JSON, APPLICATION_JSON})
-public class GraphWriter implements MessageBodyWriter<TripleCollection> {
+public class GraphWriter implements MessageBodyWriter<Graph> {
 
     /**
      * The media type for JSON-LD (<code>application/ld+json</code>)
@@ -91,10 +91,10 @@
 
     public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
         String mediaTypeString = mediaType.getType() + '/' + mediaType.getSubtype();
-        return TripleCollection.class.isAssignableFrom(type) && supportedMediaTypes.contains(mediaTypeString);
+        return Graph.class.isAssignableFrom(type) && supportedMediaTypes.contains(mediaTypeString);
     }
 
-    public long getSize(TripleCollection t,
+    public long getSize(Graph t,
                         Class<?> type,
                         Type genericType,
                         Annotation[] annotations,
@@ -102,7 +102,7 @@
         return -1;
     }
 
-    public void writeTo(TripleCollection t,
+    public void writeTo(Graph t,
                         Class<?> type,
                         Type genericType,
                         Annotation[] annotations,
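
With TripleCollection folded into Graph, mutable graphs and ImmutableGraph now share a single supertype, so one writer handles both. Below is a stripped-down sketch of the same shape as the class above, without the OSGi annotations and the media-type bookkeeping, assuming the 1.0 Serializer.serialize(OutputStream, Graph, String) signature this commit migrates to.

    import java.io.IOException;
    import java.io.OutputStream;
    import java.lang.annotation.Annotation;
    import java.lang.reflect.Type;

    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.MultivaluedMap;
    import javax.ws.rs.ext.MessageBodyWriter;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.rdf.core.serializedform.Serializer;

    public class MinimalGraphWriter implements MessageBodyWriter<Graph> {

        private final Serializer serializer = Serializer.getInstance();

        public boolean isWriteable(Class<?> type, Type genericType,
                Annotation[] annotations, MediaType mediaType) {
            // Graph covers everything the old TripleCollection check did.
            return Graph.class.isAssignableFrom(type);
        }

        public long getSize(Graph t, Class<?> type, Type genericType,
                Annotation[] annotations, MediaType mediaType) {
            return -1; // length is not known before serialization
        }

        public void writeTo(Graph t, Class<?> type, Type genericType,
                Annotation[] annotations, MediaType mediaType,
                MultivaluedMap<String, Object> httpHeaders,
                OutputStream entityStream) throws IOException {
            serializer.serialize(entityStream, t,
                    mediaType.getType() + "/" + mediaType.getSubtype());
        }
    }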
diff --git a/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/ResultSetToXml.java b/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/ResultSetToXml.java
index 15035ab..6819609 100644
--- a/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/ResultSetToXml.java
+++ b/commons/web/base/src/main/java/org/apache/stanbol/commons/web/base/writers/ResultSetToXml.java
@@ -21,12 +21,11 @@
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.sparql.ResultSet;
 import org.apache.clerezza.rdf.core.sparql.SolutionMapping;
 import org.apache.clerezza.rdf.core.sparql.query.Variable;
@@ -84,23 +83,19 @@
         }
     }
 
-    private Element createValueElement(Resource resource, Document doc) {
+    private Element createValueElement(RDFTerm resource, Document doc) {
         Element value;
-        if (resource instanceof UriRef) {
+        if (resource instanceof IRI) {
             value = doc.createElement("uri");
-            value.appendChild(doc.createTextNode(((UriRef) resource)
+            value.appendChild(doc.createTextNode(((IRI) resource)
                     .getUnicodeString()));
-        } else if (resource instanceof TypedLiteral) {
+        } else if (resource instanceof Literal) {
             value = doc.createElement("literal");
-            value.appendChild(doc.createTextNode(((TypedLiteral) resource)
+            value.appendChild(doc.createTextNode(((Literal)resource)
                     .getLexicalForm()));
-            value.setAttribute("datatype", (((TypedLiteral) resource)
+            value.setAttribute("datatype", (((Literal) resource)
                     .getDataType().getUnicodeString()));
-        } else if (resource instanceof PlainLiteral) {
-            value = doc.createElement("literal");
-            value.appendChild(doc.createTextNode(((PlainLiteral) resource)
-                    .getLexicalForm()));
-            Language lang = ((PlainLiteral) resource).getLanguage();
+            Language lang = ((Literal) resource).getLanguage();
             if (lang != null) {
                 value.setAttribute("xml:lang", (lang.toString()));
             }
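
The old PlainLiteral/TypedLiteral split collapses into the single Literal interface: every literal now exposes a datatype, and language-tagged literals additionally return a non-null getLanguage(). A small sketch of the unified inspection logic; RDF 1.1 semantics (xsd:string for plain literals, rdf:langString for language-tagged ones) are assumed.

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;

    public class LiteralInspectionSketch {

        static String describe(RDFTerm term) {
            if (term instanceof IRI) {
                return "uri: " + ((IRI) term).getUnicodeString();
            } else if (term instanceof Literal) {
                Literal literal = (Literal) term;
                // Every 1.0 Literal has a datatype, so no instanceof split
                // between typed and plain literals is needed any more.
                StringBuilder sb = new StringBuilder("literal: ")
                        .append(literal.getLexicalForm())
                        .append(" ^^").append(literal.getDataType().getUnicodeString());
                Language lang = literal.getLanguage();
                if (lang != null) {
                    sb.append(" @").append(lang);
                }
                return sb.toString();
            }
            return "bnode";
        }

        public static void main(String[] args) {
            System.out.println(describe(new PlainLiteralImpl("hello", new Language("en"))));
            System.out.println(describe(new IRI("http://example.org/x")));
        }
    }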
diff --git a/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RECIPES.java b/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RECIPES.java
index acefc3b..85155f0 100644
--- a/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RECIPES.java
+++ b/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RECIPES.java
@@ -15,7 +15,7 @@
  */
 package org.apache.stanbol.commons.web.rdfviewable.writer;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Used ontological terms from recipes ontology
@@ -29,13 +29,13 @@
      */
     private RECIPES() {}
 
-    public static final UriRef Recipe = new UriRef("http://vocab.netlabs.org/recipe#Recipe");
+    public static final IRI Recipe = new IRI("http://vocab.netlabs.org/recipe#Recipe");
     
-    public static final UriRef recipeDomain = new UriRef("http://vocab.netlabs.org/recipe#recipeDomain");
+    public static final IRI recipeDomain = new IRI("http://vocab.netlabs.org/recipe#recipeDomain");
     
-    public static final UriRef ingredient = new UriRef("http://vocab.netlabs.org/recipe#ingredient");
+    public static final IRI ingredient = new IRI("http://vocab.netlabs.org/recipe#ingredient");
     
-    public static final UriRef ingredientProperty = new UriRef("http://vocab.netlabs.org/recipe#ingredientProperty");
+    public static final IRI ingredientProperty = new IRI("http://vocab.netlabs.org/recipe#ingredientProperty");
     
-    public static final UriRef ingredientInverseProperty = new UriRef("http://vocab.netlabs.org/recipe#ingredientInverseProperty");
+    public static final IRI ingredientInverseProperty = new IRI("http://vocab.netlabs.org/recipe#ingredientInverseProperty");
 }
diff --git a/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RecipesGraphProvider.java b/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RecipesGraphProvider.java
index c0cc0e9..0cd8d6f 100644
--- a/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RecipesGraphProvider.java
+++ b/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/RecipesGraphProvider.java
@@ -18,8 +18,7 @@
 import java.io.IOException;
 import java.net.URL;
 import java.util.Enumeration;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.felix.scr.annotations.Activate;
@@ -27,7 +26,7 @@
 import org.apache.felix.scr.annotations.Deactivate;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.osgi.framework.Bundle;
 import org.osgi.framework.BundleContext;
 import org.osgi.framework.BundleEvent;
@@ -47,15 +46,15 @@
     private static Logger log = 
             LoggerFactory.getLogger(RecipesGraphProvider.class);
     
-    private MGraph recipesGraph = null;
+    private Graph recipesGraph = null;
     
-    public TripleCollection getRecipesGraph() {
+    public Graph getRecipesGraph() {
         return recipesGraph;
     }
     
     @Activate
     protected void activate(BundleContext context) {
-        recipesGraph = new IndexedMGraph();
+        recipesGraph = new IndexedGraph();
         context.addBundleListener(this);
         for (Bundle b : context.getBundles()) {
             if (b.getState() == Bundle.ACTIVE) {
diff --git a/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/impl/RdfSerializingWriter.java b/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/impl/RdfSerializingWriter.java
index 9bf46d4..2f04a11 100644
--- a/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/impl/RdfSerializingWriter.java
+++ b/commons/web/rdfviewable-writer/src/main/java/org/apache/stanbol/commons/web/rdfviewable/writer/impl/RdfSerializingWriter.java
@@ -34,13 +34,12 @@
 import javax.ws.rs.core.UriInfo;
 import javax.ws.rs.ext.MessageBodyWriter;
 import javax.ws.rs.ext.Provider;
-import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.utils.GraphNode;
@@ -113,17 +112,17 @@
         this.uriInfo = uriInfo;
     }
 
-    private TripleCollection getExpandedContext(GraphNode node, GraphNode recipe) {
-        final TripleCollection result = new SimpleMGraph(node.getNodeContext());
-        final Set<Resource> expandedResources = new HashSet<Resource>();
+    private Graph getExpandedContext(GraphNode node, GraphNode recipe) {
+        final Graph result = new SimpleGraph(node.getNodeContext());
+        final Set<RDFTerm> expandedResources = new HashSet<RDFTerm>();
         expandedResources.add(node.getNode());
         while (true) {
-            Set<Resource> additionalExpansionRes = getAdditionalExpansionResources(result, recipe);
+            Set<RDFTerm> additionalExpansionRes = getAdditionalExpansionResources(result, recipe);
             additionalExpansionRes.removeAll(expandedResources);
             if (additionalExpansionRes.size() == 0) {
                 return result;
             }
-            for (Resource resource : additionalExpansionRes) {
+            for (RDFTerm resource : additionalExpansionRes) {
                 final GraphNode additionalNode = new GraphNode(resource, node.getGraph());
                 result.addAll(additionalNode.getNodeContext());
                 expandedResources.add(resource);
@@ -131,14 +130,14 @@
         }
     }
 
-    private Set<Resource> getAdditionalExpansionResources(TripleCollection tc, GraphNode recipe) {
-        final Set<UriRef> subjectExpansionProperties = getSubjectExpansionProperties(recipe);
-        final Set<UriRef> objectExpansionProperties = getObjectExpansionProperties(recipe);
-        final Set<Resource> result = new HashSet<Resource>();
+    private Set<RDFTerm> getAdditionalExpansionResources(Graph tc, GraphNode recipe) {
+        final Set<IRI> subjectExpansionProperties = getSubjectExpansionProperties(recipe);
+        final Set<IRI> objectExpansionProperties = getObjectExpansionProperties(recipe);
+        final Set<RDFTerm> result = new HashSet<RDFTerm>();
         if ((subjectExpansionProperties.size() > 0)
                 || (objectExpansionProperties.size() > 0)) {
             for (Triple triple : tc) {
-                final UriRef predicate = triple.getPredicate();
+                final IRI predicate = triple.getPredicate();
                 if (subjectExpansionProperties.contains(predicate)) {
                     result.add(triple.getSubject());
                 }
@@ -150,44 +149,44 @@
         return result;
     }
 
-    private Set<UriRef> getSubjectExpansionProperties(GraphNode recipe) {
+    private Set<IRI> getSubjectExpansionProperties(GraphNode recipe) {
         final MultivaluedMap<String, String> queryParams = uriInfo.getQueryParameters(true);
         final List<String> paramValues = queryParams.get(SUBJ_EXP_PARAM);
-        final Set<UriRef> result = new HashSet<UriRef>();
+        final Set<IRI> result = new HashSet<IRI>();
         if (paramValues != null) {
             for (String uriString : paramValues) {
-                result.add(new UriRef(uriString));
+                result.add(new IRI(uriString));
             }
         }
         if (recipe != null) {
             Iterator<GraphNode> ingredients = recipe.getObjectNodes(RECIPES.ingredient);
             while (ingredients.hasNext()) {
-                Iterator<Resource> properties = 
+                Iterator<RDFTerm> properties = 
                         ingredients.next().getObjects(RECIPES.ingredientInverseProperty);
                 while (properties.hasNext()) {
-                    result.add((UriRef)properties.next());
+                    result.add((IRI)properties.next());
                 }
             }
         }
         return result;
     }
 
-    private Set<UriRef> getObjectExpansionProperties(GraphNode recipe) {
+    private Set<IRI> getObjectExpansionProperties(GraphNode recipe) {
         final MultivaluedMap<String, String> queryParams = uriInfo.getQueryParameters(true);
         final List<String> paramValues = queryParams.get(OBJ_EXP_PARAM);
-        final Set<UriRef> result = new HashSet<UriRef>();
+        final Set<IRI> result = new HashSet<IRI>();
         if (paramValues != null) {
             for (String uriString : paramValues) {
-                result.add(new UriRef(uriString));
+                result.add(new IRI(uriString));
             }
         }
         if (recipe != null) {
             Iterator<GraphNode> ingredients = recipe.getObjectNodes(RECIPES.ingredient);
             while (ingredients.hasNext()) {
-                Iterator<Resource> properties = 
+                Iterator<RDFTerm> properties = 
                         ingredients.next().getObjects(RECIPES.ingredientProperty);
                 while (properties.hasNext()) {
-                    result.add((UriRef)properties.next());
+                    result.add((IRI)properties.next());
                 }
             }
         }
@@ -196,7 +195,7 @@
     }
 
     private GraphNode getRecipe(String templatePath) {
-        TripleCollection rg = recipesGraphProvider.getRecipesGraph();
+        Graph rg = recipesGraphProvider.getRecipesGraph();
         GraphNode literalNode = new GraphNode(new PlainLiteralImpl(templatePath), rg);
         Iterator<GraphNode> recipes = literalNode.getSubjectNodes(RECIPES.recipeDomain);
         if (recipes.hasNext()) {
diff --git a/commons/web/sparql/src/main/java/org/apache/stanbol/commons/web/sparql/resource/SparqlEndpointResource.java b/commons/web/sparql/src/main/java/org/apache/stanbol/commons/web/sparql/resource/SparqlEndpointResource.java
index 20fff18..7e1a197 100644
--- a/commons/web/sparql/src/main/java/org/apache/stanbol/commons/web/sparql/resource/SparqlEndpointResource.java
+++ b/commons/web/sparql/src/main/java/org/apache/stanbol/commons/web/sparql/resource/SparqlEndpointResource.java
@@ -34,8 +34,8 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.Response.Status;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
 import org.apache.felix.scr.annotations.Activate;
@@ -53,7 +53,7 @@
 
 /**
  * This is the SPARQL endpoint used throughout Stanbol. It uses {@link BundleContext} to retrieve
- * {@link TripleCollection} s registered to OSGi environment. To be able to execute SPARQL queries on triple
+ * {@link Graph}s registered to the OSGi environment. To be able to execute SPARQL queries on triple
  * collections, they should be registered to the OSGi environment with the following parameters:
  * 
  * <p>
@@ -97,7 +97,7 @@
     }*/
 
     /**
-     * HTTP GET service to execute SPARQL queries on {@link TripleCollection}s registered to OSGi environment.
+     * HTTP GET service to execute SPARQL queries on {@link Graph}s registered to the OSGi environment.
      * If a <code>null</code> query is passed, it is assumed that the request comes from the HTML interface of the SPARQL
      * endpoint. Otherwise the query is executed on the triple collection specified by <code>graphUri</code>.
      * But, if no graph uri is passed, then the triple collection having the highest service.ranking value is
@@ -120,19 +120,19 @@
                            @QueryParam(value = "query") String sparqlQuery,
                            @Context HttpHeaders headers) throws InvalidSyntaxException {
         if (sparqlQuery == null) {
-            populateTripleCollectionList(getServices(null));
+            populateGraphList(getServices(null));
             return Response.ok(new Viewable("index", this), TEXT_HTML).build();
         }
         
         String mediaType = "application/sparql-results+xml";
 
-        TripleCollection tripleCollection = getTripleCollection(graphUri);
+        Graph tripleCollection = getGraph(graphUri);
         ResponseBuilder rb;
         if (tripleCollection != null) {
             Object result;
 			try {
 				result = tcManager.executeSparqlQuery(sparqlQuery, tripleCollection);
-		        if (result instanceof TripleCollection) {
+		        if (result instanceof Graph) {
 		            mediaType = "application/rdf+xml";
 		        }
 	            rb = Response.ok(result, mediaType);
@@ -148,7 +148,7 @@
     }
 
     /**
-     * HTTP GET service to execute SPARQL queries on {@link TripleCollection}s registered to OSGi environment.
+     * HTTP POST service to execute SPARQL queries on {@link Graph}s registered to the OSGi environment.
      * For details, see {@link #sparql(String, String, HttpHeaders)}
      */
     @POST
@@ -160,42 +160,42 @@
         return sparql(graphUri, sparqlQuery, headers);
     }
 
-    private TripleCollection getTripleCollection(String graphUri) throws InvalidSyntaxException {
-        Map<ServiceReference,TripleCollection> services = getServices(graphUri);
+    private Graph getGraph(String graphUri) throws InvalidSyntaxException {
+        Map<ServiceReference,Graph> services = getServices(graphUri);
         if (services != null && services.size() > 0) {
             return services.get(services.keySet().iterator().next());
         }
         return null;
     }
 
-    private void populateTripleCollectionList(Map<ServiceReference,TripleCollection> services) {
+    private void populateGraphList(Map<ServiceReference,Graph> services) {
         if (services != null) {
             for (ServiceReference service : services.keySet()) {
                 Object graphUri = service.getProperty(GRAPH_URI);
-                if (service.getProperty(GRAPH_URI) instanceof UriRef) {
-                    graphUri = ((UriRef) graphUri).getUnicodeString();
+                if (service.getProperty(GRAPH_URI) instanceof IRI) {
+                    graphUri = ((IRI) graphUri).getUnicodeString();
                 }
                 Object graphName = service.getProperty("graph.name");
                 Object graphDescription = service.getProperty("graph.description");
                 if (graphUri instanceof String && graphName instanceof String
                     && graphDescription instanceof String) {
-                    tripleCollections.add(new TripleCollectionInfo((String) graphUri, (String) graphName,
+                    tripleCollections.add(new GraphInfo((String) graphUri, (String) graphName,
                             (String) graphDescription));
                 }
             }
         }
     }
 
-    private Map<ServiceReference,TripleCollection> getServices(String graphUri) throws InvalidSyntaxException {
-        Map<ServiceReference,TripleCollection> registeredGraphs = new LinkedHashMap<ServiceReference,TripleCollection>();
-        ServiceReference[] refs = bundleContext.getServiceReferences(TripleCollection.class.getName(),
+    private Map<ServiceReference,Graph> getServices(String graphUri) throws InvalidSyntaxException {
+        Map<ServiceReference,Graph> registeredGraphs = new LinkedHashMap<ServiceReference,Graph>();
+        ServiceReference[] refs = bundleContext.getServiceReferences(Graph.class.getName(),
             getFilter(graphUri));
         if (refs != null) {
             if (refs.length > 1) {
                 Arrays.sort(refs);
             }
             for (ServiceReference ref : refs) {
-                registeredGraphs.put(ref, (TripleCollection) bundleContext.getService(ref));
+                registeredGraphs.put(ref, (Graph) bundleContext.getService(ref));
             }
         }
         return registeredGraphs;
@@ -211,7 +211,7 @@
             filterString = new StringBuilder();
         }
         filterString
-                .append(String.format(constraint, Constants.OBJECTCLASS, TripleCollection.class.getName()));
+                .append(String.format(constraint, Constants.OBJECTCLASS, Graph.class.getName()));
         if (graphUri != null) {
             filterString.append(')');
         }
@@ -222,18 +222,18 @@
      * HTML View
      */
 
-    private List<TripleCollectionInfo> tripleCollections = new ArrayList<SparqlEndpointResource.TripleCollectionInfo>();
+    private List<GraphInfo> tripleCollections = new ArrayList<SparqlEndpointResource.GraphInfo>();
 
-    public List<TripleCollectionInfo> getTripleCollectionList() {
+    public List<GraphInfo> getGraphList() {
         return this.tripleCollections;
     }
 
-    public class TripleCollectionInfo {
+    public class GraphInfo {
         private String graphUri;
         private String graphName;
         private String graphDescription;
 
-        public TripleCollectionInfo(String graphUri, String graphName, String graphDescription) {
+        public GraphInfo(String graphUri, String graphName, String graphDescription) {
             this.graphUri = graphUri;
             this.graphName = graphName != null ? graphName : "";
             this.graphDescription = graphDescription != null ? graphDescription : "";
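
The endpoint discovers graphs as OSGi services registered under the Graph interface and reads the graph.name and graph.description service properties, plus a graph-URI property behind the GRAPH_URI constant whose literal key is not visible in these hunks. A hedged sketch of registering a graph so that it appears in the endpoint's listing; the "graph.uri" key is an assumption.

    import java.util.Dictionary;
    import java.util.Hashtable;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.osgi.framework.BundleContext;

    public class GraphRegistrationSketch {

        public static void register(BundleContext ctx) {
            Graph graph = new IndexedGraph();
            Dictionary<String, Object> props = new Hashtable<String, Object>();
            // NOTE: "graph.uri" is an assumed key; the real constant behind
            // GRAPH_URI is defined elsewhere in SparqlEndpointResource.
            props.put("graph.uri", new IRI("http://example.org/my-graph"));
            props.put("graph.name", "Example graph");
            props.put("graph.description", "A demo graph for the SPARQL endpoint");
            // getServices() looks services up under the Graph interface,
            // so that is the name to register with.
            ctx.registerService(Graph.class.getName(), graph, props);
        }
    }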
diff --git a/commons/web/sparql/src/main/resources/templates/imports/sparql.ftl b/commons/web/sparql/src/main/resources/templates/imports/sparql.ftl
index 8f04c4c..6541a2e 100644
--- a/commons/web/sparql/src/main/resources/templates/imports/sparql.ftl
+++ b/commons/web/sparql/src/main/resources/templates/imports/sparql.ftl
@@ -16,23 +16,23 @@
 -->
 <#macro form>
 <fieldset>
-  <legend>Registered TripleCollections</legend>
+  <legend>Registered Graphs</legend>
   <#-- graph list -->
-  <#if it.tripleCollectionList?size &gt; 0>
+  <#if it.graphList?size &gt; 0>
     <select id="graphList" onChange='javascript:graphChangeHandler();'>
-    	<#list it.tripleCollectionList as tcInfo>
+    	<#list it.graphList as tcInfo>
     		<option value="${tcInfo.graphUri}">${tcInfo.graphUri}</option>
     	</#list>
     </select>
   <#else>
-    There is no registered TripleCollection.
+    There is no registered Graph.
   </#if>
 </fieldset>
 
-<#if it.tripleCollectionList?size &gt; 0>
+<#if it.graphList?size &gt; 0>
   <fieldset>
     <legend>Details of Selected Graph</legend>
-    <#list it.tripleCollectionList as tcInfo>
+    <#list it.graphList as tcInfo>
     	<ul id="${tcInfo.graphUri}" class="graphDetailInvisible">
     		<li>Graph Name: ${tcInfo.graphName}</li>
     		<li>Graph Description: ${tcInfo.graphDescription}</li>
diff --git a/development/archetypes/enhancement-engine/src/main/resources/archetype-resources/src/main/java/ExampleEnhancer.java b/development/archetypes/enhancement-engine/src/main/resources/archetype-resources/src/main/java/ExampleEnhancer.java
index b30109e..4bf1478 100644
--- a/development/archetypes/enhancement-engine/src/main/resources/archetype-resources/src/main/java/ExampleEnhancer.java
+++ b/development/archetypes/enhancement-engine/src/main/resources/archetype-resources/src/main/java/ExampleEnhancer.java
@@ -9,10 +9,10 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.DCTERMS;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.felix.scr.annotations.Activate;
@@ -138,7 +138,7 @@
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if a Content in the supported type is available
         //NOTE: you can parse multiple content types
-        Entry<UriRef,Blob> textBlob = ContentItemHelper.getBlob(
+        Entry<IRI,Blob> textBlob = ContentItemHelper.getBlob(
             ci, Collections.singleton("text/plain"));
         if(textBlob == null) {
             return CANNOT_ENHANCE;
@@ -168,15 +168,15 @@
         
         //(3) write the enhancement results        
         // get the metadata graph
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         //NOTE: as we allow synchronous calls we need to use read/write
         // locks on the ContentItem
         ci.getLock().writeLock().lock();
         try {
             // TODO: replace this with real enhancements
-            UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+            IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
             metadata.add(new TripleImpl(textAnnotation, DCTERMS.type, 
-                    new UriRef("http://example.org/ontology/LengthEnhancement")));
+                    new IRI("http://example.org/ontology/LengthEnhancement")));
             metadata.add(new TripleImpl(textAnnotation, RDFS.comment,
                     new PlainLiteralImpl("A text of " + contentLength + " charaters")));
         } finally {
diff --git a/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java b/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java
index e0aa5ab..72d1612 100644
--- a/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java
+++ b/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java
@@ -3,7 +3,7 @@
 #set( $symbol_escape = '\' )
 package ${package};
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 
 /**
@@ -17,26 +17,26 @@
      * of the resource of which the IRI is specified in the "iri" query parameter.
      * 
      */
-    public static final UriRef ResourceResolver = new UriRef("http://example.org/service-description${symbol_pound}ResourceResolver");
+    public static final IRI ResourceResolver = new IRI("http://example.org/service-description${symbol_pound}ResourceResolver");
     
     /**
      * Points to the resource resolved by the subject.
      */
-    public static final UriRef describes = new UriRef("http://example.org/service-description${symbol_pound}describes");
+    public static final IRI describes = new IRI("http://example.org/service-description${symbol_pound}describes");
     
     /**
      * The description of a Request in the log.
      */
-    public static final UriRef LoggedRequest = new UriRef("http://example.org/service-description${symbol_pound}LoggedRequest");
+    public static final IRI LoggedRequest = new IRI("http://example.org/service-description${symbol_pound}LoggedRequest");
     
     /**
      * The User Agent performing the request described by the subject.
      */
-    public static final UriRef userAgent = new UriRef("http://example.org/service-description${symbol_pound}userAgent");
+    public static final IRI userAgent = new IRI("http://example.org/service-description${symbol_pound}userAgent");
     
     /**
      * The Entity of which a description was requested in the request
      * described by the subject.
      */
-    public static final UriRef requestedEntity = new UriRef("http://example.org/service-description${symbol_pound}requestedEntity");
+    public static final IRI requestedEntity = new IRI("http://example.org/service-description${symbol_pound}requestedEntity");
 }
diff --git a/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/ResourceResolver.java b/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/ResourceResolver.java
index 29b2b01..9a04620 100644
--- a/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/ResourceResolver.java
+++ b/development/archetypes/statefull-webmodule/src/main/resources/archetype-resources/src/main/java/ResourceResolver.java
@@ -15,26 +15,26 @@
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.UriInfo;
 import org.apache.clerezza.jaxrs.utils.TrailingSlash;
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.access.security.TcAccessController;
 import org.apache.clerezza.rdf.core.access.security.TcPermission;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.clerezza.rdf.ontologies.DC;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.clerezza.rdf.utils.GraphNode;
-import org.apache.clerezza.rdf.utils.UnionMGraph;
+import org.apache.clerezza.rdf.utils.UnionGraph;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.web.viewable.RdfViewable;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
 import org.apache.stanbol.entityhub.servicesapi.model.Entity;
@@ -75,13 +75,13 @@
     /**
      * This is the name of the graph in which we "log" the requests
      */
-    private UriRef REQUEST_LOG_GRAPH_NAME = new UriRef("http://example.org/resource-resolver-log.graph");
+    private IRI REQUEST_LOG_GRAPH_NAME = new IRI("http://example.org/resource-resolver-log.graph");
     
     @Activate
     protected void activate(ComponentContext context) {
         log.info("The example service is being activated");
         try {
-            tcManager.createMGraph(REQUEST_LOG_GRAPH_NAME);
+            tcManager.createGraph(REQUEST_LOG_GRAPH_NAME);
             //now make sure everybody can read from the graph
             //or more precisely, anybody who can read the content-graph
             TcAccessController tca = tcManager.getTcAccessController();
@@ -105,7 +105,7 @@
      */
     @GET
     public RdfViewable serviceEntry(@Context final UriInfo uriInfo, 
-            @QueryParam("iri") final UriRef iri, 
+            @QueryParam("iri") final IRI iri, 
             @HeaderParam("user-agent") String userAgent) throws Exception {
         //this makes sure we are not invoked with a trailing slash which would affect
         //relative resolution of links (e.g. css)
@@ -113,18 +113,18 @@
         final String resourcePath = uriInfo.getAbsolutePath().toString();
         //The URI at which this service was accessed; this will be the 
         //central serviceUri in the response
-        final UriRef serviceUri = new UriRef(resourcePath);
+        final IRI serviceUri = new IRI(resourcePath);
         //the in memory graph to which the triples for the response are added
-        final MGraph responseGraph = new IndexedMGraph();
+        final Graph responseGraph = new IndexedGraph();
         //A union graph containing both the response-specific triples as well 
         //as the log-graph
-        final UnionMGraph resultGraph = new UnionMGraph(responseGraph, getRequestLogGraph());
+        final UnionGraph resultGraph = new UnionGraph(responseGraph, getRequestLogGraph());
         //This GraphNode represents the service within our result graph
         final GraphNode node = new GraphNode(serviceUri, resultGraph);
         //The triples will be added to the first graph of the union
         //i.e. to the in-memory responseGraph
         node.addProperty(RDF.type, Ontology.ResourceResolver);
-        node.addProperty(RDFS.comment, new PlainLiteralImpl("A Resource Resolver"));
+        node.addProperty(RDFS.comment, new PlainLiteralImpl("A Resource Resolver"));
         if (iri != null) {
             node.addProperty(Ontology.describes, iri);
             addResourceDescription(iri, responseGraph);
@@ -136,11 +136,11 @@
     
 
     /**
-     * Add the description of a serviceUri to the specified MGraph using SiteManager.
+     * Add the description of a serviceUri to the specified Graph using SiteManager.
      * The description includes the metadata provided by the SiteManager.
      * 
      */
-    private void addResourceDescription(UriRef iri, MGraph mGraph) {
+    private void addResourceDescription(IRI iri, Graph mGraph) {
         final Entity entity = siteManager.getEntity(iri.getUnicodeString());
         if (entity != null) {
             final RdfValueFactory valueFactory = new RdfValueFactory(mGraph);
@@ -158,15 +158,15 @@
     /**
      * Logs a request to the log-graph
      */
-    private void logRequest(final UriRef iri, final String userAgent) {
+    private void logRequest(final IRI iri, final String userAgent) {
         //writing to a persistent graph requires some special permission
         //by executing the code in a do-privileged section
         //the user doesn't need these permissions, anonymous users are thus not
         //asked to log in
         AccessController.doPrivileged(new PrivilegedAction<Object>() {
             public Object run() {
-                final MGraph logGraph = getRequestLogGraph();
-                GraphNode loggedRequest = new GraphNode(new BNode(), logGraph);
+                final Graph logGraph = getRequestLogGraph();
+                GraphNode loggedRequest = new GraphNode(new BlankNode(), logGraph);
                 loggedRequest.addProperty(RDF.type, Ontology.LoggedRequest);
                 loggedRequest.addPropertyValue(DC.date, new Date());
                 loggedRequest.addPropertyValue(Ontology.userAgent, userAgent);
@@ -178,12 +178,12 @@
     }
 
     /**
-     * This returns the existing MGraph for the log .
+     * This returns the existing Graph for the log.
      * 
-     * @return the MGraph to which the requests are logged
+     * @return the Graph to which the requests are logged
      */
-    private MGraph getRequestLogGraph() {
-        return tcManager.getMGraph(REQUEST_LOG_GRAPH_NAME);
+    private Graph getRequestLogGraph() {
+        return tcManager.getGraph(REQUEST_LOG_GRAPH_NAME);
     }
     
 }
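
UnionMGraph is renamed to UnionGraph, keeping the behavior the comments above rely on: reads see the union of all member graphs, while additions land in the first member. A minimal sketch with illustrative IRIs:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.utils.UnionGraph;

    public class UnionGraphSketch {

        public static void main(String[] args) {
            Graph responseGraph = new SimpleGraph(); // in-memory, per request
            Graph logGraph = new SimpleGraph();      // stands in for the persistent log

            Graph union = new UnionGraph(responseGraph, logGraph);
            union.add(new TripleImpl(
                    new IRI("http://example.org/s"),
                    new IRI("http://example.org/p"),
                    new IRI("http://example.org/o")));

            // Additions through the union land in the first member graph only,
            // which is why the response-specific triples stay in memory while
            // the request-log graph is merely read.
            System.out.println("response=" + responseGraph.size()
                    + " log=" + logGraph.size()); // prints response=1 log=0
        }
    }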
diff --git a/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/MultiEnhancer.java b/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/MultiEnhancer.java
index 1054167..4ed0c3b 100644
--- a/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/MultiEnhancer.java
+++ b/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/MultiEnhancer.java
@@ -29,9 +29,9 @@
 import org.apache.clerezza.jaxrs.utils.TrailingSlash;
 import org.apache.clerezza.jaxrs.utils.form.FormFile;
 import org.apache.clerezza.jaxrs.utils.form.MultiPartBody;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.clerezza.rdf.utils.GraphNode;
@@ -41,7 +41,7 @@
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.web.viewable.RdfViewable;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ChainManager;
@@ -101,9 +101,9 @@
         final String resourcePath = uriInfo.getAbsolutePath().toString();
         //The URI at which this service was accessed; this will be the 
         //central serviceUri in the response
-        final UriRef serviceUri = new UriRef(resourcePath);
+        final IRI serviceUri = new IRI(resourcePath);
         //the in memory graph to which the triples for the response are added
-        final MGraph responseGraph = new IndexedMGraph();
+        final Graph responseGraph = new IndexedGraph();
         //This GraphNode represents the service within our result graph
         final GraphNode node = new GraphNode(serviceUri, responseGraph);
         //The triples will be added to the first graph of the union
@@ -137,9 +137,9 @@
             enhancementJobManager.enhanceContent(contentItem, chain);
         }
         //this contains the enhancement results
-        final MGraph resultGraph = contentItem.getMetadata();
+        final Graph resultGraph = contentItem.getMetadata();
         //this is the IRI assigned to the submitted content
-        final UriRef contentIri = contentItem.getUri();
+        final IRI contentIri = contentItem.getUri();
         //this represents the submitted Content within the resultGraph
         final GraphNode node = new GraphNode(contentIri, resultGraph);
         //node is the "root" for rendering the results 
diff --git a/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java b/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java
index ecde19d..82a526e 100644
--- a/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java
+++ b/development/archetypes/stateless-webmodule/src/main/resources/archetype-resources/src/main/java/Ontology.java
@@ -19,7 +19,7 @@
 #set( $symbol_escape = '\' )
 package ${package};
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 
 /**
@@ -33,6 +33,6 @@
      * containing the content to be enhanced as one field and optionally the
      * requested enhancement chain in the other.
      */
-    public static final UriRef MultiEnhancer = new UriRef("http://example.org/service-description${symbol_pound}MultiEnhancer");
+    public static final IRI MultiEnhancer = new IRI("http://example.org/service-description${symbol_pound}MultiEnhancer");
     
 }
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliConstants.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliConstants.java
index 316db02..624ef8c 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliConstants.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliConstants.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.enhancer.engines.celi;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.osgi.framework.BundleContext;
 import org.osgi.service.component.ComponentContext;
 
@@ -64,10 +64,10 @@
      * Concept used to annotate sentiment expressions within text
      *  TODO: Find standard ontology for reference or check if it is OK to define new properties in the FISE namespace
      */
- 	UriRef SENTIMENT_EXPRESSION = new UriRef("http://fise.iks-project.eu/ontology/Sentiment Expression");
+ 	IRI SENTIMENT_EXPRESSION = new IRI("http://fise.iks-project.eu/ontology/Sentiment Expression");
  	/**
      * Datatype property (targets double literals) used to represent the polarity of a sentiment expression
      *  TODO: Find standard ontology for reference or check if it is OK to define new properties in the FISE namespace
      */
- 	UriRef HAS_SENTIMENT_EXPRESSION_POLARITY=new UriRef("http://fise.iks-project.eu/ontology/hasSentimentPolarityValue");
+ 	IRI HAS_SENTIMENT_EXPRESSION_POLARITY=new IRI("http://fise.iks-project.eu/ontology/hasSentimentPolarityValue");
 }
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliMorphoFeatures.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliMorphoFeatures.java
index 9a34f59..3cc4dd5 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliMorphoFeatures.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/CeliMorphoFeatures.java
@@ -23,11 +23,11 @@
 import java.util.Map.Entry;
 import java.util.Vector;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.engines.celi.lemmatizer.impl.CeliLemmatizerEnhancementEngine;
 import org.apache.stanbol.enhancer.engines.celi.lemmatizer.impl.Reading;
 import org.apache.stanbol.enhancer.nlp.model.Token;
@@ -61,13 +61,13 @@
 
     private static CeliTagSetRegistry tagRegistry = CeliTagSetRegistry.getInstance();
 
-    public static final UriRef HAS_NUMBER = new UriRef("http://purl.org/olia/olia.owl#hasNumber");
-    public static final UriRef HAS_GENDER = new UriRef("http://purl.org/olia/olia.owl#hasGender");
-    public static final UriRef HAS_PERSON = new UriRef("http://purl.org/olia/olia.owl#hasPerson");
-    public static final UriRef HAS_CASE = new UriRef("http://purl.org/olia/olia.owl#hasCase");
-    public static final UriRef HAS_DEFINITENESS = new UriRef("http://purl.org/olia/olia.owl#hasDefiniteness");
-    public static final UriRef HAS_MOOD = new UriRef("http://purl.org/olia/olia.owl#hasMood");
-    public static final UriRef HAS_TENSE = new UriRef("http://purl.org/olia/olia.owl#hasTense");
+    public static final IRI HAS_NUMBER = new IRI("http://purl.org/olia/olia.owl#hasNumber");
+    public static final IRI HAS_GENDER = new IRI("http://purl.org/olia/olia.owl#hasGender");
+    public static final IRI HAS_PERSON = new IRI("http://purl.org/olia/olia.owl#hasPerson");
+    public static final IRI HAS_CASE = new IRI("http://purl.org/olia/olia.owl#hasCase");
+    public static final IRI HAS_DEFINITENESS = new IRI("http://purl.org/olia/olia.owl#hasDefiniteness");
+    public static final IRI HAS_MOOD = new IRI("http://purl.org/olia/olia.owl#hasMood");
+    public static final IRI HAS_TENSE = new IRI("http://purl.org/olia/olia.owl#hasTense");
 
     public static CeliMorphoFeatures parseFrom(Reading reading, String lang){
         if(reading == null){
@@ -105,7 +105,7 @@
 	    super(lemma);
 	}
 
-	public Collection<? extends Triple> featuresAsTriples(UriRef textAnnotation, Language lang) {
+	public Collection<? extends Triple> featuresAsTriples(IRI textAnnotation, Language lang) {
 		Collection<TripleImpl> result = new Vector<TripleImpl>();
 		result.add(new TripleImpl(textAnnotation, CeliLemmatizerEnhancementEngine.hasLemmaForm, 
 		    new PlainLiteralImpl(getLemma(), lang)));
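
Note: the changes in this file are mechanical renames against the clerezza 1.0 commons-rdf API, the same mapping applied throughout this patch: UriRef -> IRI, MGraph -> Graph (mutable), old Graph -> ImmutableGraph, Resource -> RDFTerm, NonLiteral -> BlankNodeOrIRI, PlainLiteral/TypedLiteral -> Literal, and org.apache.clerezza.rdf.core.impl.* -> org.apache.clerezza.commons.rdf.impl.utils.*. A minimal sketch of the new API (names and values hypothetical; SimpleGraph stands in for any mutable Graph implementation from the impl.utils bundle):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class Clerezza10MigrationSketch {
        public static void main(String[] args) {
            IRI doc = new IRI("http://example.org/doc1");          // was: new UriRef(..)
            IRI hasTitle = new IRI("http://example.org/hasTitle"); // hypothetical property
            Graph g = new SimpleGraph();                           // was: MGraph
            g.add(new TripleImpl(doc, hasTitle,
                new PlainLiteralImpl("A title", new Language("en"))));
            System.out.println(g.size());                          // -> 1
        }
    }
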
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngine.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngine.java
index 17f61b9..025fa25 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngine.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngine.java
@@ -37,12 +37,12 @@
 
 import javax.xml.soap.SOAPException;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
@@ -116,7 +116,7 @@
 	/**
 	 * Currently used as fise:entity-type for TopicAnnotations
 	 */
-	private static final UriRef OWL_CLASS = new UriRef("http://www.w3.org/2002/07/owl#Class");
+	private static final IRI OWL_CLASS = new IRI("http://www.w3.org/2002/07/owl#Class");
 	
 	private Logger log = LoggerFactory.getLogger(getClass());
 
@@ -204,7 +204,7 @@
                     + "in the canEnhance method! -> This indicated an Bug in the "
                     + "implementation of the " + "EnhancementJobManager!");
         }
-		Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
+		Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
 		if (contentPart == null) {
 			throw new IllegalStateException("No ContentPart with Mimetype '" 
 			        + TEXT_PLAIN_MIMETYPE + "' found for ContentItem " 
@@ -248,20 +248,20 @@
 		if(lista.isEmpty()){ //no topics found
 		    return; //nothing to do
 		}
-		MGraph g = ci.getMetadata();
+		Graph g = ci.getMetadata();
 		//NOTE: EnhancementEngines that use "ENHANCE_ASYNC" need to acquire a
 		//      writeLock before modifications to the enhancement metadata
 		ci.getLock().writeLock().lock();
 		try {
     		//see STANBOL-617 for rules how to encode extracted topics
     		//we need a single TextAnnotation to link all TopicAnnotations
-    		UriRef textAnnotation = createTextEnhancement(ci, this);
+    		IRI textAnnotation = createTextEnhancement(ci, this);
     		// add the dc:type skos:Concept
     		g.add(new TripleImpl(textAnnotation, DC_TYPE, SKOS_CONCEPT));
     		
    		//now create the fise:TopicAnnotations
     		for (Concept ne : lista) {
-    		    UriRef topicAnnotation = EnhancementEngineHelper.createTopicEnhancement(ci, this);
+    		    IRI topicAnnotation = EnhancementEngineHelper.createTopicEnhancement(ci, this);
     	        g.add(new TripleImpl(topicAnnotation, ENHANCER_ENTITY_REFERENCE, ne.getUri()));
                 g.add(new TripleImpl(topicAnnotation, ENHANCER_ENTITY_LABEL, 
                     new PlainLiteralImpl(ne.getLabel())));
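
Note: the NOTE above spells out the concurrency contract for ENHANCE_ASYNC engines: enhancement metadata is written outside the request thread, so every modification of ci.getMetadata() must happen under the ContentItem's write lock. A condensed sketch of the pattern used in this and the following engines (triple values hypothetical):

    Graph g = ci.getMetadata();
    ci.getLock().writeLock().lock();
    try {
        IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
        g.add(new TripleImpl(textAnnotation, DC_TYPE, SKOS_CONCEPT));
    } finally {
        ci.getLock().writeLock().unlock(); // always released, even if an exception is thrown
    }
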
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/ClassificationClientHTTP.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/ClassificationClientHTTP.java
index 8f69aed..d624c0a 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/ClassificationClientHTTP.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/ClassificationClientHTTP.java
@@ -36,7 +36,7 @@
 import javax.xml.soap.SOAPPart;
 import javax.xml.transform.stream.StreamSource;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.impl.util.Base64;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.stanbol.enhancer.engines.celi.utils.Utils;
@@ -148,7 +148,7 @@
 			//      to the most specific dbpedia ontology class is best suited.
 			String model = result.getElementsByTagNameNS("*","label").item(0).getTextContent();
 			model=model.substring(1, model.length()-1);
-			UriRef modelConcept = selectClassificationClass(model);
+			IRI modelConcept = selectClassificationClass(model);
 			String conf=result.getElementsByTagNameNS("*","score").item(0).getTextContent();
 			Double confidence= new Double(conf);
 			extractedConcepts.add(new Concept(model,modelConcept,confidence));
@@ -168,7 +168,7 @@
      * @param classificationLabels the label string
      * @return the selected label
      */
-    private UriRef selectClassificationClass(String classificationLabels) {
+    private IRI selectClassificationClass(String classificationLabels) {
         //NOTE: (rwesten) In general it would be better if CELI could provide
         //      de-referenceable URLs for those suggestions.
         //      If that is possible one would no longer need to link to the
@@ -184,7 +184,7 @@
         int end = classificationLabels.charAt(classificationLabels.length()-1) == ']' ?
                 classificationLabels.length() - 1 : classificationLabels.length();
         String[] tmps = classificationLabels.substring(start, end).split(" ");
-        return new UriRef(NamespaceEnum.dbpedia_ont.getNamespace()+ //the namespace
+        return new IRI(NamespaceEnum.dbpedia_ont.getNamespace()+ //the namespace
             (tmps.length > 1 ? tmps[1] : tmps[0])); //the Class for the label
     }	
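
Note: worked example for selectClassificationClass (label strings hypothetical), assuming NamespaceEnum.dbpedia_ont resolves to http://dbpedia.org/ontology/ - the bracketed CELI label is stripped, split on spaces, and the second token is preferred as the more specific class:

    // "[Sport Event]" -> start=1, end=length-1 strip the brackets;
    // split(" ") -> {"Sport", "Event"}; tmps.length > 1, so tmps[1] is used
    IRI cls = selectClassificationClass("[Sport Event]");
    // -> new IRI("http://dbpedia.org/ontology/Event")
    IRI single = selectClassificationClass("[Person]");
    // -> new IRI("http://dbpedia.org/ontology/Person") (single token: tmps[0])
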
 	
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/Concept.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/Concept.java
index 2b3ef90..ab635e3 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/Concept.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/Concept.java
@@ -16,15 +16,15 @@
  */
 package org.apache.stanbol.enhancer.engines.celi.classification.impl;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public class Concept {
 	
 	private final String label;
-	private final UriRef uri;
+	private final IRI uri;
 	private final Double confidence;
 	
-	public Concept(String label, UriRef uri,Double confidence) {
+	public Concept(String label, IRI uri,Double confidence) {
 		super();
 		this.label = label;
 		this.uri = uri;
@@ -42,7 +42,7 @@
     }
 
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 	
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngine.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngine.java
index df3ded0..47f1fbe 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngine.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngine.java
@@ -34,10 +34,10 @@
 import javax.xml.soap.SOAPException;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
@@ -158,7 +158,7 @@
 	
 	@Override
 	public void computeEnhancements(ContentItem ci) throws EngineException {
-		Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
+		Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
 		if (contentPart == null) {
 			throw new IllegalStateException("No ContentPart with Mimetype '" + TEXT_PLAIN_MIMETYPE + "' found for ContentItem " + ci.getUri() + ": This is also checked in the canEnhance method! -> This "
 					+ "indicated an Bug in the implementation of the " + "EnhancementJobManager!");
@@ -183,12 +183,12 @@
 			else 
 				lista = this.client.guessQueryLanguage(text);
 			
-			MGraph g = ci.getMetadata();
+			Graph g = ci.getMetadata();
 			//in ENHANCE_ASYNC we need to use read/write locks on the ContentItem
 			ci.getLock().writeLock().lock();
 			try {
     			GuessedLanguage gl = lista.get(0);
-    			UriRef textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
+    			IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
     		    g.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(gl.getLang())));
     			g.add(new TripleImpl(textEnhancement, ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(gl.getConfidence())));
 				g.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
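
Note: LiteralFactory converts Java objects into correspondingly typed literals (a Double becomes an xsd:double literal), so the confidence triple above is typed while the dc:language value stays a plain literal. Sketch with a hypothetical confidence value:

    LiteralFactory lf = LiteralFactory.getInstance();
    Literal confidence = lf.createTypedLiteral(Double.valueOf(0.98)); // xsd:double
    g.add(new TripleImpl(textEnhancement, ENHANCER_CONFIDENCE, confidence));
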
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngine.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngine.java
index 97ee8c3..a839dc8 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngine.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngine.java
@@ -34,13 +34,13 @@
 
 import javax.xml.soap.SOAPException;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
@@ -86,7 +86,7 @@
 })
 public class CeliLemmatizerEnhancementEngine extends AbstractEnhancementEngine<IOException, RuntimeException> implements EnhancementEngine, ServiceProperties {
 	// TODO: check if it is OK to define new properties in the FISE namespace
-	public static final UriRef hasLemmaForm = new UriRef("http://fise.iks-project.eu/ontology/hasLemmaForm");
+	public static final IRI hasLemmaForm = new IRI("http://fise.iks-project.eu/ontology/hasLemmaForm");
 
     /**
      * This ensures that no connections to external services are made if Stanbol is started in offline mode as the OnlineMode service will only be available if OfflineMode is deactivated.
@@ -185,7 +185,7 @@
 					+ "implementation of the " + "EnhancementJobManager!");
 		}
 
-		Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
+		Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
 		if (contentPart == null) {
 			throw new IllegalStateException("No ContentPart with Mimetype '" + TEXT_PLAIN_MIMETYPE + "' found for ContentItem " + ci.getUri() + ": This is also checked in the canEnhance method! -> This "
 					+ "indicated an Bug in the implementation of the " + "EnhancementJobManager!");
@@ -201,7 +201,7 @@
 			return;
 		}
 
-		MGraph graph = ci.getMetadata();
+		Graph graph = ci.getMetadata();
 
 		if (this.completeMorphoAnalysis) {
 			this.addMorphoAnalysisEnhancement(ci, text, language, graph);
@@ -210,7 +210,7 @@
 		}
 	}
 
-	private void addMorphoAnalysisEnhancement(ContentItem ci, String text, String language, MGraph g) throws EngineException {
+	private void addMorphoAnalysisEnhancement(ContentItem ci, String text, String language, Graph g) throws EngineException {
 		Language lang = new Language(language); // clerezza language for PlainLiterals
 		List<LexicalEntry> terms;
 		try {
@@ -229,7 +229,7 @@
 				List<CeliMorphoFeatures> mFeatures = this.convertLexicalEntryToMorphFeatures(le, language);
 				for (CeliMorphoFeatures feat : mFeatures) {
 					// Create a text annotation for each interpretation produced by the morphological analyzer
-					UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+					IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
 					g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT, new PlainLiteralImpl(le.getWordForm(), lang)));
 					if (le.from >= 0 && le.to > 0) {
 						g.add(new TripleImpl(textAnnotation, ENHANCER_START, literalFactory.createTypedLiteral(le.from)));
@@ -244,7 +244,7 @@
 		}
 	}
 
-	private void addLemmatizationEnhancement(ContentItem ci, String text, String language, MGraph g) throws EngineException {
+	private void addLemmatizationEnhancement(ContentItem ci, String text, String language, Graph g) throws EngineException {
 		Language lang = new Language(language); // clerezza language for PlainLiterals
 		String lemmatizedContents;
 		try {
@@ -257,7 +257,7 @@
 		// get a write lock before writing the enhancements
 		ci.getLock().writeLock().lock();
 		try {
-			UriRef textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
+			IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
 			g.add(new TripleImpl(textEnhancement, CeliLemmatizerEnhancementEngine.hasLemmaForm, new PlainLiteralImpl(lemmatizedContents, lang)));
 		} finally {
 			ci.getLock().writeLock().unlock();
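
Note: as the inline comments above say, the engine wraps the ISO 639 code of the detected language into a clerezza Language so that the produced plain literals carry a language tag. Minimal sketch (values hypothetical):

    Language lang = new Language("it");
    Literal lemma = new PlainLiteralImpl("essere", lang);
    // lemma.getLexicalForm() -> "essere"; lemma.getLanguage() -> it
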
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngine.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngine.java
index 4fff9b0..8b56f91 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngine.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngine.java
@@ -39,15 +39,15 @@
 
 import javax.xml.soap.SOAPException;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
@@ -98,7 +98,7 @@
 	 */
 	public static final Literal LANG_ID_ENGINE_NAME = LiteralFactory.getInstance().createTypedLiteral("org.apache.stanbol.enhancer.engines.celi.langid.impl.CeliLanguageIdentifierEnhancementEngine");
 
-	private static Map<String, UriRef> entityTypes = new HashMap<String, UriRef>();
+	private static Map<String, IRI> entityTypes = new HashMap<String, IRI>();
 	static {
 		entityTypes.put("pers", OntologicalClasses.DBPEDIA_PERSON);
 		entityTypes.put("PER", OntologicalClasses.DBPEDIA_PERSON);
@@ -248,7 +248,7 @@
 
 	@Override
 	public void computeEnhancements(ContentItem ci) throws EngineException {
-		Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
+		Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
 		if (contentPart == null) {
 			throw new IllegalStateException("No ContentPart with Mimetype '" + TEXT_PLAIN_MIMETYPE + "' found for ContentItem " + ci.getUri() + ": This is also checked in the canEnhance method! -> This "
 					+ "indicated an Bug in the implementation of the " + "EnhancementJobManager!");
@@ -272,11 +272,11 @@
 			List<NamedEntity> lista = this.client.extractEntities(text, language);
 			LiteralFactory literalFactory = LiteralFactory.getInstance();
 
-			MGraph g = ci.getMetadata();
+			Graph g = ci.getMetadata();
 
 			for (NamedEntity ne : lista) {
 				try {
-					UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+					IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
 					//add selected text as PlainLiteral in the language extracted from the text
 					g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT, 
 					    new PlainLiteralImpl(ne.getFormKind(),lang)));
@@ -307,7 +307,7 @@
 		return supportedLangs.contains(language);
 	}
 
-	private Resource getEntityRefForType(String type) {
+	private RDFTerm getEntityRefForType(String type) {
 		if (!entityTypes.containsKey(type))
 			return OntologicalClasses.SKOS_CONCEPT;
 		else
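
Note: getEntityRefForType resolves NER tag names through the static entityTypes map initialized above and falls back to skos:Concept for anything unmapped:

    getEntityRefForType("PER");     // -> OntologicalClasses.DBPEDIA_PERSON
    getEntityRefForType("unknown"); // -> OntologicalClasses.SKOS_CONCEPT (fallback)
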
diff --git a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngine.java b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngine.java
index 351aa4c..49022d0 100644
--- a/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngine.java
+++ b/enhancement-engines/celi/src/main/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngine.java
@@ -37,13 +37,13 @@
 
 import javax.xml.soap.SOAPException;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
@@ -209,7 +209,7 @@
 
 	@Override
 	public void computeEnhancements(ContentItem ci) throws EngineException {
-		Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
+		Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
 		if (contentPart == null) {
 			throw new IllegalStateException("No ContentPart with Mimetype '" + TEXT_PLAIN_MIMETYPE + "' found for ContentItem " + ci.getUri() + ": This is also checked in the canEnhance method! -> This "
 					+ "indicated an Bug in the implementation of the " + "EnhancementJobManager!");
@@ -233,11 +233,11 @@
 			List<SentimentExpression> lista = this.client.extractSentimentExpressions(text, language);
 			LiteralFactory literalFactory = LiteralFactory.getInstance();
 
-			MGraph g = ci.getMetadata();
+			Graph g = ci.getMetadata();
 
 			for (SentimentExpression se : lista) {
 				try {
-					UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+					IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
 					//add selected text as PlainLiteral in the language extracted from the text
 					g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT,  new PlainLiteralImpl(se.getSnippetStr(),lang)));
 					g.add(new TripleImpl(textAnnotation, DC_TYPE, CeliConstants.SENTIMENT_EXPRESSION));
diff --git a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngineTest.java b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngineTest.java
index 98146df..dfb337e 100644
--- a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngineTest.java
+++ b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/classification/impl/CeliClassificationEnhancementEngineTest.java
@@ -29,10 +29,10 @@
 import java.util.Hashtable;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.celi.CeliConstants;
 import org.apache.stanbol.enhancer.engines.celi.testutils.MockComponentContext;
@@ -98,7 +98,7 @@
 			classificationEngine.computeEnhancements(ci);
 
 	        TestUtils.logEnhancements(ci);
-	         HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+	         HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
 	            expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
 	            expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
 	                classificationEngine.getClass().getName()));
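
Note: widening the map to HashMap<IRI,RDFTerm> is what lets it mix value kinds: RDFTerm (the old Resource) is the common supertype of IRI and Literal in clerezza 1.0, so ci.getUri() (an IRI) and the typed dc:creator literal can share one map. Sketch (literal value hypothetical):

    HashMap<IRI,RDFTerm> expected = new HashMap<IRI,RDFTerm>();
    expected.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());       // IRI value
    expected.put(Properties.DC_CREATOR,
        LiteralFactory.getInstance().createTypedLiteral("engine.Name")); // Literal value
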
diff --git a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngineTest.java b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngineTest.java
index 68bb14b..27bbcda 100644
--- a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngineTest.java
+++ b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/langid/impl/CeliLanguageIdentifierEnhancementEngineTest.java
@@ -26,8 +26,8 @@
 import java.util.Hashtable;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.celi.CeliConstants;
 import org.apache.stanbol.enhancer.engines.celi.testutils.MockComponentContext;
@@ -84,7 +84,7 @@
 
 	        TestUtils.logEnhancements(ci);
 			
-			HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+			HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
 	        expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
 	        expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
 	            langIdentifier.getClass().getName()));
diff --git a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliAnalyzedTextLemmatizerEngineTest.java b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliAnalyzedTextLemmatizerEngineTest.java
index 75ce6a7..b183194 100644
--- a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliAnalyzedTextLemmatizerEngineTest.java
+++ b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliAnalyzedTextLemmatizerEngineTest.java
@@ -28,8 +28,8 @@
 
 import junit.framework.Assert;
 
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.celi.CeliConstants;
 import org.apache.stanbol.enhancer.engines.celi.testutils.MockComponentContext;
diff --git a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngineTest.java b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngineTest.java
index 41e19ef..c0665ee 100644
--- a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngineTest.java
+++ b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/lemmatizer/impl/CeliLemmatizerEnhancementEngineTest.java
@@ -37,15 +37,14 @@
 import java.util.Iterator;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.celi.CeliConstants;
 import org.apache.stanbol.enhancer.engines.celi.CeliMorphoFeatures;
@@ -118,17 +117,17 @@
 
 		TestUtils.logEnhancements(ci);
 		//validate enhancement
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
             morphoAnalysisEngine.getClass().getName()));
         Iterator<Triple> lemmaTextAnnotationIterator = ci.getMetadata().filter(null, RDF_TYPE, ENHANCER_TEXTANNOTATION);
         assertTrue("A TextAnnotation is expected by this Test", lemmaTextAnnotationIterator.hasNext());
-        NonLiteral lemmaTextAnnotation = lemmaTextAnnotationIterator.next().getSubject();
-        assertTrue("TextAnnoations MUST BE UriRefs!",lemmaTextAnnotation instanceof UriRef);
+        BlankNodeOrIRI lemmaTextAnnotation = lemmaTextAnnotationIterator.next().getSubject();
+        assertTrue("TextAnnoations MUST BE IRIs!",lemmaTextAnnotation instanceof IRI);
         assertFalse("Only a single TextAnnotation is expected by this Test", lemmaTextAnnotationIterator.hasNext());
         //validate the enhancement metadata
-        validateEnhancement(ci.getMetadata(), (UriRef)lemmaTextAnnotation, expectedValues);
+        validateEnhancement(ci.getMetadata(), (IRI)lemmaTextAnnotation, expectedValues);
         //validate the lemma form TextAnnotation
         int lemmaForms = validateLemmaFormProperty(ci.getMetadata(), lemmaTextAnnotation,"it");
         assertTrue("Only a single LemmaForm property is expected if '"+ MORPHOLOGICAL_ANALYSIS+"=false'",lemmaForms == 1);
@@ -154,7 +153,7 @@
 
         TestUtils.logEnhancements(ci);
         //validate enhancements
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
             morphoAnalysisEngine.getClass().getName()));
@@ -166,7 +165,7 @@
         //  -> this might be used to test that there are no TextAnnotations
         int textAnnotationCount = 0;
         while (textAnnotationIterator.hasNext()) {
-            UriRef textAnnotation = (UriRef) textAnnotationIterator.next().getSubject();
+            IRI textAnnotation = (IRI) textAnnotationIterator.next().getSubject();
             // test if selected Text is added
             validateTextAnnotation(ci.getMetadata(), textAnnotation,TERM,expectedValues);
             textAnnotationCount++;
@@ -187,18 +186,18 @@
      * @param lang the language of the analyzed text
      * @return The number of lemma forms found
      */
-    private int validateLemmaFormProperty(TripleCollection enhancements, NonLiteral textAnnotation, String lang) {
+    private int validateLemmaFormProperty(Graph enhancements, BlankNodeOrIRI textAnnotation, String lang) {
         Iterator<Triple> lemmaFormsIterator = enhancements.filter(textAnnotation, hasLemmaForm, null);
         assertTrue("No lemma form value found for TextAnnotation "+textAnnotation+"!", lemmaFormsIterator.hasNext());
         int lemmaFormCount = 0;
         while(lemmaFormsIterator.hasNext()){
             lemmaFormCount++;
-            Resource lemmaForms = lemmaFormsIterator.next().getObject();
-            assertTrue("Lemma Forms value are expected of type PlainLiteral", lemmaForms instanceof PlainLiteral);
-            assertFalse("Lemma forms MUST NOT be empty",((PlainLiteral)lemmaForms).getLexicalForm().isEmpty());
-            assertNotNull("Language of the Lemma Form literal MUST BE not null",((PlainLiteral)lemmaForms).getLanguage());
+            RDFTerm lemmaForms = lemmaFormsIterator.next().getObject();
+            assertTrue("Lemma Forms value are expected of type Literal", lemmaForms instanceof Literal);
+            assertFalse("Lemma forms MUST NOT be empty",((Literal)lemmaForms).getLexicalForm().isEmpty());
+            assertNotNull("Language of the Lemma Form literal MUST NOT be null",((Literal)lemmaForms).getLanguage());
             assertEquals("Language of the Lemma Form literal MUST BE the same as for the parsed text",
-                lang, ((PlainLiteral)lemmaForms).getLanguage().toString());
+                lang, ((Literal)lemmaForms).getLanguage().toString());
         }
         return lemmaFormCount;
     }
@@ -207,14 +206,14 @@
      * @param enhancements The graph with the enhancements
      * @param textAnnotation the TextAnnotation to check
      */
-    private void validateMorphoFeatureProperty(TripleCollection enhancements, NonLiteral textAnnotation) {
+    private void validateMorphoFeatureProperty(Graph enhancements, BlankNodeOrIRI textAnnotation) {
     	//This test checks for known morpho features of a given input (constant TERM)
         Iterator<Triple> morphoFeatureIterator = enhancements.filter(textAnnotation, RDF_TYPE, null);
         assertTrue("No POS Morpho Feature value found for TextAnnotation "+textAnnotation+"!", morphoFeatureIterator.hasNext());
         while(morphoFeatureIterator.hasNext()){
-            Resource morphoFeature = morphoFeatureIterator.next().getObject();
-            assertTrue("Morpho Feature value are expected of typed literal", morphoFeature instanceof UriRef);
-            String feature=((UriRef)morphoFeature).getUnicodeString();
+            RDFTerm morphoFeature = morphoFeatureIterator.next().getObject();
+            assertTrue("Morpho Feature value are expected of typed literal", morphoFeature instanceof IRI);
+            String feature=((IRI)morphoFeature).getUnicodeString();
             assertFalse("Morpho Feature MUST NOT be empty",feature.isEmpty());
             if(feature.startsWith(OLIA_NAMESPACE)){
             	String key=feature.substring(OLIA_NAMESPACE.length());
@@ -225,9 +224,9 @@
         morphoFeatureIterator = enhancements.filter(textAnnotation, CeliMorphoFeatures.HAS_GENDER, null);
         assertTrue("No Gender Morpho Feature value found for TextAnnotation "+textAnnotation+"!", morphoFeatureIterator.hasNext());
         if(morphoFeatureIterator.hasNext()){
-            Resource morphoFeature = morphoFeatureIterator.next().getObject();
-            assertTrue("Morpho Feature value are expected of typed literal", morphoFeature instanceof UriRef);
-            String feature=((UriRef)morphoFeature).getUnicodeString();
+            RDFTerm morphoFeature = morphoFeatureIterator.next().getObject();
+            assertTrue("Morpho Feature value are expected of typed literal", morphoFeature instanceof IRI);
+            String feature=((IRI)morphoFeature).getUnicodeString();
             assertFalse("Morpho Feature MUST NOT be empty",feature.isEmpty());
             if(feature.startsWith(OLIA_NAMESPACE)){
             	String key=feature.substring(OLIA_NAMESPACE.length());
@@ -238,9 +237,9 @@
         morphoFeatureIterator = enhancements.filter(textAnnotation, CeliMorphoFeatures.HAS_NUMBER, null);
         assertTrue("No Number Morpho Feature value found for TextAnnotation "+textAnnotation+"!", morphoFeatureIterator.hasNext());
         if(morphoFeatureIterator.hasNext()){
-            Resource morphoFeature = morphoFeatureIterator.next().getObject();
-            assertTrue("Morpho Feature value are expected of typed literal", morphoFeature instanceof UriRef);
-            String feature=((UriRef)morphoFeature).getUnicodeString();
+            RDFTerm morphoFeature = morphoFeatureIterator.next().getObject();
+            assertTrue("Morpho Feature value are expected of typed literal", morphoFeature instanceof IRI);
+            String feature=((IRI)morphoFeature).getUnicodeString();
             assertFalse("Morpho Feature MUST NOT be empty",feature.isEmpty());
             if(feature.startsWith(OLIA_NAMESPACE)){
             	String key=feature.substring(OLIA_NAMESPACE.length());
@@ -251,10 +250,10 @@
         morphoFeatureIterator = enhancements.filter(textAnnotation, CeliLemmatizerEnhancementEngine.hasLemmaForm, null);
         assertTrue("No Number Morpho Feature value found for TextAnnotation "+textAnnotation+"!", morphoFeatureIterator.hasNext());
         if(morphoFeatureIterator.hasNext()){
-            Resource morphoFeature = morphoFeatureIterator.next().getObject();
-            assertTrue("Lemma Forms value are expected of type PlainLiteral", morphoFeature instanceof PlainLiteral);
-            assertFalse("Lemma forms MUST NOT be empty",((PlainLiteral)morphoFeature).getLexicalForm().isEmpty());
-            String feature=((PlainLiteral)morphoFeature).getLexicalForm();
+            RDFTerm morphoFeature = morphoFeatureIterator.next().getObject();
+            assertTrue("Lemma Forms value are expected of type Literal", morphoFeature instanceof Literal);
+            assertFalse("Lemma forms MUST NOT be empty",((Literal)morphoFeature).getLexicalForm().isEmpty());
+            String feature=((Literal)morphoFeature).getLexicalForm();
             assertTrue("Lemma of "+TERM+" should be "+TERM , (feature.equals(TERM)));
         }
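
Note: the assertions above reflect that clerezza 1.0 collapses PlainLiteral and TypedLiteral into the single Literal interface: a language-tagged literal answers getLanguage() with a non-null value, a typed literal with null (carrying getDataType() instead). Sketch (values hypothetical):

    RDFTerm value = new PlainLiteralImpl("casa", new Language("it"));
    if (value instanceof Literal) {
        Literal lit = (Literal) value;
        lit.getLexicalForm(); // -> "casa"
        lit.getLanguage();    // -> it (null for typed literals)
    }
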
         
diff --git a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngineTest.java b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngineTest.java
index b6192b8..aa654e6 100644
--- a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngineTest.java
+++ b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/ner/impl/CeliNamedEntityExtractionEnhancementEngineTest.java
@@ -26,10 +26,10 @@
 import java.util.Hashtable;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.celi.CeliConstants;
 import org.apache.stanbol.enhancer.engines.celi.classification.impl.CeliClassificationEnhancementEngine;
@@ -103,7 +103,7 @@
 
 			TestUtils.logEnhancements(ci);
 			
-			HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+			HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
 			expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
 			expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
 			    nerEngine.getClass().getName()));
@@ -125,23 +125,23 @@
 		//this.testInput(CeliNamedEntityExtractionEnhancementEngineTest.TEXT_fr2, "fr");
 	}
 
-//	private int checkAllEntityAnnotations(MGraph g) {
+//	private int checkAllEntityAnnotations(Graph g) {
 //		Iterator<Triple> entityAnnotationIterator = g.filter(null, RDF_TYPE, ENHANCER_ENTITYANNOTATION);
 //		int entityAnnotationCount = 0;
 //		while (entityAnnotationIterator.hasNext()) {
-//			UriRef entityAnnotation = (UriRef) entityAnnotationIterator.next().getSubject();
+//			IRI entityAnnotation = (IRI) entityAnnotationIterator.next().getSubject();
 //			entityAnnotationCount++;
 //		}
 //		return entityAnnotationCount;
 //	}
 //
-//	private int checkAllTextAnnotations(MGraph g, String content) {
+//	private int checkAllTextAnnotations(Graph g, String content) {
 //		Iterator<Triple> textAnnotationIterator = g.filter(null, RDF_TYPE, ENHANCER_TEXTANNOTATION);
 //		// test if a textAnnotation is present
 //		assertTrue(textAnnotationIterator.hasNext());
 //		int textAnnotationCount = 0;
 //		while (textAnnotationIterator.hasNext()) {
-//			UriRef textAnnotation = (UriRef) textAnnotationIterator.next().getSubject();
+//			IRI textAnnotation = (IRI) textAnnotationIterator.next().getSubject();
 //			textAnnotationCount++;
 //		}
 //		return textAnnotationCount;
diff --git a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliAnalyzedTextSentimentAnalysisEngineTest.java b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliAnalyzedTextSentimentAnalysisEngineTest.java
index 7cd45f1..6dcef52 100644
--- a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliAnalyzedTextSentimentAnalysisEngineTest.java
+++ b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliAnalyzedTextSentimentAnalysisEngineTest.java
@@ -26,8 +26,8 @@
 
 import junit.framework.Assert;
 
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.celi.CeliConstants;
 import org.apache.stanbol.enhancer.engines.celi.lemmatizer.impl.CeliAnalyzedTextLemmatizerEngineTest;
diff --git a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngineTest.java b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngineTest.java
index 2f5bb7f..f0b2100 100644
--- a/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngineTest.java
+++ b/enhancement-engines/celi/src/test/java/org/apache/stanbol/enhancer/engines/celi/sentimentanalysis/impl/CeliSentimentAnalysisEngineTest.java
@@ -27,10 +27,10 @@
 import java.util.Hashtable;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.celi.CeliConstants;
 import org.apache.stanbol.enhancer.engines.celi.testutils.MockComponentContext;
@@ -92,7 +92,7 @@
 
 			TestUtils.logEnhancements(ci);
 
-			HashMap<UriRef, Resource> expectedValues = new HashMap<UriRef, Resource>();
+			HashMap<IRI, RDFTerm> expectedValues = new HashMap<IRI, RDFTerm>();
 			expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
 			expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(sentimentAnalysisEngine.getClass().getName()));
 			expectedValues.put(DC_TYPE, CeliConstants.SENTIMENT_EXPRESSION);
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/Constants.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/Constants.java
index f05e7f0..3d3fdf8 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/Constants.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/Constants.java
@@ -22,7 +22,7 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Property;
 
 /**
@@ -60,17 +60,17 @@
 	 * Definition of some Spotlight specific properties added to
 	 * fise:EntityAnnotations created by this Engine
 	 */
-	UriRef PROPERTY_CONTEXTUAL_SCORE = new UriRef(
+	IRI PROPERTY_CONTEXTUAL_SCORE = new IRI(
 			SPOTLIGHT_NAME_SPACE + "contextualScore");
-	UriRef PROPERTY_PERCENTAGE_OF_SECOND_RANK = new UriRef(
+	IRI PROPERTY_PERCENTAGE_OF_SECOND_RANK = new IRI(
 			SPOTLIGHT_NAME_SPACE + "percentageOfSecondRank");
-	UriRef PROPERTY_SUPPORT = new UriRef(
+	IRI PROPERTY_SUPPORT = new IRI(
 			SPOTLIGHT_NAME_SPACE + "support");
-	UriRef PROPERTY_PRIOR_SCORE = new UriRef(
+	IRI PROPERTY_PRIOR_SCORE = new IRI(
 			SPOTLIGHT_NAME_SPACE + "priorScore");
-	UriRef PROPERTY_FINAL_SCORE = new UriRef(
+	IRI PROPERTY_FINAL_SCORE = new IRI(
 			SPOTLIGHT_NAME_SPACE + "finalScore");
-	UriRef PROPERTY_SIMILARITY_SCORE = new UriRef(
+	IRI PROPERTY_SIMILARITY_SCORE = new IRI(
 			SPOTLIGHT_NAME_SPACE + "similarityScore");
 	
 	Charset UTF8 = Charset.forName("UTF-8");
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementEngine.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementEngine.java
index 8a1018b..64c5d11 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementEngine.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementEngine.java
@@ -42,8 +42,8 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
@@ -195,7 +195,7 @@
 		String text = SpotlightEngineUtils.getPlainContent(ci);
 
 		Collection<Annotation> dbpslGraph = doPostRequest(text,ci.getUri());
-		Map<SurfaceForm,UriRef> surfaceForm2TextAnnotation = new HashMap<SurfaceForm,UriRef>();
+		Map<SurfaceForm,IRI> surfaceForm2TextAnnotation = new HashMap<SurfaceForm,IRI>();
 		if (dbpslGraph != null) {
 			// Acquire a write lock on the ContentItem when adding the
 			// enhancements
@@ -235,9 +235,9 @@
 	 */
 	protected void createEnhancements(Collection<Annotation> occs,
 			ContentItem ci, String text, Language language,
-			Map<SurfaceForm,UriRef> surfaceForm2TextAnnotation) {
+			Map<SurfaceForm,IRI> surfaceForm2TextAnnotation) {
 		for (Annotation occ : occs) {
-			UriRef textAnnotation = surfaceForm2TextAnnotation.get(occ.surfaceForm);
+			IRI textAnnotation = surfaceForm2TextAnnotation.get(occ.surfaceForm);
 			if(textAnnotation == null){ //not yet written ... create a new
     			textAnnotation = SpotlightEngineUtils.createTextEnhancement(
     					occ.surfaceForm, this, ci, text, language);
@@ -260,7 +260,7 @@
 	 * @throws EngineException
 	 *             if the request cannot be sent
 	 */
-	protected Collection<Annotation> doPostRequest(String text, UriRef contentItemUri)
+	protected Collection<Annotation> doPostRequest(String text, IRI contentItemUri)
 			throws EngineException {
 		HttpURLConnection connection = null;
 		BufferedWriter wr = null;
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementEngine.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementEngine.java
index 001e59c..4218cf4 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementEngine.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementEngine.java
@@ -44,10 +44,10 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
@@ -236,16 +236,16 @@
 			ContentItem ci, String text, Language language) {
 
 		// TODO create TextEnhancement (form, start, end, type?)
-		HashMap<String, UriRef> entityAnnotationMap = new HashMap<String, UriRef>();
+		HashMap<String, IRI> entityAnnotationMap = new HashMap<String, IRI>();
 
-		MGraph model = ci.getMetadata();
+		Graph model = ci.getMetadata();
 		for (SurfaceForm occ : occs) {
-			UriRef textAnnotation = SpotlightEngineUtils.createTextEnhancement(
+			IRI textAnnotation = SpotlightEngineUtils.createTextEnhancement(
 					occ, this, ci, text, language);
 			Iterator<CandidateResource> resources = occ.resources.iterator();
 			while (resources.hasNext()) {
 				CandidateResource resource = resources.next();
-				UriRef entityAnnotation = SpotlightEngineUtils.createEntityAnnotation(
+				IRI entityAnnotation = SpotlightEngineUtils.createEntityAnnotation(
 						resource, this, ci, textAnnotation);
 				entityAnnotationMap.put(resource.localName, entityAnnotation);
 			}
@@ -269,7 +269,7 @@
 	 * @throws EngineException
 	 *             if the request cannot be sent
 	 */
-	protected Collection<SurfaceForm> doPostRequest(String text,UriRef contentItemUri)
+	protected Collection<SurfaceForm> doPostRequest(String text,IRI contentItemUri)
 			throws EngineException {
 		HttpURLConnection connection = null;
 		BufferedWriter wr = null;
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementEngine.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementEngine.java
index 282853e..950d798 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementEngine.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementEngine.java
@@ -49,14 +49,14 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
@@ -137,7 +137,7 @@
 	 * holds the existing TextAnnotations, which are used as input for DBpedia
 	 * Spotlight, and later for linking of the results
 	 */
-	private Hashtable<String, UriRef> textAnnotationsMap;
+	private Hashtable<String, IRI> textAnnotationsMap;
 
     private int connectionTimeout;
 	/**
@@ -207,7 +207,7 @@
 
 
 		// Retrieve the existing text annotations (requires read lock)
-		MGraph graph = ci.getMetadata();
+		Graph graph = ci.getMetadata();
 		String xmlTextAnnotations = this.getSpottedXml(text, graph);
 		Collection<Annotation> dbpslGraph = doPostRequest(text,
 				xmlTextAnnotations, ci.getUri());
@@ -247,14 +247,14 @@
 	 */
 	public void createEnhancements(Collection<Annotation> occs,
 			ContentItem ci, Language language) {
-		HashMap<Resource, UriRef> entityAnnotationMap = new HashMap<Resource, UriRef>();
+		HashMap<RDFTerm, IRI> entityAnnotationMap = new HashMap<RDFTerm, IRI>();
 
 		for (Annotation occ : occs) {
 
 			if (textAnnotationsMap.get(occ.surfaceForm) != null) {
-				UriRef textAnnotation = textAnnotationsMap.get(occ.surfaceForm);
-				MGraph model = ci.getMetadata();
-				UriRef entityAnnotation = EnhancementEngineHelper
+				IRI textAnnotation = textAnnotationsMap.get(occ.surfaceForm);
+				Graph model = ci.getMetadata();
+				IRI entityAnnotation = EnhancementEngineHelper
 						.createEntityEnhancement(ci, this);
 				entityAnnotationMap.put(occ.uri, entityAnnotation);
 				Literal label = new PlainLiteralImpl(occ.surfaceForm.name, language);
@@ -268,7 +268,7 @@
 					Iterator<String> it = t.iterator();
 					while (it.hasNext())
 						model.add(new TripleImpl(entityAnnotation,
-								ENHANCER_ENTITY_TYPE, new UriRef(it.next())));
+								ENHANCER_ENTITY_TYPE, new IRI(it.next())));
 				}
 				model.add(new TripleImpl(entityAnnotation,
 						ENHANCER_ENTITY_REFERENCE, occ.uri));
@@ -290,7 +290,7 @@
 	 *             if the request cannot be sent
 	 */
 	protected Collection<Annotation> doPostRequest(String text,
-			String xmlTextAnnotations, UriRef contentItemUri) throws EngineException {
+			String xmlTextAnnotations, IRI contentItemUri) throws EngineException {
 		HttpURLConnection connection = null;
 		BufferedWriter wr = null;
 		try {
@@ -381,16 +381,16 @@
 		return Annotation.parseAnnotations(xmlDoc);
 	}
 
-	private String getSpottedXml(String text, MGraph graph) {
+	private String getSpottedXml(String text, Graph graph) {
 		StringBuilder xml = new StringBuilder();
-		textAnnotationsMap = new Hashtable<String, UriRef>();
+		textAnnotationsMap = new Hashtable<String, IRI>();
 
 		xml.append(String.format("<annotation text=\"%s\">", text));
 		try {
 			for (Iterator<Triple> it = graph.filter(null, RDF_TYPE,
 					TechnicalClasses.ENHANCER_TEXTANNOTATION); it.hasNext();) {
 				// Triple tAnnotation = it.next();
-				UriRef uri = (UriRef) it.next().getSubject();
+				IRI uri = (IRI) it.next().getSubject();
 				String surfaceForm = EnhancementEngineHelper.getString(graph,
 						uri, ENHANCER_SELECTED_TEXT);
 				if (surfaceForm != null) {
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/Annotation.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/Annotation.java
index 1ccd04f..61d6d62 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/Annotation.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/Annotation.java
@@ -25,8 +25,8 @@
 import java.util.List;

 import java.util.Set;

 

-import org.apache.clerezza.rdf.core.Resource;

-import org.apache.clerezza.rdf.core.UriRef;

+import org.apache.clerezza.commons.rdf.RDFTerm;

+import org.apache.clerezza.commons.rdf.IRI;

 import org.w3c.dom.Document;

 import org.w3c.dom.Element;

 import org.w3c.dom.NodeList;

@@ -72,7 +72,7 @@
 		IGNORED_DBP_TYPES = Collections.unmodifiableSet(ignored);

 	}

 	

-	public Resource uri;

+	public RDFTerm uri;

 	//TODO: change this to a list with the parsed types

 	//      Processing of XML results should be done during parsing

 	public String types;

@@ -134,13 +134,13 @@
 	 * @return a Collection<DBPSLAnnotation> with all annotations

 	 */

 	public static Collection<Annotation> parseAnnotations(Document xmlDoc) {

-		NodeList nList = getElementsByTagName(xmlDoc, "Resource");

+		NodeList nList = getElementsByTagName(xmlDoc, "RDFTerm");

 		Collection<Annotation> dbpslAnnos = new HashSet<Annotation>();
 
 		for (int temp = 0; temp < nList.getLength(); temp++) {
 			Annotation dbpslann = new Annotation();
 			Element node = (Element) nList.item(temp);
-			dbpslann.uri = new UriRef(node.getAttribute("URI"));
+			dbpslann.uri = new IRI(node.getAttribute("URI"));
 			dbpslann.support = (new Integer(node.getAttribute("support")))
 					.intValue();
 			dbpslann.types = node.getAttribute("types");
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/CandidateResource.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/CandidateResource.java
index 69fabb7..2ea8809 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/CandidateResource.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/CandidateResource.java
@@ -21,13 +21,13 @@
 import java.util.Collection;
 import java.util.HashSet;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
-//import org.apache.clerezza.rdf.core.Resource;
+//import org.apache.clerezza.commons.rdf.RDFTerm;
 
 /**
  * Stores the candidate resources given by DBPedia Spotlight Candidates.
@@ -52,8 +52,8 @@
 						support, priorScore, finalScore);
 	}
 	
-	public UriRef getUri(){
-	    return new UriRef(new StringBuilder("http://dbpedia.org/resource/")
+	public IRI getUri(){
+	    return new IRI(new StringBuilder("http://dbpedia.org/resource/")
 	    .append(localName).toString());
 	}
 	
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/SurfaceForm.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/SurfaceForm.java
index 725bc33..d76db0a 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/SurfaceForm.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/model/SurfaceForm.java
@@ -28,7 +28,7 @@
 import org.w3c.dom.NodeList;
 
 
-//import org.apache.clerezza.rdf.core.Resource;
+//import org.apache.clerezza.commons.rdf.RDFTerm;
 
 /**
  * Stores the surface forms given by DBPedia Spotlight Candidates.
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementEngine.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementEngine.java
index 38e1d53..b43e233 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementEngine.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementEngine.java
@@ -37,10 +37,10 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
@@ -201,11 +201,11 @@
 	protected void createEnhancements(Collection<SurfaceForm> occs,
 			ContentItem ci,  String content, Language lang) {
 
-		HashMap<String, UriRef> entityAnnotationMap = new HashMap<String, UriRef>();
+		HashMap<String, IRI> entityAnnotationMap = new HashMap<String, IRI>();
 
-		MGraph model = ci.getMetadata();
+		Graph model = ci.getMetadata();
 		for (SurfaceForm occ : occs) {
-			UriRef textAnnotation = SpotlightEngineUtils.createTextEnhancement(
+			IRI textAnnotation = SpotlightEngineUtils.createTextEnhancement(
 					occ, this, ci, content, lang);
 			if (entityAnnotationMap.containsKey(occ.name)) {
 				model.add(new TripleImpl(entityAnnotationMap.get(occ.name),
@@ -228,7 +228,7 @@
 	 * @throws EngineException
 	 *             if the request cannot be sent
 	 */
-	protected Collection<SurfaceForm> doPostRequest(String text,UriRef contentItemUri)
+	protected Collection<SurfaceForm> doPostRequest(String text,IRI contentItemUri)
 			throws EngineException {
 		//rwesten: reimplemented this so that the request
 		//         is directly written to the request instead
diff --git a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/utils/SpotlightEngineUtils.java b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/utils/SpotlightEngineUtils.java
index 6583f71..5bcca05 100644
--- a/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/utils/SpotlightEngineUtils.java
+++ b/enhancement-engines/dbpedia-spotlight/src/main/java/org/apache/stanbol/enhancer/engines/dbpspotlight/utils/SpotlightEngineUtils.java
@@ -45,13 +45,13 @@
 import java.util.Dictionary;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.Constants;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.model.Annotation;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.model.CandidateResource;
@@ -116,7 +116,7 @@
 	}
 	public static String getPlainContent(ContentItem ci) 
 			throws EngineException {
-		Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci,
+		Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci,
 				SUPPORTED_MIMTYPES);
 		if (contentPart == null) {
 			throw new IllegalStateException(
@@ -203,11 +203,11 @@
      * @param lang the language of the content or <code>null</code>
      * @return the URI of the created fise:TextAnnotation
      */
-	public static UriRef createTextEnhancement(SurfaceForm occ,
+	public static IRI createTextEnhancement(SurfaceForm occ,
 			EnhancementEngine engine, ContentItem ci, String content,
 			Language lang) {
-		MGraph model = ci.getMetadata();
-		UriRef textAnnotation = EnhancementEngineHelper
+		Graph model = ci.getMetadata();
+		IRI textAnnotation = EnhancementEngineHelper
 				.createTextEnhancement(ci, engine);
 		model.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT,
 				new PlainLiteralImpl(occ.name, lang)));
@@ -217,7 +217,7 @@
 				literalFactory.createTypedLiteral(occ.offset
 						+ occ.name.length())));
 		if(occ.type != null && !occ.type.isEmpty()){
-			model.add(new TripleImpl(textAnnotation, DC_TYPE, new UriRef(
+			model.add(new TripleImpl(textAnnotation, DC_TYPE, new IRI(
 					occ.type)));
 		}
 		model.add(new TripleImpl(textAnnotation, ENHANCER_SELECTION_CONTEXT, 
@@ -237,11 +237,11 @@
 	 * created fise:EntityAnnotation
 	 * @return the URI of the created fise:EntityAnnotation
 	 */
-	public static UriRef createEntityAnnotation(CandidateResource resource,
-			EnhancementEngine engine, ContentItem ci, UriRef textAnnotation) {
-		UriRef entityAnnotation = EnhancementEngineHelper
+	public static IRI createEntityAnnotation(CandidateResource resource,
+			EnhancementEngine engine, ContentItem ci, IRI textAnnotation) {
+		IRI entityAnnotation = EnhancementEngineHelper
 				.createEntityEnhancement(ci, engine);
-		MGraph model = ci.getMetadata();
+		Graph model = ci.getMetadata();
 		Literal label = new PlainLiteralImpl(resource.label,
 				new Language("en"));
 		model.add(new TripleImpl(entityAnnotation, DC_RELATION,
@@ -276,9 +276,9 @@
 	 */
 	public static void createEntityAnnotation(Annotation annotation, 
 			EnhancementEngine engine, ContentItem ci,
-			UriRef textAnnotation, Language language) {
-		MGraph model = ci.getMetadata();
-		UriRef entityAnnotation = EnhancementEngineHelper
+			IRI textAnnotation, Language language) {
+		Graph model = ci.getMetadata();
+		IRI entityAnnotation = EnhancementEngineHelper
 				.createEntityEnhancement(ci, engine);
 		Literal label = new PlainLiteralImpl(annotation.surfaceForm.name,
 				language);
@@ -290,7 +290,7 @@
 				ENHANCER_ENTITY_REFERENCE, annotation.uri));
 		//set the fise:entity-type
 		for(String type : annotation.getTypeNames()){
-			UriRef annotationType = new UriRef(type);
+			IRI annotationType = new IRI(type);
 			model.add(new TripleImpl(entityAnnotation,
 					ENHANCER_ENTITY_TYPE, annotationType));
 		}
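
The two helpers above are designed to be chained: the IRI returned by createTextEnhancement is passed as the textAnnotation argument of createEntityAnnotation. A usage sketch, assuming an engine instance `engine`, a content item `ci` with its plain text `content`, and an `occ`/`resource` pair parsed from the Spotlight response:

    Language lang = new Language("en");
    IRI textAnnotation = SpotlightEngineUtils.createTextEnhancement(
            occ, engine, ci, content, lang);
    IRI entityAnnotation = SpotlightEngineUtils.createEntityAnnotation(
            resource, engine, ci, textAnnotation);
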
diff --git a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementTest.java b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementTest.java
index 72e3487..6c1f677 100644
--- a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementTest.java
+++ b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/annotate/DBPSpotlightAnnotateEnhancementTest.java
@@ -28,10 +28,10 @@
 import java.util.Map.Entry;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.Constants;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.TestDefaults;
@@ -77,7 +77,7 @@
 	private static ContentItemFactory ciFactory = InMemoryContentItemFactory.getInstance();
 	
 	private ContentItem ci;
-	private static Entry<UriRef, Blob> textContentPart;
+	private static Entry<IRI, Blob> textContentPart;
 
 	@BeforeClass
 	public static void oneTimeSetup() throws Exception {
@@ -126,7 +126,7 @@
 	    } catch (EngineException e) {
             RemoteServiceHelper.checkServiceUnavailable(e);
         }
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
         		dbpslight.getClass().getName()));
diff --git a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementTest.java b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementTest.java
index 94db25f..de57664 100644
--- a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementTest.java
+++ b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/candidates/DBPSpotlightCandidatesEnhancementTest.java
@@ -28,10 +28,10 @@
 import java.util.Map.Entry;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.Constants;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.TestDefaults;
@@ -76,7 +76,7 @@
 	private static ContentItemFactory ciFactory = InMemoryContentItemFactory.getInstance();
 	
 	private ContentItem ci;
-	private static Entry<UriRef, Blob> textContentPart;
+	private static Entry<IRI, Blob> textContentPart;
 
 	@BeforeClass
 	public static void oneTimeSetup() throws Exception {
@@ -126,7 +126,7 @@
             RemoteServiceHelper.checkServiceUnavailable(e);
             return;
         }
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
         		dbpslight.getClass().getName()));
diff --git a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementTest.java b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementTest.java
index 94e8be0..1f049fb 100644
--- a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementTest.java
+++ b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/disambiguate/DBPSpotlightDisambiguateEnhancementTest.java
@@ -27,13 +27,13 @@
 import java.util.HashMap;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.Constants;
@@ -83,7 +83,7 @@
 	private static ContentItemFactory ciFactory = InMemoryContentItemFactory.getInstance();
 	
 	private ContentItem ci;
-	private static Entry<UriRef, Blob> textContentPart;
+	private static Entry<IRI, Blob> textContentPart;
 
 	@BeforeClass
 	public static void oneTimeSetup() throws Exception {
@@ -108,9 +108,9 @@
 		//we need also to create a fise:TextAnnotation to test disambiguation
 		String selected = "Angela Merkel";
 		Language en = new Language("en");
-		UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, 
+		IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, 
 				new DBPSpotlightSpotEnhancementEngine());
-		MGraph model = ci.getMetadata();
+		Graph model = ci.getMetadata();
 		model.add(new TripleImpl(textAnnotation, Properties.ENHANCER_SELECTED_TEXT, 
 				new PlainLiteralImpl(selected,en)));
 		model.add(new TripleImpl(textAnnotation, Properties.ENHANCER_SELECTION_CONTEXT, 
@@ -159,7 +159,7 @@
             RemoteServiceHelper.checkServiceUnavailable(e);
             return;
         }
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
         		dbpslight.getClass().getName()));
diff --git a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementTest.java b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementTest.java
index 129ab4d..d4428bd 100644
--- a/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementTest.java
+++ b/enhancement-engines/dbpedia-spotlight/src/test/java/org/apache/stanbol/enhancer/engines/dbpspotlight/spot/DBPSpotlightSpotEnhancementTest.java
@@ -29,10 +29,10 @@
 import java.util.Map.Entry;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.Constants;
 import org.apache.stanbol.enhancer.engines.dbpspotlight.TestDefaults;
@@ -76,7 +76,7 @@
 	private static ContentItemFactory ciFactory = InMemoryContentItemFactory.getInstance();
 	
 	private ContentItem ci;
-	private static Entry<UriRef, Blob> textContentPart;
+	private static Entry<IRI, Blob> textContentPart;
 	
 	@BeforeClass
 	public static void oneTimeSetup() throws Exception {
@@ -129,7 +129,7 @@
             RemoteServiceHelper.checkServiceUnavailable(e);
             return;
         }
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
         		dbpslight.getClass().getName()));
diff --git a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceConstants.java b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceConstants.java
index 887426c..92c0b54 100644
--- a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceConstants.java
+++ b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceConstants.java
@@ -19,10 +19,9 @@
 import java.util.Collections;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;
 
 /**
@@ -79,7 +78,7 @@
      * dereferencing entities.
      * @since 0.12.1 (<a href="https://issues.apache.org/jira/browse/STANBOL-1334">STANBOL-1334</a>)
      */
-    Set<UriRef> DEFAULT_ENTITY_REFERENCES = Collections.unmodifiableSet(
+    Set<IRI> DEFAULT_ENTITY_REFERENCES = Collections.unmodifiableSet(
         Collections.singleton(Properties.ENHANCER_ENTITY_REFERENCE));
 
     /**
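
DEFAULT_ENTITY_REFERENCES only applies when no ENTITY_REFERENCES value is configured. A configuration sketch, assuming OSGi-style Dictionary properties as parsed by DereferenceEngineConfig below (the dct:references value is illustrative; prefixes are resolved against the configured NamespacePrefixService):

    Dictionary<String,Object> config = new Hashtable<String,Object>();
    // one or more properties whose values link to the entities to dereference
    config.put(DereferenceConstants.ENTITY_REFERENCES,
            Arrays.asList("fise:entity-reference", "dct:references"));
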
diff --git a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceContext.java b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceContext.java
index 29f332e..cacb9fe 100644
--- a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceContext.java
+++ b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceContext.java
@@ -13,7 +13,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.lang.StringUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespaceMappingUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
@@ -48,7 +48,7 @@
     private Set<String> languages;
     private List<String> fields;
     private String program;
-    private HashSet<UriRef> entityReferences;
+    private HashSet<IRI> entityReferences;
     
     
     
@@ -84,13 +84,13 @@
                 DereferenceConstants.ENTITY_REFERENCES);
         }
         //start with the references present in the config
-        this.entityReferences = new HashSet<UriRef>(getConfig().getEntityReferences());
+        this.entityReferences = new HashSet<IRI>(getConfig().getEntityReferences());
         if(entityRefProps != null && !entityRefProps.isEmpty()){
             NamespacePrefixService nps = engine.getConfig().getNsPrefixService();
             for(String prop : entityRefProps){
                 if(!StringUtils.isBlank(prop)){
                     try {
-                        entityReferences.add(new UriRef(
+                        entityReferences.add(new IRI(
                             NamespaceMappingUtils.getConfiguredUri(nps, prop)));
                     } catch(IllegalArgumentException e){
                         throw new DereferenceConfigurationException(e, 
@@ -287,7 +287,7 @@
      * @return the entity reference properties
      * @see DereferenceEngineConfig#getEntityReferences()
      */
-    public HashSet<UriRef> getEntityReferences() {
+    public HashSet<IRI> getEntityReferences() {
         return entityReferences;
     }
     
diff --git a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineConfig.java b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineConfig.java
index 5f10b5c..64118b5 100644
--- a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineConfig.java
+++ b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineConfig.java
@@ -13,7 +13,7 @@
 import java.util.Locale;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.lang.StringUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -30,7 +30,7 @@
     private String ldpath;
     private List<String> dereferenced;
     private Set<String> languages;
-    private Set<UriRef> entityReferences;
+    private Set<IRI> entityReferences;
 
     /**
      * Creates a DereferenceEngine configuration based on a Dictionary. Typically
@@ -127,7 +127,7 @@
      * is present
      * @since 0.12.1 (<a href="https://issues.apache.org/jira/browse/STANBOL-1334">STANBOL-1334</a>)
      */
-    public Set<UriRef> getEntityReferences() {
+    public Set<IRI> getEntityReferences() {
         return entityReferences;
     }
 
@@ -164,17 +164,17 @@
      * @return
      * @throws ConfigurationException
      */
-    private Set<UriRef> parseEntityReferences() throws ConfigurationException {
-        Set<UriRef> entityRefPropUris;
+    private Set<IRI> parseEntityReferences() throws ConfigurationException {
+        Set<IRI> entityRefPropUris;
         Collection<String> entityProps = EnhancementEngineHelper.getConfigValues(
             config, ENTITY_REFERENCES, String.class);
         if(entityProps == null || entityProps.isEmpty()){
             entityRefPropUris = DEFAULT_ENTITY_REFERENCES;
         } else {
-            entityRefPropUris = new HashSet<UriRef>(entityProps.size());
+            entityRefPropUris = new HashSet<IRI>(entityProps.size());
             for(String prop : entityProps){
                 if(!StringUtils.isBlank(prop)){
-                    entityRefPropUris.add(new UriRef(getConfiguredUri(nsPrefixService, 
+                    entityRefPropUris.add(new IRI(getConfiguredUri(nsPrefixService, 
                         ENTITY_REFERENCES, prop.trim())));
                 }
             }
diff --git a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceException.java b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceException.java
index aaddd07..ddb656b 100644
--- a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceException.java
+++ b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceException.java
@@ -16,16 +16,16 @@
  */
 package org.apache.stanbol.enhancer.engines.dereference;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public class DereferenceException extends Exception {
     
     private static final long serialVersionUID = 1524436328783083428L;
 
-    public DereferenceException(UriRef entity, Throwable t){
+    public DereferenceException(IRI entity, Throwable t){
         super("Unable to dereference Entity " + entity+ "!", t);
     }
-    public DereferenceException(UriRef entity, String reason){
+    public DereferenceException(IRI entity, String reason){
         super("Unable to dereference Entity " + entity + 
             (reason != null ? ": "+ reason : "") + "!");
     }
diff --git a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferenceEngine.java b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferenceEngine.java
index 4b50cc7..85d077f 100644
--- a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferenceEngine.java
+++ b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferenceEngine.java
@@ -36,11 +36,11 @@
 import java.util.regex.Pattern;
 import java.util.regex.PatternSyntaxException;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.stanboltools.offline.OfflineMode;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.EngineException;
@@ -218,15 +218,15 @@
         long start = System.nanoTime();
         Map<String,Object> enhancemntProps = EnhancementEngineHelper.getEnhancementProperties(this, ci);
         final DereferenceContext derefContext;
-        final MGraph metadata = ci.getMetadata();
-        Set<UriRef> referencedEntities = new HashSet<UriRef>();
+        final Graph metadata = ci.getMetadata();
+        Set<IRI> referencedEntities = new HashSet<IRI>();
         ci.getLock().readLock().lock();
         try {
             //(1) Create the DereferenceContext
             if(filterContentLanguages){
                 //parse the languages detected for the content
                 Set<String> contentLanguages = new HashSet<String>();
-                for(NonLiteral langAnno : EnhancementEngineHelper.getLanguageAnnotations(metadata)){
+                for(BlankNodeOrIRI langAnno : EnhancementEngineHelper.getLanguageAnnotations(metadata)){
                     contentLanguages.add(EnhancementEngineHelper.getString(metadata, langAnno, DC_LANGUAGE));
                 }
                 enhancemntProps.put(DereferenceContext.INTERNAL_CONTENT_LANGUAGES, contentLanguages);
@@ -250,18 +250,18 @@
             
             //parse the referenced entities from the graph
             //(2) read all Entities we need to dereference from the parsed contentItem
-            Set<UriRef> checked = new HashSet<UriRef>();
+            Set<IRI> checked = new HashSet<IRI>();
             //since STANBOL-1334 the list of properties that refer to entities can be configured
-            for(UriRef referenceProperty : derefContext.getEntityReferences()){
+            for(IRI referenceProperty : derefContext.getEntityReferences()){
                 Iterator<Triple> entityReferences = metadata.filter(null, referenceProperty, null);
                 while(entityReferences.hasNext()){
                     Triple triple = entityReferences.next();
-                    Resource entityReference = triple.getObject();
-                    if((entityReference instanceof UriRef) && //only URIs
-                    		checked.add((UriRef)entityReference) && //do not check a URI twice
-                    		chekcFallbackMode((UriRef)entityReference, metadata) && //fallback mode
-                    		checkURI((UriRef)entityReference)){ //URI prefixes and patterns
-                        boolean added = referencedEntities.add((UriRef)entityReference);
+                    RDFTerm entityReference = triple.getObject();
+                    if((entityReference instanceof IRI) && //only URIs
+                    		checked.add((IRI)entityReference) && //do not check a URI twice
+                    		checkFallbackMode((IRI)entityReference, metadata) && //fallback mode
+                    		checkURI((IRI)entityReference)){ //URI prefixes and patterns
+                        boolean added = referencedEntities.add((IRI)entityReference);
                         if(added && log.isTraceEnabled()){
                             log.trace("  ... schedule Entity {} (referenced-by: {})", 
                                 entityReference, referenceProperty);
@@ -282,13 +282,13 @@
             referencedEntities.size());
         //(2) dereference the Entities
         ExecutorService executor = dereferencer.getExecutor();
-        Set<UriRef> failedEntities = new HashSet<UriRef>();
+        Set<IRI> failedEntities = new HashSet<IRI>();
         int dereferencedCount = 0;
         List<DereferenceJob> dereferenceJobs = new ArrayList<DereferenceJob>(
                 referencedEntities.size());
         if(executor != null && !executor.isShutdown()){ //dereference using executor
             //schedule all entities to dereference
-            for(final UriRef entity : referencedEntities){
+            for(final IRI entity : referencedEntities){
                 DereferenceJob dereferenceJob = new DereferenceJob(entity, 
                     metadata, writeLock, derefContext);
                 dereferenceJob.setFuture(executor.submit(dereferenceJob));
@@ -318,7 +318,7 @@
                 }
             }
         } else { //dereference using the current thread
-            for(UriRef entity : referencedEntities){
+            for(IRI entity : referencedEntities){
                 try {
                     log.trace("  ... dereference {}", entity);
                     if(dereferencer.dereference(entity, metadata, writeLock, derefContext)){
@@ -357,7 +357,7 @@
         return name;
     }
 
-    protected boolean chekcFallbackMode(UriRef entityReference, MGraph metadata) {
+    protected boolean checkFallbackMode(IRI entityReference, Graph metadata) {
 		return fallbackMode ? //in case we use fallback mode
 				//filter entities for those an outgoing relation is present
 				!metadata.filter(entityReference, null, null).hasNext() :
@@ -370,7 +370,7 @@
      * @return <code>true</code> if this entity should be scheduled for
      * dereferencing. <code>false</code> if not.
      */
-    protected boolean checkURI(UriRef entity){
+    protected boolean checkURI(IRI entity){
     	if(!uriFilterPresent){ //if no prefix nor pattern is set
     		return true; //accept all
     	}
@@ -454,14 +454,14 @@
      */
     class DereferenceJob implements Callable<Boolean> {
         
-        final UriRef entity;
-        final MGraph metadata;
+        final IRI entity;
+        final Graph metadata;
         final Lock writeLock;
         final DereferenceContext derefContext;
 
         private Future<Boolean> future;
         
-        DereferenceJob(UriRef entity, MGraph metadata, Lock writeLock, 
+        DereferenceJob(IRI entity, Graph metadata, Lock writeLock, 
             DereferenceContext derefContext){
             this.entity = entity;
             this.metadata = metadata;
diff --git a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferencer.java b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferencer.java
index 2afb47b..0b041dc 100644
--- a/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferencer.java
+++ b/enhancement-engines/dereference/core/src/main/java/org/apache/stanbol/enhancer/engines/dereference/EntityDereferencer.java
@@ -20,8 +20,8 @@
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.stanboltools.offline.OfflineMode;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
 
@@ -54,7 +54,7 @@
     ExecutorService getExecutor();
 
     /**
-     * Dereferences the Entity with the parsed {@link UriRef} by copying the
+     * Dereferences the Entity with the parsed {@link IRI} by copying the
      * data to the parsed graph
      * @param graph the graph to add the dereferenced entity 
      * @param entity the uri of the Entity to dereference
@@ -72,7 +72,7 @@
      * @throws DereferenceException on any error while dereferencing the
      * requested Entity
      */
-    boolean dereference(UriRef entity, MGraph graph, Lock writeLock, 
+    boolean dereference(IRI entity, Graph graph, Lock writeLock, 
             DereferenceContext dereferenceContext) throws DereferenceException;
         
 }
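
A minimal implementation sketch of this contract, copying from an in-memory `source` Graph (the same shape as the TestDereferencer further below; `source` is an assumed field of the implementing class):

    @Override
    public boolean dereference(IRI entity, Graph graph, Lock writeLock,
            DereferenceContext context) throws DereferenceException {
        Iterator<Triple> entityTriples = source.filter(entity, null, null);
        if (!entityTriples.hasNext()) {
            return false; // nothing known about this entity
        }
        writeLock.lock(); // write to the shared graph only while holding the lock
        try {
            while (entityTriples.hasNext()) {
                graph.add(entityTriples.next());
            }
        } finally {
            writeLock.unlock();
        }
        return true;
    }
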
diff --git a/enhancement-engines/dereference/core/src/test/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineTest.java b/enhancement-engines/dereference/core/src/test/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineTest.java
index a4e87de..dfb4c1f 100644
--- a/enhancement-engines/dereference/core/src/test/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineTest.java
+++ b/enhancement-engines/dereference/core/src/test/java/org/apache/stanbol/enhancer/engines/dereference/DereferenceEngineTest.java
@@ -34,15 +34,14 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.stanboltools.offline.OfflineMode;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -71,21 +71,21 @@
     /**
      * The metadata used by this test
      */
-    private static TripleCollection testData;
+    private static Graph testData;
     
-    private static TripleCollection testMetadata;
+    private static Graph testMetadata;
     
-    public static final UriRef NAME = new UriRef(NamespaceEnum.rdfs+"label");
-    public static final UriRef TYPE = new UriRef(NamespaceEnum.rdf+"type");
-    public static final UriRef REDIRECT = new UriRef(NamespaceEnum.rdfs+"seeAlso");
+    public static final IRI NAME = new IRI(NamespaceEnum.rdfs+"label");
+    public static final IRI TYPE = new IRI(NamespaceEnum.rdf+"type");
+    public static final IRI REDIRECT = new IRI(NamespaceEnum.rdfs+"seeAlso");
     
-    public static final UriRef OTHER_ENTITY_REFERENCE = new UriRef(
+    public static final IRI OTHER_ENTITY_REFERENCE = new IRI(
         "http://www.example.org/stanbol/enhancer/dereference/test#other-entity-reference");
 
     private static final ContentItemFactory ciFactory = InMemoryContentItemFactory.getInstance();
     
     private static final LiteralFactory lf = LiteralFactory.getInstance();
-    private static final UriRef SKOS_NOTATION = new UriRef(NamespaceEnum.skos+"notation");
+    private static final IRI SKOS_NOTATION = new IRI(NamespaceEnum.skos+"notation");
     private static final Language LANG_EN = new Language("en");
     private static final Language LANG_DE = new Language("de");
 
@@ -97,14 +97,14 @@
     
     @BeforeClass
     public static void setUpServices() throws IOException {
-        testData = new IndexedMGraph();
+        testData = new IndexedGraph();
         long seed = System.currentTimeMillis();
         log.info("Test seed "+ seed);
         Random random = new Random(seed);
         int numEntities = 0;
         for(int i = 0; i < NUM_ENTITIES ; i++){
             if(random.nextFloat() <= PERCENTAGE_PRESENT){ //do not create all entities
-                UriRef uri = new UriRef("urn:test:entity"+i);
+                IRI uri = new IRI("urn:test:entity"+i);
                 testData.add(new TripleImpl(uri, RDF_TYPE, SKOS_CONCEPT));
                 testData.add(new TripleImpl(uri, RDFS_LABEL, 
                     new PlainLiteralImpl("entity "+i, LANG_EN)));
@@ -116,20 +116,20 @@
             }
         }
         log.info(" ... created {} Entities",numEntities);
-        testMetadata = new IndexedMGraph();
+        testMetadata = new IndexedGraph();
         int numLinks = 0;
         int numOtherLinks = 0;
         for(int i = 0; i < NUM_ENTITIES ; i++){
             float r = random.nextFloat();
             if(r < PERCENTAGE_LINKED){
-                UriRef enhancementUri = new UriRef("urn:test:enhancement"+i);
-                UriRef entityUri = new UriRef("urn:test:entity"+i);
+                IRI enhancementUri = new IRI("urn:test:enhancement"+i);
+                IRI entityUri = new IRI("urn:test:entity"+i);
                 //we do not need any other triple for testing in the contentItem
                 testMetadata.add(new TripleImpl(enhancementUri, ENHANCER_ENTITY_REFERENCE, entityUri));
                 numLinks++;
             } else if((r-PERCENTAGE_LINKED) < PERCENTAGE_LINKED_OTHER){
-                UriRef enhancementUri = new UriRef("urn:test:enhancement"+i);
-                UriRef entityUri = new UriRef("urn:test:entity"+i);
+                IRI enhancementUri = new IRI("urn:test:enhancement"+i);
+                IRI entityUri = new IRI("urn:test:entity"+i);
                 //we do not need any other triple for testing in the contentItem
                 testMetadata.add(new TripleImpl(enhancementUri, OTHER_ENTITY_REFERENCE, entityUri));
                 numOtherLinks++;
@@ -141,7 +141,7 @@
     }
 
     public static ContentItem getContentItem(final String id) throws IOException {
-        ContentItem ci = ciFactory.createContentItem(new UriRef(id), new StringSource("Not used"));
+        ContentItem ci = ciFactory.createContentItem(new IRI(id), new StringSource("Not used"));
         ci.getMetadata().addAll(testMetadata);
         return ci;
     }
@@ -239,19 +239,19 @@
         validateDereferencedEntities(ci.getMetadata(), OTHER_ENTITY_REFERENCE, ENHANCER_ENTITY_REFERENCE);
     }
     
-    private void validateDereferencedEntities(TripleCollection metadata, UriRef...entityReferenceFields) {
-        MGraph expected = new IndexedMGraph();
-        for(UriRef entityReferenceField : entityReferenceFields){
+    private void validateDereferencedEntities(Graph metadata, IRI...entityReferenceFields) {
+        Graph expected = new IndexedGraph();
+        for(IRI entityReferenceField : entityReferenceFields){
             Iterator<Triple> referenced = metadata.filter(null, entityReferenceField, null);
             while(referenced.hasNext()){
-                UriRef entity = (UriRef)referenced.next().getObject();
+                IRI entity = (IRI)referenced.next().getObject();
                 Iterator<Triple> entityTriples = testData.filter(entity, null, null);
                 while(entityTriples.hasNext()){
                     expected.add(entityTriples.next());
                 }
             }
         }
-        MGraph notExpected = new IndexedMGraph(testData);
+        Graph notExpected = new IndexedGraph(testData);
         notExpected.removeAll(expected);
         Assert.assertTrue(metadata.containsAll(expected));
         Assert.assertTrue(Collections.disjoint(metadata, notExpected));
@@ -276,7 +276,7 @@
         }
 
         @Override
-        public boolean dereference(UriRef entity, MGraph graph, Lock writeLock, DereferenceContext context) throws DereferenceException {
+        public boolean dereference(IRI entity, Graph graph, Lock writeLock, DereferenceContext context) throws DereferenceException {
             Iterator<Triple> entityTriples = testData.filter(entity, null, null);
             if(entityTriples.hasNext()){
                 writeLock.lock();
diff --git a/enhancement-engines/dereference/entityhub/src/main/java/org/apache/stanbol/enhancer/engines/dereference/entityhub/TrackingDereferencerBase.java b/enhancement-engines/dereference/entityhub/src/main/java/org/apache/stanbol/enhancer/engines/dereference/entityhub/TrackingDereferencerBase.java
index 19beb6e..e510abf 100644
--- a/enhancement-engines/dereference/entityhub/src/main/java/org/apache/stanbol/enhancer/engines/dereference/entityhub/TrackingDereferencerBase.java
+++ b/enhancement-engines/dereference/entityhub/src/main/java/org/apache/stanbol/enhancer/engines/dereference/entityhub/TrackingDereferencerBase.java
@@ -32,10 +32,10 @@
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.commons.lang.StringUtils;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
 import org.apache.marmotta.ldpath.exception.LDPathParseException;
@@ -283,7 +283,7 @@
     }
     
     @Override
-    public final boolean dereference(UriRef uri, MGraph graph, Lock writeLock, DereferenceContext dc) throws DereferenceException {
+    public final boolean dereference(IRI uri, Graph graph, Lock writeLock, DereferenceContext dc) throws DereferenceException {
         T service = getService();
         if(service == null){
             throw new DereferenceException(uri, serviceClass.getClass().getSimpleName() 
@@ -321,7 +321,7 @@
     }
     /**
      * Executes the {@link #ldpathProgram} using the parsed URI as context and
-     * writes the the results to the parsed Graph
+     * writes the results to the parsed Graph
      * @param uri the context
      * @param rdfBackend the RdfBackend the LDPath program is executed on
      * @param ldpathProgram The {@link Program} parsed via the dereference context
@@ -331,15 +331,15 @@
      * @throws DereferenceException on any {@link EntityhubException} while
      * executing the LDPath program
      */
-    private void copyLdPath(UriRef uri, RDFBackend<Object> rdfBackend, Program<Object> ldpathProgram,
-            Set<String> langs, MGraph graph, Lock writeLock) throws DereferenceException {
+    private void copyLdPath(IRI uri, RDFBackend<Object> rdfBackend, Program<Object> ldpathProgram,
+            Set<String> langs, Graph graph, Lock writeLock) throws DereferenceException {
         //A RdfReference needs to be used as context
         RdfReference context = valueFactory.createReference(uri);
         //create the representation that stores results in an intermediate
         //graph (we do not want partial results on an error)
-        MGraph ldPathResults = new SimpleMGraph();
+        Graph ldPathResults = new SimpleGraph();
         RdfRepresentation result = valueFactory.createRdfRepresentation(uri, ldPathResults);
         //execute the LDPath Program and write results to the RDF Graph
         try {
 	        for(org.apache.marmotta.ldpath.model.fields.FieldMapping<?,Object> mapping : ldpathProgram.getFields()) {
 	        	Collection<?> values;
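
The comments above describe an intermediate-graph pattern: LDPath results are first collected in a scratch SimpleGraph so that an exception cannot leave partial results in the shared graph. Reduced to its core (a sketch; `uri`, `graph` and `writeLock` as in copyLdPath):

    Graph ldPathResults = new SimpleGraph();
    // ... execute the LDPath program, writing only into ldPathResults ...
    writeLock.lock();
    try {
        graph.addAll(ldPathResults); // copy to the shared graph only on success
    } finally {
        writeLock.unlock();
    }
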
@@ -405,8 +405,8 @@
      * @param graph the graph to store the mapping results
      * @param writeLock the write lock for the graph
      */
-    private void copyMapped(UriRef uri, Representation rep, FieldMapper fieldMapper, Set<String> langs, 
-            MGraph graph, Lock writeLock) {
+    private void copyMapped(IRI uri, Representation rep, FieldMapper fieldMapper, Set<String> langs, 
+            Graph graph, Lock writeLock) {
         //NOTE: The fieldMapper parsed via the context does already have a
         //      filter for the parsed languages. Because of that the old code
         //      adding such a language filter is no longer needed
@@ -439,7 +439,7 @@
      * @param graph the graph to copy the data
      * @param writeLock the write lock for the graph
      */
-    private void copyAll(UriRef uri, Representation rep, MGraph graph, Lock writeLock) {
+    private void copyAll(IRI uri, Representation rep, Graph graph, Lock writeLock) {
         writeLock.lock();
         try {
         	if(log.isTraceEnabled()){
diff --git a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguationData.java b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguationData.java
index 5e88af3..89339d2 100644
--- a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguationData.java
+++ b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguationData.java
@@ -31,9 +31,9 @@
 import java.util.Set;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
 import org.apache.stanbol.enhancer.servicesapi.rdf.NamespaceEnum;
@@ -60,7 +60,7 @@
      * This is needed during writing the disambiguation results to the EnhancementStructure to know if one
      * needs to clone an fise:EntityAnnotation or not.
      */
-    public Map<UriRef,Set<UriRef>> suggestionMap = new HashMap<UriRef,Set<UriRef>>();
+    public Map<IRI,Set<IRI>> suggestionMap = new HashMap<IRI,Set<IRI>>();
 
     /**
      * Holds the center position of the fise:TextAnnotation fise:selected-text as key and the SavedEntity
@@ -79,7 +79,7 @@
      * List of all fise:textAnnotations that can be used for disambiguation. the key is the URI and the value
      * is the {@link SavedEntity} with the extracted information.
      */
-    public Map<UriRef,SavedEntity> textAnnotations = new HashMap<UriRef,SavedEntity>();
+    public Map<IRI,SavedEntity> textAnnotations = new HashMap<IRI,SavedEntity>();
 
     // List to contain old confidence values that are to removed
     // List<Triple> loseConfidence = new ArrayList<Triple>();
@@ -92,14 +92,14 @@
      * ambiguations for all entities (which will be removed eventually)
      */
     public static DisambiguationData createFromContentItem(ContentItem ci) {
-        MGraph graph = ci.getMetadata();
+        Graph graph = ci.getMetadata();
         DisambiguationData data = new DisambiguationData();
         Iterator<Triple> it = graph.filter(null, RDF_TYPE, TechnicalClasses.ENHANCER_TEXTANNOTATION);
         while (it.hasNext()) {
-            UriRef uri = (UriRef) it.next().getSubject();
+            IRI uri = (IRI) it.next().getSubject();
             // TODO: rwesten: do we really want to ignore fise:TextAnnotations that link to
             // another one (typically two TextAnnotations that select the exact same text)
-            // if (graph.filter(uri, new UriRef(NamespaceEnum.dc + "relation"), null).hasNext()) {
+            // if (graph.filter(uri, new IRI(NamespaceEnum.dc + "relation"), null).hasNext()) {
             // continue;
             // }
 
@@ -110,9 +110,9 @@
                     Integer.valueOf((savedEntity.getStart() + savedEntity.getEnd()) / 2), savedEntity);
                 // add information to the #suggestionMap
                 for (Suggestion s : savedEntity.getSuggestions()) {
-                    Set<UriRef> textAnnotations = data.suggestionMap.get(s.getEntityAnnotation());
+                    Set<IRI> textAnnotations = data.suggestionMap.get(s.getEntityAnnotation());
                     if (textAnnotations == null) {
-                        textAnnotations = new HashSet<UriRef>();
+                        textAnnotations = new HashSet<IRI>();
                         data.suggestionMap.put(s.getEntityAnnotation(), textAnnotations);
                     }
                     textAnnotations.add(savedEntity.getUri());
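
suggestionMap is the reverse index described in the field javadoc above: it maps a fise:EntityAnnotation to all fise:TextAnnotations that suggest it. A sketch of the intended lookup when writing results back (identifiers are illustrative; cloneTextAnnotation is defined in DisambiguatorEngine below):

    Set<IRI> referencing = data.suggestionMap.get(entityAnnotation);
    if (referencing != null && referencing.size() > 1) {
        // suggested by several TextAnnotations: clone before changing the confidence
        IRI copy = DisambiguatorEngine.cloneTextAnnotation(graph, entityAnnotation, textAnnotation);
    } // otherwise the confidence can be updated in place
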
diff --git a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguatorEngine.java b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguatorEngine.java
index e8d714b..ee92690 100755
--- a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguatorEngine.java
+++ b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/DisambiguatorEngine.java
@@ -33,10 +33,10 @@
 import java.util.Set;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -190,7 +190,7 @@
     public void computeEnhancements(ContentItem ci) throws EngineException {
 
         String textContent;
-        Entry<UriRef,Blob> textBlob = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI,Blob> textBlob = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if (textBlob != null) {
             try {
                 textContent = ContentItemHelper.getText(textBlob.getValue());
@@ -202,7 +202,7 @@
             textContent = null;
         }
 
-        MGraph graph = ci.getMetadata();
+        Graph graph = ci.getMetadata();
 
         // (1) read the data from the content item
         String contentLangauge;
@@ -346,11 +346,11 @@
     // the disambiguation result the ambiguation is kept but the overall
     // fise:confidence values are reduced by #confidenceWeight (ensured to be
     // less than 1)
-    // protected List<Triple> unchangedConfidences(List<UriRef> subsumed,
-    // MGraph graph,
+    // protected List<Triple> unchangedConfidences(List<IRI> subsumed,
+    // Graph graph,
     // List<Triple> loseConfidence) {
     // for (int i = 0; i < subsumed.size(); i++) {
-    // UriRef uri = subsumed.get(i);
+    // IRI uri = subsumed.get(i);
     // Iterator<Triple> confidenceTriple = graph.filter(uri, ENHANCER_CONFIDENCE, null);
     // while (confidenceTriple.hasNext()) {
     // loseConfidence.remove(confidenceTriple.next());
@@ -417,7 +417,7 @@
             if (maxScore == null) {
                 maxScore = score;
             }
-            UriRef uri = new UriRef(guess.getId());
+            IRI uri = new IRI(guess.getId());
             Suggestion suggestion = savedEntity.getSuggestion(uri);
             if (suggestion == null) {
                 log.info(" - not found {}", guess.getId());
@@ -460,13 +460,13 @@
     // NOTE (rwesten): now done as part of the disambiguateSuggestions(..)
     // method.
     // protected boolean intersectionCheck(List<Suggestion> matches,
-    // List<UriRef> subsumed,
-    // MGraph graph,
+    // List<IRI> subsumed,
+    // Graph graph,
     // String contentLangauge) {
     // for (int i = 0; i < subsumed.size(); i++) {
-    // UriRef uri = subsumed.get(i);
+    // IRI uri = subsumed.get(i);
     //
-    // UriRef uri1 = EnhancementEngineHelper.getReference(graph, uri, new UriRef(NamespaceEnum.fise
+    // IRI uri1 = EnhancementEngineHelper.getReference(graph, uri, new IRI(NamespaceEnum.fise
     // + "entity-reference"));
     //
     // String selectedText = EnhancementEngineHelper.getString(graph, uri, ENHANCER_ENTITY_LABEL);
@@ -576,16 +576,16 @@
     // method. Results are stored in the Suggestions (member of SavedEntiy) and
     // than written back to the EnhancementStructure in a separate step
     // protected List<Triple> intersection(List<Suggestion> matches,
-    // List<UriRef> subsumed,
-    // MGraph graph,
+    // List<IRI> subsumed,
+    // Graph graph,
     // List<Triple> gainConfidence,
     // String contentLangauge) {
     //
     // for (int i = 0; i < subsumed.size(); i++) {
     // boolean matchFound = false;
-    // UriRef uri = subsumed.get(i);
+    // IRI uri = subsumed.get(i);
     //
-    // UriRef uri1 = EnhancementEngineHelper.getReference(graph, uri, new UriRef(NamespaceEnum.fise
+    // IRI uri1 = EnhancementEngineHelper.getReference(graph, uri, new IRI(NamespaceEnum.fise
     // + "entity-reference"));
     //
     // for (int j = 0; j < matches.size(); j++) {
@@ -596,8 +596,8 @@
     // && suggestName.compareToIgnoreCase(uri1.getUnicodeString()) == 0) {
     // Triple confidenceTriple = new TripleImpl(uri, ENHANCER_CONFIDENCE, LiteralFactory
     // .getInstance().createTypedLiteral(suggestion.getScore()));
-    // Triple contributorTriple = new TripleImpl((UriRef) confidenceTriple.getSubject(),
-    // new UriRef(NamespaceEnum.dc + "contributor"), LiteralFactory.getInstance()
+    // Triple contributorTriple = new TripleImpl((IRI) confidenceTriple.getSubject(),
+    // new IRI(NamespaceEnum.dc + "contributor"), LiteralFactory.getInstance()
     // .createTypedLiteral(this.getClass().getName()));
     // gainConfidence.add(confidenceTriple);
     // gainConfidence.add(contributorTriple);
@@ -608,7 +608,7 @@
     // if (!matchFound) {
     // Triple confidenceTriple = new TripleImpl(uri, ENHANCER_CONFIDENCE, LiteralFactory
     // .getInstance().createTypedLiteral(0.0));
-    // Triple contributorTriple = new TripleImpl((UriRef) confidenceTriple.getSubject(), new UriRef(
+    // Triple contributorTriple = new TripleImpl((IRI) confidenceTriple.getSubject(), new IRI(
     // NamespaceEnum.dc + "contributor"), LiteralFactory.getInstance().createTypedLiteral(
     // this.getClass().getName()));
     // gainConfidence.add(confidenceTriple);
@@ -620,7 +620,7 @@
     // }
 
     /* Removes the values in the loseConfidence list from the graph */
-    protected void removeOldConfidenceFromGraph(MGraph graph, List<Triple> loseConfidence) {
+    protected void removeOldConfidenceFromGraph(Graph graph, List<Triple> loseConfidence) {
         for (int i = 0; i < loseConfidence.size(); i++) {
             Triple elementToRemove = loseConfidence.get(i);
             graph.remove(elementToRemove);
@@ -635,7 +635,7 @@
      * @param disData
      *            the disambiguation data
      */
-    protected void applyDisambiguationResults(MGraph graph, DisambiguationData disData) {
+    protected void applyDisambiguationResults(Graph graph, DisambiguationData disData) {
         for (SavedEntity savedEntity : disData.textAnnotations.values()) {
             for (Suggestion s : savedEntity.getSuggestions()) {
                 if (s.getDisambiguatedConfidence() != null) {
@@ -697,8 +697,8 @@
      * @param textAnnotation
      * @return
      */
-    public static UriRef cloneTextAnnotation(MGraph graph, UriRef entityAnnotation, UriRef textAnnotation) {
-        UriRef copy = new UriRef("urn:enhancement-" + EnhancementEngineHelper.randomUUID());
+    public static IRI cloneTextAnnotation(Graph graph, IRI entityAnnotation, IRI textAnnotation) {
+        IRI copy = new IRI("urn:enhancement-" + EnhancementEngineHelper.randomUUID());
         Iterator<Triple> it = graph.filter(entityAnnotation, null, null);
         // we cannot add triples to the graph while iterating. So store them
         // in a list and add later
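
A note on the pattern in the hunk above: the Graph returned by ci.getMetadata() must not be
modified while a filter(..) iterator is open, which is why cloneTextAnnotation collects the
copied triples in a list first. A minimal illustrative sketch of that collect-then-add
pattern against the new API (the class and method names below are hypothetical, not part of
this patch):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;

    public class CloneSketch {

        /**
         * Copies all statements with 'source' as subject to 'copy'. The triples
         * are collected first, because adding to the Graph while iterating over
         * a filter(..) result would invalidate the iterator.
         */
        public static void copyStatements(Graph graph, IRI source, IRI copy) {
            List<Triple> toAdd = new ArrayList<Triple>();
            for (Iterator<Triple> it = graph.filter(source, null, null); it.hasNext();) {
                Triple t = it.next();
                toAdd.add(new TripleImpl(copy, t.getPredicate(), t.getObject()));
            }
            for (Triple t : toAdd) { // iteration finished, now it is safe to add
                graph.add(t);
            }
        }
    }
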
diff --git a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/SavedEntity.java b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/SavedEntity.java
index ceb3b77..d8504fb 100644
--- a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/SavedEntity.java
+++ b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/SavedEntity.java
@@ -35,12 +35,11 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
 import org.apache.stanbol.enhancer.servicesapi.rdf.NamespaceEnum;
 import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;
@@ -58,8 +57,8 @@
      */
     private final static LiteralFactory literalFactory = LiteralFactory.getInstance();
     private String name;
-    private UriRef type;
-    private UriRef uri;
+    private IRI type;
+    private IRI uri;
     private String context;
     private Integer start;
     private Integer end;
@@ -68,7 +67,7 @@
      * Map with the suggestions. The key is the URI of the fise:EntityAnnotation and the value is the
      * {@link Suggestion} holding the confidence value
      */
-    private Map<UriRef,Suggestion> suggestions = new LinkedHashMap<UriRef,Suggestion>();
+    private Map<IRI,Suggestion> suggestions = new LinkedHashMap<IRI,Suggestion>();
 
     /**
      * The name of the Entityhub {@link Site} managing the suggestions of this fise:TextAnnotation
@@ -76,7 +75,7 @@
     private String site;
 
     /**
-     * private constructor only used by {@link #createFromTextAnnotation(TripleCollection, NonLiteral)}
+     * private constructor only used by {@link #createFromTextAnnotation(Graph, IRI)}
      */
     private SavedEntity() {}
 
@@ -90,7 +89,7 @@
      * @return the {@link SavedEntity} or <code>null</code> if the parsed text annotation is missing required
      *         information.
      */
-    public static SavedEntity createFromTextAnnotation(TripleCollection graph, UriRef textAnnotation) {
+    public static SavedEntity createFromTextAnnotation(Graph graph, IRI textAnnotation) {
         SavedEntity entity = new SavedEntity();
         entity.uri = textAnnotation;
         entity.name = EnhancementEngineHelper.getString(graph, textAnnotation, ENHANCER_SELECTED_TEXT);
@@ -140,7 +139,7 @@
         // NOTE: this iterator will also include dc:relation between fise:TextAnnotation's
         // but in those cases NULL will be returned as suggestion
         while (suggestions.hasNext()) {
-            UriRef entityAnnotation = (UriRef) suggestions.next().getSubject();
+            IRI entityAnnotation = (IRI) suggestions.next().getSubject();
             Suggestion suggestion = Suggestion.createFromEntityAnnotation(graph, entityAnnotation);
             if (suggestion != null) {
                 suggestionList.add(suggestion);
@@ -199,7 +198,7 @@
      * 
      * @return the type
      */
-    public final UriRef getType() {
+    public final IRI getType() {
         return type;
     }
 
@@ -218,7 +217,7 @@
         return String.format("SavedEntity %s (name=%s | type=%s)", uri, name, type);
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return this.uri;
     }
 
@@ -238,7 +237,7 @@
         return suggestions.values();
     }
 
-    public Suggestion getSuggestion(UriRef uri) {
+    public Suggestion getSuggestion(IRI uri) {
         return suggestions.get(uri);
     }
 
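
The NOTE in SavedEntity above explains that filtering by dc:relation also yields dc:relation
links between fise:TextAnnotations, for which no Suggestion can be created. A small sketch of
that lookup with the new API (the class and method names are hypothetical, not part of this
patch):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;

    import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_RELATION;

    public class SuggestionLookupSketch {

        /**
         * Collects the subjects of all dc:relation statements pointing to the
         * parsed fise:TextAnnotation. Callers still have to skip subjects that
         * turn out not to be fise:EntityAnnotations.
         */
        public static List<IRI> relatedAnnotations(Graph graph, IRI textAnnotation) {
            List<IRI> related = new ArrayList<IRI>();
            Iterator<Triple> it = graph.filter(null, DC_RELATION, textAnnotation);
            while (it.hasNext()) {
                // subjects of enhancement statements are expected to be IRIs
                related.add((IRI) it.next().getSubject());
            }
            return related;
        }
    }
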
diff --git a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/Suggestion.java b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/Suggestion.java
index 0c0c57c..8a0749a 100644
--- a/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/Suggestion.java
+++ b/enhancement-engines/disambiguation-mlt/src/main/java/org/apache/stanbol/enhancer/engine/disambiguation/mlt/Suggestion.java
@@ -24,10 +24,10 @@
 import java.util.SortedMap;
 import java.util.SortedSet;
 
-import org.apache.clerezza.rdf.core.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
 import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;
@@ -47,10 +47,10 @@
 
     private static final LiteralFactory lf = LiteralFactory.getInstance();
 
-    private static final UriRef ENTITYHUB_SITE = new UriRef(RdfResourceEnum.site.getUri());
+    private static final IRI ENTITYHUB_SITE = new IRI(RdfResourceEnum.site.getUri());
 
-    private UriRef entityAnnotation;
-    private UriRef entityUri;
+    private IRI entityAnnotation;
+    private IRI entityUri;
     private Double originalConfidnece;
 
     private Entity entity;
@@ -58,13 +58,13 @@
     private Double disambiguatedConfidence;
     private String site;
 
-    private Suggestion(UriRef entityAnnotation) {
+    private Suggestion(IRI entityAnnotation) {
         this.entityAnnotation = entityAnnotation;
     }
 
     public Suggestion(Entity entity) {
         this.entity = entity;
-        this.entityUri = new UriRef(entity.getId());
+        this.entityUri = new IRI(entity.getId());
         this.site = entity.getSite();
     }
 
@@ -76,7 +76,7 @@
      * @param entityAnnotation
      * @return
      */
-    public static Suggestion createFromEntityAnnotation(TripleCollection graph, UriRef entityAnnotation) {
+    public static Suggestion createFromEntityAnnotation(Graph graph, IRI entityAnnotation) {
         Suggestion suggestion = new Suggestion(entityAnnotation);
         suggestion.entityUri =
                 EnhancementEngineHelper.getReference(graph, entityAnnotation, ENHANCER_ENTITY_REFERENCE);
@@ -110,7 +110,7 @@
      * 
      * @return the URI of the fise:EntityAnnotation or <code>null</code> if not present.
      */
-    public UriRef getEntityAnnotation() {
+    public IRI getEntityAnnotation() {
         return entityAnnotation;
     }
 
@@ -124,7 +124,7 @@
      * @param uri
      *            the uri of the cloned fise:EntityAnnotation
      */
-    public void setEntityAnnotation(UriRef uri) {
+    public void setEntityAnnotation(IRI uri) {
         this.entityAnnotation = uri;
     }
 
@@ -133,7 +133,7 @@
      * 
      * @return the URI
      */
-    public UriRef getEntityUri() {
+    public IRI getEntityUri() {
         return entityUri;
     }
 
@@ -238,8 +238,8 @@
                 : result;
     }
     
-    private static String getOrigin(TripleCollection graph, UriRef entityAnnotation) {
-        UriRef uOrigin = EnhancementEngineHelper.getReference(graph, entityAnnotation, ENHANCER_ORIGIN);
+    private static String getOrigin(Graph graph, IRI entityAnnotation) {
+        IRI uOrigin = EnhancementEngineHelper.getReference(graph, entityAnnotation, ENHANCER_ORIGIN);
         if (uOrigin != null) {
             return uOrigin.getUnicodeString();
         } else {
diff --git a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/CoMentionConstants.java b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/CoMentionConstants.java
index d783b2a..5275a92 100644
--- a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/CoMentionConstants.java
+++ b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/CoMentionConstants.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.engines.entitycomention;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.engines.entitylinking.config.EntityLinkerConfig;
 
 public interface CoMentionConstants {
@@ -25,11 +25,11 @@
      * The {@link EntityLinkerConfig#NAME_FIELD} uri internally used by the
      * {@link EntityCoMentionEngine}.
      */
-    UriRef CO_MENTION_LABEL_FIELD = new UriRef("urn:org.stanbol:enhander.engine.entitycomention:co-mention-label");
+    IRI CO_MENTION_LABEL_FIELD = new IRI("urn:org.stanbol:enhander.engine.entitycomention:co-mention-label");
     
     /**
      * The {@link EntityLinkerConfig#TYPE_FIELD} uri internally used by the
      * {@link EntityCoMentionEngine}.
      */
-    UriRef CO_MENTION_TYPE_FIELD = new UriRef("urn:org.stanbol:enhander.engine.entitycomention:co-mention-type");
+    IRI CO_MENTION_TYPE_FIELD = new IRI("urn:org.stanbol:enhander.engine.entitycomention:co-mention-type");
 }
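
These two constants rely on IRI being a plain value object: two IRI instances with the same
unicode string are equal and share a hash code, so the engine can use them as sentinel field
names and detect them later via hashCode()/equals(). A short sketch of that equality contract
(the main class is illustrative only):

    import org.apache.clerezza.commons.rdf.IRI;

    public class IriEqualitySketch {
        public static void main(String[] args) {
            IRI a = new IRI("urn:org.stanbol:enhander.engine.entitycomention:co-mention-label");
            IRI b = new IRI("urn:org.stanbol:enhander.engine.entitycomention:co-mention-label");
            System.out.println(a.equals(b));                  // true: same unicode string
            System.out.println(a.hashCode() == b.hashCode()); // true: consistent with equals
        }
    }
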
diff --git a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/EntityCoMentionEngine.java b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/EntityCoMentionEngine.java
index 20d4453..d847b15 100644
--- a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/EntityCoMentionEngine.java
+++ b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/EntityCoMentionEngine.java
@@ -47,16 +47,16 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -199,8 +199,8 @@
         linkerConfig.setRedirectProcessingMode(RedirectProcessingMode.IGNORE);
         //remove all type mappings
         linkerConfig.setDefaultDcType(null);
-        Set<UriRef> mappedUris = new HashSet<UriRef>(linkerConfig.getTypeMappings().keySet());
-        for(UriRef mappedUri : mappedUris){
+        Set<IRI> mappedUris = new HashSet<IRI>(linkerConfig.getTypeMappings().keySet());
+        for(IRI mappedUri : mappedUris){
             linkerConfig.setTypeMapping(mappedUri.getUnicodeString(), null);
         }
         //parse confidence adjustment value (STANBOL-1219)
@@ -283,12 +283,12 @@
         //create the in-memory database for the mentioned Entities
         ContentItemMentionBuilder entityMentionIndex = new ContentItemMentionBuilder(
             labelTokenizer, language, linkerConfig.getDefaultLanguage());
-        MGraph metadata = ci.getMetadata();
-        Set<UriRef> textAnnotations = new HashSet<UriRef>();
+        Graph metadata = ci.getMetadata();
+        Set<IRI> textAnnotations = new HashSet<IRI>();
         ci.getLock().readLock().lock();
         try { //iterate over all TextAnnotations (mentions of Entities)
             for(Iterator<Triple> it = metadata.filter(null, RDF_TYPE, ENHANCER_TEXTANNOTATION); it.hasNext();){
-                UriRef ta = (UriRef)it.next().getSubject();
+                IRI ta = (IRI)it.next().getSubject();
                 entityMentionIndex.registerTextAnnotation(ta, metadata);
                 textAnnotations.add(ta); //store the registered text annotations
             }
@@ -314,21 +314,21 @@
     }
 
     private void writeComentions(ContentItem ci,Collection<LinkedEntity> comentions, String language,
-            Set<UriRef> textAnnotations) {
+            Set<IRI> textAnnotations) {
         Language languageObject = null;
         if(language != null && !language.isEmpty()){
             languageObject = new Language(language);
         }
         
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         //we MUST adjust the confidence level of existing annotations only once
         //so we need to keep track of those
-        Set<NonLiteral> adjustedSuggestions = new HashSet<NonLiteral>();
+        Set<BlankNodeOrIRI> adjustedSuggestions = new HashSet<BlankNodeOrIRI>();
         log.debug("Write Co-Mentions:");
         for(LinkedEntity comention : comentions){
             log.debug(" > {}",comention);
             //URIs of TextAnnotations for the initial mention of this co-mention
-            Collection<UriRef> initialMentions = new ArrayList<UriRef>(comention.getSuggestions().size());
+            Collection<IRI> initialMentions = new ArrayList<IRI>(comention.getSuggestions().size());
             for(Suggestion suggestion : comention.getSuggestions()){
                 Entity entity = suggestion.getEntity();
                 if(textAnnotations.contains(entity.getUri())){
@@ -344,14 +344,14 @@
                 //search for existing text annotation
                 boolean ignore = false;
                 //search for textAnnotations with the same start
-                UriRef textAnnotation = null;
+                IRI textAnnotation = null;
                 Iterator<Triple> it = metadata.filter(null, ENHANCER_START, startLiteral);
                 while(it.hasNext()){
                     Triple t = it.next();
                     Integer end = EnhancementEngineHelper.get(metadata, t.getSubject(), ENHANCER_END, Integer.class, literalFactory);
                     if(end != null && textAnnotations.contains(t.getSubject())){
                             //metadata.filter(t.getSubject(), RDF_TYPE, ENHANCER_TEXTANNOTATION).hasNext()){
-                        textAnnotation = (UriRef)t.getSubject();
+                        textAnnotation = (IRI)t.getSubject();
                         if(end > occurrence.getEnd()){
                             // there is another TextAnnotation selecting a bigger Span
                             //so we should ignore this Occurrence
@@ -365,7 +365,7 @@
                     Integer start = EnhancementEngineHelper.get(metadata, t.getSubject(), ENHANCER_START, Integer.class, literalFactory);
                     if(start != null && textAnnotations.contains(t.getSubject())){
                             //metadata.filter(t.getSubject(), RDF_TYPE, ENHANCER_TEXTANNOTATION).hasNext()){
-                        textAnnotation = (UriRef)t.getSubject();
+                        textAnnotation = (IRI)t.getSubject();
                         if(start < occurrence.getStart()){
                             // there is another TextAnnotation selecting a bigger Span
                             //so we should ignore this Occurrence
@@ -399,10 +399,10 @@
                         //    ENHANCER_CONFIDENCE, Double.class, literalFactory);
                     }
                     //now process initial mention(s) for the co-mention
-                    Set<UriRef> dcTypes = new HashSet<UriRef>();
-                    for(UriRef initialMention : initialMentions){
+                    Set<IRI> dcTypes = new HashSet<IRI>();
+                    for(IRI initialMention : initialMentions){
                         //get the dc:type(s) of the initial mentions
-                        Iterator<UriRef> dcTypesIt = getReferences(metadata, initialMention, DC_TYPE);
+                        Iterator<IRI> dcTypesIt = getReferences(metadata, initialMention, DC_TYPE);
                         while(dcTypesIt.hasNext()){
                             dcTypes.add(dcTypesIt.next());
                         }
@@ -419,12 +419,12 @@
                         //now we need to compare the suggestions of the initial
                         //mention(s) with the existing one. 
                         //Get information about the suggestions of the initial mention
-                        Map<Resource,Double> initialSuggestions = new HashMap<Resource,Double>();
-                        Map<Resource, Resource> initialSuggestedEntities = new HashMap<Resource,Resource>();
+                        Map<RDFTerm,Double> initialSuggestions = new HashMap<RDFTerm,Double>();
+                        Map<RDFTerm, RDFTerm> initialSuggestedEntities = new HashMap<RDFTerm,RDFTerm>();
                         for(Iterator<Triple> suggestions = metadata.filter(null, DC_RELATION, initialMention); suggestions.hasNext();){
-                            if(!textAnnotations.contains(suggestions)) {
-                                NonLiteral suggestion = suggestions.next().getSubject();
-                                Resource suggestedEntity = EnhancementEngineHelper.getReference(metadata, suggestion, ENHANCER_ENTITY_REFERENCE);
+                            BlankNodeOrIRI suggestion = suggestions.next().getSubject();
+                            if(!textAnnotations.contains(suggestion)) { //ignore fise:TextAnnotations
+                                RDFTerm suggestedEntity = EnhancementEngineHelper.getReference(metadata, suggestion, ENHANCER_ENTITY_REFERENCE);
                                 if(suggestedEntity != null){ //it has a suggestion
                                     Double confidence = EnhancementEngineHelper.get(
                                         metadata, suggestion, ENHANCER_CONFIDENCE, Double.class, literalFactory);
@@ -441,18 +441,18 @@
                         }
                         //now we collect existing Suggestions for this TextAnnotation where we need
                         //to adjust the confidence (quite some things to check ....)
-                        Map<NonLiteral, Double> existingSuggestions = new HashMap<NonLiteral,Double>();
+                        Map<BlankNodeOrIRI, Double> existingSuggestions = new HashMap<BlankNodeOrIRI,Double>();
                     	if(maxConfidence != null && confidenceAdjustmentFactor < 1){
                     	    //suggestions are defined by incoming dc:relation
 	                        for(Iterator<Triple> esIt = metadata.filter(null, DC_RELATION, textAnnotation);esIt.hasNext();){
-	                        	NonLiteral existingSuggestion = esIt.next().getSubject();
+	                        	BlankNodeOrIRI existingSuggestion = esIt.next().getSubject();
 	                        	//but not all of them are suggestions
 	                        	if(!textAnnotations.contains(existingSuggestion)) { //ignore fise:TextAnnotations
 	                                Double existingConfidence = EnhancementEngineHelper.get(metadata, existingSuggestion, 
                                         ENHANCER_CONFIDENCE, Double.class, literalFactory);
 	                                //ignore fise:EntityAnnotations also suggested for the initial mention
                                     if(!initialSuggestions.containsKey(existingSuggestion)){
-                                        Resource suggestedEntity = EnhancementEngineHelper.getReference(metadata, existingSuggestion, ENHANCER_ENTITY_REFERENCE);
+                                        RDFTerm suggestedEntity = EnhancementEngineHelper.getReference(metadata, existingSuggestion, ENHANCER_ENTITY_REFERENCE);
                                         //we might also have different fise:TextAnnotations that
                                         //fise:entity-reference to an Entity present in the
                                         //suggestions for the initial mention
@@ -463,7 +463,7 @@
                                             } //else confidence already adjusted
                                         } else { // different fise:EntityAnnotation, but same reference Entity
                                             //we need to check confidences to decide what to do
-                                            Resource initialSuggestion = initialSuggestedEntities.get(suggestedEntity);
+                                            RDFTerm initialSuggestion = initialSuggestedEntities.get(suggestedEntity);
                                             Double initialConfidence = initialSuggestions.get(initialSuggestion);
                                             if(initialConfidence == null || (existingConfidence != null && 
                                                     existingConfidence.compareTo(initialConfidence) >= 0)){
@@ -493,7 +493,7 @@
                                     }
 	                        	} //else ignore dc:relations to other fise:TextAnnotations
  	                        }
-	                        for(Entry<NonLiteral,Double> entry : existingSuggestions.entrySet()){
+	                        for(Entry<BlankNodeOrIRI,Double> entry : existingSuggestions.entrySet()){
 	                        	if(entry.getValue() != null){
 	                        		double adjustedConfidence = entry.getValue() * confidenceAdjustmentFactor;
 	                        		if(maxExistingConfidence == null || adjustedConfidence > maxExistingConfidence){
@@ -506,8 +506,8 @@
 	                        }
                     	}
                     	//add the suggestions of the initial mention to this one
-                        for(Resource suggestion : initialSuggestions.keySet()){
-                            metadata.add(new TripleImpl((NonLiteral)suggestion, DC_RELATION, textAnnotation));
+                        for(RDFTerm suggestion : initialSuggestions.keySet()){
+                            metadata.add(new TripleImpl((BlankNodeOrIRI)suggestion, DC_RELATION, textAnnotation));
     
                         }
                         //finally link the co-mention with the initial one
@@ -524,7 +524,7 @@
                     		maxConfidence.compareTo(maxExistingConfidence) >= 0);
                     boolean addCoMentionDcTypes = maxExistingConfidence == null ||
                     		(maxConfidence != null && maxConfidence.compareTo(maxExistingConfidence) >= 1);
-                    Iterator<UriRef> existingDcTypesIt = getReferences(metadata, textAnnotation, DC_TYPE);
+                    Iterator<IRI> existingDcTypesIt = getReferences(metadata, textAnnotation, DC_TYPE);
                     while(existingDcTypesIt.hasNext()){ //do not add existing
                     	//remove dc:type triples if they are not re-added later and
                     	//removeExistingDcTypes == true
@@ -534,7 +534,7 @@
                         }
                     }
                     if(addCoMentionDcTypes){
-	                    for(UriRef dcType : dcTypes){ //add missing
+	                    for(IRI dcType : dcTypes){ //add missing
 	                        metadata.add(new TripleImpl(textAnnotation, DC_TYPE, dcType));
 	                    }
                     }
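
The central calculation of this engine is the STANBOL-1219 confidence adjustment: every
existing suggestion is weighted down exactly once by the configured factor, and the maximum
of the adjusted values is tracked. A condensed sketch of that bookkeeping (the real code
additionally writes the adjusted values back into the metadata graph; this standalone class
is illustrative only):

    import java.util.Map;

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;

    public class ConfidenceAdjustmentSketch {

        /**
         * Multiplies each non-null confidence by the adjustment factor (a
         * value below 1) and returns the maximum adjusted confidence, or
         * null if no suggestion had a confidence.
         */
        public static Double adjust(Map<BlankNodeOrIRI, Double> existingSuggestions,
                                    double confidenceAdjustmentFactor) {
            Double maxExistingConfidence = null;
            for (Map.Entry<BlankNodeOrIRI, Double> entry : existingSuggestions.entrySet()) {
                if (entry.getValue() != null) {
                    double adjusted = entry.getValue() * confidenceAdjustmentFactor;
                    if (maxExistingConfidence == null || adjusted > maxExistingConfidence) {
                        maxExistingConfidence = adjusted;
                    }
                    entry.setValue(adjusted); // here only the in-memory map is updated
                }
            }
            return maxExistingConfidence;
        }
    }
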
diff --git a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/ContentItemMentionBuilder.java b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/ContentItemMentionBuilder.java
index 91def8e..f827d1f 100644
--- a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/ContentItemMentionBuilder.java
+++ b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/ContentItemMentionBuilder.java
@@ -33,11 +33,10 @@
 import java.util.TreeMap;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.engines.entitycomention.CoMentionConstants;
 import org.apache.stanbol.enhancer.engines.entitylinking.LabelTokenizer;
 import org.apache.stanbol.enhancer.engines.entitylinking.impl.LinkingStateAware;
@@ -64,7 +64,7 @@
         super(labelTokenizer,CoMentionConstants.CO_MENTION_LABEL_FIELD, languages);
     }
 
-    public void registerTextAnnotation(UriRef textAnnotation, TripleCollection metadata){
+    public void registerTextAnnotation(IRI textAnnotation, Graph metadata){
         String selectedText = EnhancementEngineHelper.getString(metadata, textAnnotation, ENHANCER_SELECTED_TEXT);
         if(selectedText != null){
             //NOTE: Typically it is not possible to find co-mentions for Entities with a
diff --git a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/EntityMention.java b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/EntityMention.java
index c00af6c..83119d2 100644
--- a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/EntityMention.java
+++ b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/EntityMention.java
@@ -18,10 +18,9 @@
 
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.commons.collections.IteratorUtils;
 import org.apache.stanbol.enhancer.engines.entitycomention.CoMentionConstants;
 import org.apache.stanbol.enhancer.engines.entitycomention.EntityCoMentionEngine;
@@ -31,7 +30,7 @@
 
 /**
  * {@link Entity} implementation used by the {@link EntityCoMentionEngine}. It
- * overrides the {@link #getText(UriRef)} and {@link #getReferences(UriRef)}
+ * overrides the {@link #getText(IRI)} and {@link #getReferences(IRI)}
  * methods to use a different labelField if 
  * {@link CoMentionConstants#CO_MENTION_LABEL_FIELD} is parsed as parameter.
  * This allows the {@link EntityLinker} to use different properties for different
@@ -43,11 +42,11 @@
     /**
      * The label field of this Entity
      */
-    private final UriRef nameField;
+    private final IRI nameField;
     /**
      * The type field of this Entity
      */
-    private final UriRef typeField;
+    private final IRI typeField;
     /**
      * The start/end char indexes char index of the first mention
      */
@@ -58,15 +57,15 @@
 
     /**
      * Creates a new MentionEntity for the parsed parameters
-     * @param uri the {@link UriRef} of the Entity 
-     * @param data the {@link MGraph} with the data for the Entity
-     * @param labelField the {@link UriRef} of the property holding the
+     * @param uri the {@link IRI} of the Entity 
+     * @param data the {@link Graph} with the data for the Entity
+     * @param labelField the {@link IRI} of the property holding the
      * labels of this Entity. This property will be used for all calls to
-     * {@link #getText(UriRef)} and {@link #getReferences(UriRef)} if
+     * {@link #getText(IRI)} and {@link #getReferences(IRI)} if
      * {@link CoMentionConstants#CO_MENTION_LABEL_FIELD} is parsed as parameter
      * @param span the start/end char indexes of the mention
      */
-    public EntityMention(UriRef uri, TripleCollection data, UriRef labelField, UriRef typeField, Integer[] span) {
+    public EntityMention(IRI uri, Graph data, IRI labelField, IRI typeField, Integer[] span) {
         super(uri, data);
         if(labelField == null){
             throw new IllegalArgumentException("The LabelField MUST NOT be NULL!");
@@ -87,18 +86,18 @@
      * Wraps the parsed Entity and redirects calls to 
      * {@link CoMentionConstants#CO_MENTION_LABEL_FIELD} to the parsed labelField
      * @param entity the Entity to wrap
-     * @param labelField the {@link UriRef} of the property holding the
+     * @param labelField the {@link IRI} of the property holding the
      * labels of this Entity. This property will be used for all calls to
-     * {@link #getText(UriRef)} and {@link #getReferences(UriRef)} if
+     * {@link #getText(IRI)} and {@link #getReferences(IRI)} if
      * {@link CoMentionConstants#CO_MENTION_LABEL_FIELD} is parsed as parameter
      * @param index the char index of the initial mention in the document
      */
-    public EntityMention(Entity entity, UriRef labelField, UriRef typeField, Integer[] span) {
+    public EntityMention(Entity entity, IRI labelField, IRI typeField, Integer[] span) {
         this(entity.getUri(), entity.getData(),labelField,typeField,span);
     }
 
     @Override
-    public Iterator<PlainLiteral> getText(UriRef field) {
+    public Iterator<Literal> getText(IRI field) {
         if(CO_MENTION_FIELD_HASH == field.hashCode() && //avoid calling equals
                 CoMentionConstants.CO_MENTION_LABEL_FIELD.equals(field)){
             return super.getText(nameField);
@@ -111,7 +110,7 @@
     }
     
     @Override
-    public Iterator<UriRef> getReferences(UriRef field) {
+    public Iterator<IRI> getReferences(IRI field) {
         if(CO_MENTION_FIELD_HASH == field.hashCode() && //avoid calling equals
                 CoMentionConstants.CO_MENTION_LABEL_FIELD.equals(field)){
             return super.getReferences(nameField);
@@ -155,7 +154,7 @@
      * @return the field (property) used to obtain the labels of this mention
      * @see EntityLinkerConfig#getNameField()
      */
-    public UriRef getNameField() {
+    public IRI getNameField() {
         return nameField;
     }
     /**
@@ -165,7 +164,7 @@
      * @return the field (property) used to obtain the type of this mention
      * @see EntityLinkerConfig#getTypeField()
      */
-    public UriRef getTypeField() {
+    public IRI getTypeField() {
         return typeField;
     }
     
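
The "avoid calling equals" trick above works because the sentinel hash is computed once, so a
mismatching field is rejected by a single int comparison before the string-based equals()
runs. A generic sketch of that redirect (the sentinel URN below is hypothetical):

    import org.apache.clerezza.commons.rdf.IRI;

    public class FieldRedirectSketch {

        private static final IRI SENTINEL = new IRI("urn:example:sentinel-field");
        // pre-computed, mirroring CO_MENTION_FIELD_HASH in EntityMention
        private static final int SENTINEL_HASH = SENTINEL.hashCode();

        /** Returns the field to actually read, redirecting the sentinel. */
        public static IRI resolve(IRI requested, IRI realField) {
            // cheap hash check first; equals() only runs on a hash match
            if (SENTINEL_HASH == requested.hashCode() && SENTINEL.equals(requested)) {
                return realField;
            }
            return requested;
        }
    }
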
diff --git a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/InMemoryEntityIndex.java b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/InMemoryEntityIndex.java
index d65509b..e30053e 100644
--- a/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/InMemoryEntityIndex.java
+++ b/enhancement-engines/entitycomention/src/main/java/org/apache/stanbol/enhancer/engines/entitycomention/impl/InMemoryEntityIndex.java
@@ -32,13 +32,11 @@
 import java.util.SortedMap;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
 import org.apache.stanbol.enhancer.engines.entitylinking.LabelTokenizer;
@@ -58,13 +56,13 @@
     protected final LabelTokenizer tokenizer;
     //Holds Entity data
     private SortedMap<String,Collection<Entity>> index = new TreeMap<String,Collection<Entity>>(String.CASE_INSENSITIVE_ORDER);
-    private Map<UriRef,Entity> entities = new HashMap<UriRef,Entity>();
+    private Map<IRI,Entity> entities = new HashMap<IRI,Entity>();
     private Set<String> indexLanguages;
     protected String language;
-    protected UriRef nameField;
+    protected IRI nameField;
 
     
-    public InMemoryEntityIndex(LabelTokenizer tokenizer, UriRef nameField, String...languages) {
+    public InMemoryEntityIndex(LabelTokenizer tokenizer, IRI nameField, String...languages) {
         this.indexLanguages = languages == null || languages.length < 1 ? 
                 Collections.singleton((String)null) : 
                         new HashSet<String>(Arrays.asList(languages));
@@ -80,9 +78,9 @@
             log.debug(" > register {}",entity);
         }
         entities.put(entity.getUri(), entity);
-        Iterator<PlainLiteral> labels = entity.getText(nameField);
+        Iterator<Literal> labels = entity.getText(nameField);
         while(labels.hasNext()){
-            PlainLiteral label = labels.next();
+            Literal label = labels.next();
             String lang = label.getLanguage() == null ? null : label.getLanguage().toString();
             if(indexLanguages.contains(lang)){
                 for(String token : tokenizer.tokenize(label.getLexicalForm(),null)){
@@ -100,13 +98,13 @@
     }
     
     @Override
-    public Entity get(UriRef id, Set<UriRef> includeFields, String...languages) throws IllegalStateException {
+    public Entity get(IRI id, Set<IRI> includeFields, String...languages) throws IllegalStateException {
         return entities.get(id);
     }
 
     @Override
-    public Collection<? extends Entity> lookup(UriRef field,
-                                           Set<UriRef> includeFields,
+    public Collection<? extends Entity> lookup(IRI field,
+                                           Set<IRI> includeFields,
                                            List<String> search, String[] languages,
                                            Integer numResults, Integer offset) throws IllegalStateException {
         //this assumes that 
@@ -233,7 +231,7 @@
     }
 
     @Override
-    public Map<UriRef,Collection<Resource>> getOriginInformation() {
+    public Map<IRI,Collection<RDFTerm>> getOriginInformation() {
         return Collections.emptyMap();
     }
 }
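
The index at the heart of this class is a TreeMap with String.CASE_INSENSITIVE_ORDER mapping
each label token to the entities carrying that token. A trimmed-down sketch of registration
and lookup (the engine uses its configurable LabelTokenizer; whitespace splitting stands in
for it here, and the class itself is illustrative only):

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.SortedMap;
    import java.util.TreeMap;

    import org.apache.clerezza.commons.rdf.IRI;

    public class TokenIndexSketch {

        // case-insensitive token -> entities, as in InMemoryEntityIndex
        private final SortedMap<String, Collection<IRI>> index =
                new TreeMap<String, Collection<IRI>>(String.CASE_INSENSITIVE_ORDER);

        /** Registers an entity under every token of one of its labels. */
        public void register(IRI entity, String label) {
            for (String token : label.split("\\s+")) {
                Collection<IRI> entities = index.get(token);
                if (entities == null) {
                    entities = new ArrayList<IRI>();
                    index.put(token, entities);
                }
                entities.add(entity);
            }
        }

        public Collection<IRI> lookup(String token) {
            return index.get(token); // "Paris" and "paris" hit the same entry
        }
    }
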
diff --git a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/datamodel/PlaceAdjectival.java b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/datamodel/PlaceAdjectival.java
index 2b59330..13a9a53 100644
--- a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/datamodel/PlaceAdjectival.java
+++ b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/datamodel/PlaceAdjectival.java
@@ -16,7 +16,8 @@
  */
 package org.apache.stanbol.enhancer.engines.entitycoreference.datamodel;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
+
 
 /**
  * Represents a place adjectival inside a {@link Span}.
@@ -36,18 +37,18 @@
     private int endIdx;
 
     /**
-     * The {@link UriRef} in the {@link SiteManager} or {@link Entityhub} that this place adjectival points
+     * The {@link IRI} in the {@link SiteManager} or {@link Entityhub} that this place adjectival points
      * to.
      */
-    private UriRef placeUri;
+    private IRI placeUri;
 
-    public PlaceAdjectival(int startIdx, int endIdx, UriRef placeUri) {
+    public PlaceAdjectival(int startIdx, int endIdx, IRI placeUri) {
         this.startIdx = startIdx;
         this.endIdx = endIdx;
         this.placeUri = placeUri;
     }
 
-    public UriRef getPlaceUri() {
+    public IRI getPlaceUri() {
         return placeUri;
     }
 
diff --git a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinder.java b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinder.java
index bcaf662..eb13544 100644
--- a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinder.java
+++ b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinder.java
@@ -25,8 +25,9 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import org.apache.clerezza.commons.rdf.IRI;
 
-import org.apache.clerezza.rdf.core.UriRef;
+
 import org.apache.stanbol.enhancer.engines.entitycoreference.Constants;
 import org.apache.stanbol.enhancer.engines.entitycoreference.datamodel.NounPhrase;
 import org.apache.stanbol.enhancer.engines.entitycoreference.datamodel.PlaceAdjectival;
@@ -251,7 +252,7 @@
          */
         if (nounPhrase.hasNers()) {
             List<Span> npNers = nounPhrase.getNerChunks();
-            UriRef nerType = ner.getAnnotation(NlpAnnotations.NER_ANNOTATION).value().getType();
+            IRI nerType = ner.getAnnotation(NlpAnnotations.NER_ANNOTATION).value().getType();
 
             for (Span npNer : npNers) {
                 /*
@@ -264,7 +265,7 @@
                 Entity npEntity = lookupEntity(npNer, language);
 
                 if (npEntity != null) {
-                    UriRef npNerType = npNer.getAnnotation(NlpAnnotations.NER_ANNOTATION).value().getType();
+                    IRI npNerType = npNer.getAnnotation(NlpAnnotations.NER_ANNOTATION).value().getType();
                     Set<String> rulesOntologyAttr = new HashSet<String>();
 
                     if (OntologicalClasses.DBPEDIA_PLACE.equals(npNerType)) {
@@ -327,7 +328,7 @@
             if (this.config.shouldExcludeClass(typeUri)) continue;
 
             // First try the in memory index
-            Set<String> labels = this.entityTypeIndex.lookupEntityType(new UriRef(typeUri), language);
+            Set<String> labels = this.entityTypeIndex.lookupEntityType(new IRI(typeUri), language);
 
             if (labels == null) {
                 Site site = getReferencedSite();
@@ -343,7 +344,7 @@
                         labels.add(labelIterator.next().getText());
                     }
 
-                    this.entityTypeIndex.addEntityType(new UriRef(typeUri), language, labels);
+                    this.entityTypeIndex.addEntityType(new IRI(typeUri), language, labels);
                 }
             }
             
diff --git a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinderConfig.java b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinderConfig.java
index 1e5e17b..0a70712 100644
--- a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinderConfig.java
+++ b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/CoreferenceFinderConfig.java
@@ -20,8 +20,8 @@
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import org.apache.clerezza.commons.rdf.IRI;
 
-import org.apache.clerezza.rdf.core.UriRef;
 import org.apache.stanbol.enhancer.engines.entitycoreference.datamodel.NounPhrase;
 import org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses;
 import org.osgi.service.cm.ConfigurationException;
@@ -42,12 +42,12 @@
     /**
      * The Uris for spatial properties for the NER to be inspected when doing the coref spatial match.
      */
-    private Map<UriRef,Set<String>> spatialAttributes;
+    private Map<IRI,Set<String>> spatialAttributes;
     
     /**
      * The Uris for org membership properties for the NER to be inspected when doing the coref match.
      */
-    private Map<UriRef,Set<String>> orgMembershipAttributes;
+    private Map<IRI,Set<String>> orgMembershipAttributes;
 
     /**
      * Entity classes which will not be used for coreference because they are too general.
@@ -62,8 +62,8 @@
 						           String entityClassesToExclude) throws ConfigurationException {
     	this.maxDistance = maxDistance;
     	
-    	this.spatialAttributes = new HashMap<UriRef,Set<String>>();
-    	this.orgMembershipAttributes = new HashMap<UriRef, Set<String>>();
+    	this.spatialAttributes = new HashMap<IRI,Set<String>>();
+    	this.orgMembershipAttributes = new HashMap<IRI, Set<String>>();
     	
         if (spatialAttrForPerson != null) {
         	Set<String> attributes = new HashSet<String>();
@@ -124,7 +124,7 @@
      *            of the Entity type for which we want to get the ontology.
      * @return
      */
-    public Set<String> getSpatialAttributes(UriRef uri) {
+    public Set<String> getSpatialAttributes(IRI uri) {
         return this.spatialAttributes.get(uri);
     }
 
@@ -135,7 +135,7 @@
      *            of the Entity type for which we want to get the ontology.
      * @return
      */
-    public Set<String> getOrgMembershipAttributes(UriRef uri) {
+    public Set<String> getOrgMembershipAttributes(IRI uri) {
         return this.orgMembershipAttributes.get(uri);
     }
     
diff --git a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/Dictionaries.java b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/Dictionaries.java
index a222370..b37e32b 100644
--- a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/Dictionaries.java
+++ b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/Dictionaries.java
@@ -23,7 +23,8 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
+
 import org.apache.stanbol.enhancer.engines.entitycoreference.Constants;
 import org.apache.stanbol.enhancer.engines.entitycoreference.datamodel.NounPhrase;
 import org.apache.stanbol.enhancer.engines.entitycoreference.datamodel.PlaceAdjectival;
@@ -38,18 +39,18 @@
  */
 class Dictionaries {
     /**
-     * Contains the list of place adjectivals in the form: language -> adjectival -> UriRef -> adjectival ->
-     * UriRef There are Places that have multiple adjectivals so in this map there are adjectivals that point
-     * to the same UriRef but that ensures a fast lookup.
+     * Contains the place adjectivals in the form: language -> adjectival -> IRI.
+     * There are places that have multiple adjectivals, so several adjectivals in this map
+     * may point to the same IRI, but this layout ensures a fast lookup.
      */
-    private Map<String,Map<String,UriRef>> placeAdjectivalsMap;
+    private Map<String,Map<String,IRI>> placeAdjectivalsMap;
     
     public Dictionaries(String[] languages, String entityUriBase) throws ConfigurationException {
         placeAdjectivalsMap = new HashMap<>();
 
         for (String language : languages) {
             String line = null;
-            Map<String,UriRef> languagePlaceAdjMap = new HashMap<>();
+            Map<String,IRI> languagePlaceAdjMap = new HashMap<>();
             InputStream langIn = null;
             BufferedReader reader = null;
 
@@ -62,7 +63,7 @@
                     String[] splittedLine = line.split("\t");
                     String place = splittedLine[0];
                     String adjectivals = splittedLine[1];
-                    UriRef ref = new UriRef(entityUriBase + place.trim());
+                    IRI ref = new IRI(entityUriBase + place.trim());
                     String[] adjectivalsArray = adjectivals.split(",");
 
                     for (String adjectival : adjectivalsArray) {
@@ -99,7 +100,7 @@
      */
     public PlaceAdjectival findPlaceAdjectival(String language, NounPhrase nounPhrase) {
         List<Span> tokens = nounPhrase.getTokens();
-        Map<String,UriRef> langPlaceAdjectivalsMap = placeAdjectivalsMap.get(language);
+        Map<String,IRI> langPlaceAdjectivalsMap = placeAdjectivalsMap.get(language);
         /*
          * Go through all 1-grams and 2-grams and see if we have a match in the place adjectivals map. 2-grams
          * should be good enough since there are no 3-gram places at least from what I saw.
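
The two-level map makes findPlaceAdjectival a constant-time probe per n-gram: first resolve
the language map, then look up each candidate token. A compact sketch of the data structure
(class and method names are illustrative only):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.clerezza.commons.rdf.IRI;

    public class PlaceAdjectivalMapSketch {

        // language -> adjectival -> place IRI, as in Dictionaries
        private final Map<String, Map<String, IRI>> placeAdjectivals =
                new HashMap<String, Map<String, IRI>>();

        public void add(String language, String adjectival, IRI place) {
            Map<String, IRI> langMap = placeAdjectivals.get(language);
            if (langMap == null) {
                langMap = new HashMap<String, IRI>();
                placeAdjectivals.put(language, langMap);
            }
            // several adjectivals may point to the same place IRI;
            // the duplication buys the O(1) lookup
            langMap.put(adjectival, place);
        }

        public IRI lookup(String language, String adjectival) {
            Map<String, IRI> langMap = placeAdjectivals.get(language);
            return langMap == null ? null : langMap.get(adjectival);
        }
    }
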
diff --git a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/InMemoryEntityTypeIndex.java b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/InMemoryEntityTypeIndex.java
index 3142291..930500d 100644
--- a/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/InMemoryEntityTypeIndex.java
+++ b/enhancement-engines/entitycoreference/src/main/java/org/apache/stanbol/enhancer/engines/entitycoreference/impl/InMemoryEntityTypeIndex.java
@@ -20,7 +20,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Memory cache for storing often used Entity Type (Class) information.
@@ -32,10 +32,10 @@
     /**
      * The index having as key the Uri of the class and the value the set of labels ordered by language.
      */
-    private Map<UriRef,Map<String,Set<String>>> index;
+    private Map<IRI,Map<String,Set<String>>> index;
 
     public InMemoryEntityTypeIndex() {
-        index = new HashMap<UriRef,Map<String,Set<String>>>();
+        index = new HashMap<IRI,Map<String,Set<String>>>();
     }
 
     /**
@@ -45,7 +45,7 @@
      * @param language
      * @return
      */
-    public Set<String> lookupEntityType(UriRef uri, String language) {
+    public Set<String> lookupEntityType(IRI uri, String language) {
         Map<String,Set<String>> langMap = index.get(uri);
 
         if (langMap != null) {
@@ -62,7 +62,7 @@
      * @param language
      * @param labels
      */
-    public void addEntityType(UriRef uri, String language, Set<String> labels) {
+    public void addEntityType(IRI uri, String language, Set<String> labels) {
         Map<String,Set<String>> langMap = index.get(uri);
 
         if (langMap == null) {
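
Usage follows the read-through cache shape seen in CoreferenceFinder above: look the type up,
and only on a miss resolve the labels remotely and store them. A short usage sketch (assuming
same-package visibility of the class; the DBpedia URI is just sample data):

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.clerezza.commons.rdf.IRI;

    public class EntityTypeIndexUsage {
        public static void main(String[] args) {
            InMemoryEntityTypeIndex index = new InMemoryEntityTypeIndex();
            IRI place = new IRI("http://dbpedia.org/ontology/Place");

            Set<String> labels = new HashSet<String>();
            labels.add("place");
            labels.add("location");
            index.addEntityType(place, "en", labels);

            // later lookups for the same type/language are served from memory
            Set<String> cached = index.lookupEntityType(place, "en");
            System.out.println(cached);
        }
    }
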
diff --git a/enhancement-engines/entitycoreference/src/test/java/org/apache/stanbol/enhancer/engines/entitycoreference/EntityCoReferenceEngineTest.java b/enhancement-engines/entitycoreference/src/test/java/org/apache/stanbol/enhancer/engines/entitycoreference/EntityCoReferenceEngineTest.java
index 74177b6..98c813c 100644
--- a/enhancement-engines/entitycoreference/src/test/java/org/apache/stanbol/enhancer/engines/entitycoreference/EntityCoReferenceEngineTest.java
+++ b/enhancement-engines/entitycoreference/src/test/java/org/apache/stanbol/enhancer/engines/entitycoreference/EntityCoReferenceEngineTest.java
@@ -11,10 +11,10 @@
 import java.util.Hashtable;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.nlp.NlpAnnotations;
 import org.apache.stanbol.enhancer.nlp.coref.CorefFeature;
@@ -85,13 +85,13 @@
 	@Test
 	public void testSpatialCoref() throws EngineException, IOException {
 		ContentItem ci = ciFactory.createContentItem(new StringSource(SPATIAL_TEXT));
-		MGraph graph = ci.getMetadata();
-		UriRef textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, engine);
+		Graph graph = ci.getMetadata();
+		IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, engine);
 		graph.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl("en")));
 		graph.add(new TripleImpl(textEnhancement, ENHANCER_CONFIDENCE, new PlainLiteralImpl("100.0")));
 		graph.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
 
-		Entry<UriRef, Blob> textBlob = ContentItemHelper.getBlob(ci, Collections.singleton("text/plain"));
+		Entry<IRI, Blob> textBlob = ContentItemHelper.getBlob(ci, Collections.singleton("text/plain"));
 		AnalysedText at = atFactory.createAnalysedText(ci, textBlob.getValue());
 
 		Sentence sentence1 = at.addSentence(0, SPATIAL_SENTENCE_1.indexOf(".") + 1);
diff --git a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntitySearcherUtils.java b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntitySearcherUtils.java
index 4463a5b..398f2df 100644
--- a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntitySearcherUtils.java
+++ b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntitySearcherUtils.java
@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
@@ -49,8 +49,8 @@
      * @return
      */
     public final static FieldQuery createFieldQuery(FieldQueryFactory factory,
-                                        UriRef field,
-                                        Set<UriRef> includeFields,
+                                        IRI field,
+                                        Set<IRI> includeFields,
                                         List<String> search,
                                         String... languages) {
         if(field == null || field.getUnicodeString().isEmpty()){
@@ -67,7 +67,7 @@
-            if(!includeFields.contains(field.getUnicodeString())){
+            if(!includeFields.contains(field)){
                 query.addSelectedField(field.getUnicodeString());
             }
-            for(UriRef select : includeFields){
+            for(IRI select : includeFields){
                 query.addSelectedField(select.getUnicodeString());
             }
         }
diff --git a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubEntity.java b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubEntity.java
index 7177006..52d73d5 100644
--- a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubEntity.java
+++ b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubEntity.java
@@ -19,9 +19,8 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
 import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
@@ -33,10 +33,10 @@
 public class EntityhubEntity extends Entity {
     
     private static RdfValueFactory vf = RdfValueFactory.getInstance();
-    private static UriRef entityRanking = new UriRef(RdfResourceEnum.entityRank.getUri());
+    private static IRI entityRanking = new IRI(RdfResourceEnum.entityRank.getUri());
     
-    public EntityhubEntity(Representation rep, Set<UriRef> fields, Set<String> languages) {
-        super(new UriRef(rep.getId()), 
+    public EntityhubEntity(Representation rep, Set<IRI> fields, Set<String> languages) {
+        super(new IRI(rep.getId()), 
             toGraph(rep, fields, languages));
     }
     @Override
@@ -44,13 +44,13 @@
         return EnhancementEngineHelper.get(data, uri, entityRanking, Float.class, lf);
     }
     /**
-     * Converts {@link Representation}s to RDF ({@link TripleCollection}) and
+     * Converts {@link Representation}s to RDF ({@link Graph}) and
      * also filter literals with languages other than the parsed one
      * @param rep
      * @param languages
      * @return
      */
-    private static TripleCollection toGraph(Representation rep, Set<UriRef> includeFields, Set<String> languages){
+    private static Graph toGraph(Representation rep, Set<IRI> includeFields, Set<String> languages){
         if (rep instanceof RdfRepresentation) {
             return ((RdfRepresentation) rep).getRdfGraph();
         } else {
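
The else branch (cut off above) has to copy the Representation into a Graph while dropping
literals in unwanted languages. A hedged sketch of such a language filter over triples,
assuming IndexedGraph's default constructor (the helper class itself is hypothetical, not
the method actually used by EntityhubEntity):

    import java.util.Iterator;
    import java.util.Set;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    public class LanguageFilterSketch {

        /**
         * Copies triples into a new IndexedGraph, skipping literals whose
         * language tag is not in the requested set; literals without a
         * language tag are kept.
         */
        public static Graph filterLanguages(Iterator<Triple> triples, Set<String> languages) {
            Graph filtered = new IndexedGraph();
            while (triples.hasNext()) {
                Triple t = triples.next();
                RDFTerm o = t.getObject();
                if (o instanceof Literal) {
                    Literal lit = (Literal) o;
                    if (lit.getLanguage() != null
                            && !languages.contains(lit.getLanguage().toString())) {
                        continue; // literal in an unwanted language
                    }
                }
                filtered.add(t);
            }
            return filtered;
        }
    }
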
diff --git a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubSearcher.java b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubSearcher.java
index e3c3670..8441ec3 100644
--- a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubSearcher.java
+++ b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/EntityhubSearcher.java
@@ -25,9 +25,9 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcherException;
@@ -43,7 +43,7 @@
 public final class EntityhubSearcher extends TrackingEntitySearcher<Entityhub> implements EntitySearcher {
     
     private final Integer limit;
-    private Map<UriRef,Collection<Resource>> originInfo;
+    private Map<IRI,Collection<RDFTerm>> originInfo;
 
     public EntityhubSearcher(BundleContext context, Integer limit) {
         this(context,limit,null);
@@ -52,13 +52,13 @@
         super(context,Entityhub.class,null,customizer);
         this.limit = limit != null && limit > 0 ? limit : null;
         this.originInfo = Collections.singletonMap(
-            new UriRef(RdfResourceEnum.site.getUri()), 
-            (Collection<Resource>)Collections.singleton(
-                (Resource)new PlainLiteralImpl("entityhub")));
+            new IRI(RdfResourceEnum.site.getUri()), 
+            (Collection<RDFTerm>)Collections.singleton(
+                (RDFTerm)new PlainLiteralImpl("entityhub")));
     }
     
     @Override
-    public Entity get(UriRef id,Set<UriRef> fields, String...languages) throws EntitySearcherException {
+    public Entity get(IRI id,Set<IRI> fields, String...languages) throws EntitySearcherException {
         if(id == null || id.getUnicodeString().isEmpty()){
             return null;
         }
@@ -89,8 +89,8 @@
     }
 
     @Override
-    public Collection<? extends Entity> lookup(UriRef field,
-                                           Set<UriRef> includeFields,
+    public Collection<? extends Entity> lookup(IRI field,
+                                           Set<IRI> includeFields,
                                            List<String> search,
                                            String[] languages,
                                            Integer limit, Integer offset) throws EntitySearcherException {
@@ -138,7 +138,7 @@
     }
 
     @Override
-    public Map<UriRef,Collection<Resource>> getOriginInformation() {
+    public Map<IRI,Collection<RDFTerm>> getOriginInformation() {
         return originInfo;
     }
     
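
The double cast around Collections.singleton in the constructor above is only there to widen the inferred element type; an explicit type witness expresses the same map without casts. A sketch — the site property IRI is a placeholder for the value of RdfResourceEnum.site.getUri():

    import java.util.Collection;
    import java.util.Collections;
    import java.util.Map;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;

    public class OriginInfoSketch {
        public static void main(String[] args) {
            IRI site = new IRI("urn:example:entityhub:site"); // placeholder property IRI
            // a type witness makes both casts unnecessary
            Map<IRI, Collection<RDFTerm>> originInfo = Collections.singletonMap(site,
                Collections.<RDFTerm>singleton(new PlainLiteralImpl("entityhub")));
            System.out.println(originInfo);
        }
    }
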
diff --git a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/ReferencedSiteSearcher.java b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/ReferencedSiteSearcher.java
index 8bde110..67e77a1 100644
--- a/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/ReferencedSiteSearcher.java
+++ b/enhancement-engines/entityhublinking/src/main/java/org/apache/stanbol/enhancer/engines/entityhublinking/ReferencedSiteSearcher.java
@@ -25,9 +25,9 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
 import org.apache.stanbol.enhancer.engines.entitylinking.impl.Statistic;
@@ -49,7 +49,7 @@
     
     private final String siteId;
     private final Integer limit;
-    private Map<UriRef,Collection<Resource>> originInfo;
+    private Map<IRI,Collection<RDFTerm>> originInfo;
     Statistic queryStats = new Statistic("query", 100, log);
     Statistic resultStats = new Statistic("result", 1000, log);
     public ReferencedSiteSearcher(BundleContext context,String siteId, Integer limit){
@@ -62,13 +62,13 @@
         this.siteId = siteId;
         this.limit = limit != null && limit > 0 ? limit : null;
         this.originInfo = Collections.singletonMap(
-            new UriRef(RdfResourceEnum.site.getUri()), 
-            (Collection<Resource>)Collections.singleton(
-                (Resource)new PlainLiteralImpl(siteId)));
+            new IRI(RdfResourceEnum.site.getUri()), 
+            (Collection<RDFTerm>)Collections.singleton(
+                (RDFTerm)new PlainLiteralImpl(siteId)));
     }
     
     @Override
-    public Entity get(UriRef id,Set<UriRef> fields, String ... languages) {
+    public Entity get(IRI id,Set<IRI> fields, String ... languages) {
         if(id == null || id.getUnicodeString().isEmpty()){
             return null;
         }
@@ -99,8 +99,8 @@
     }
 
     @Override
-    public Collection<? extends Entity> lookup(UriRef field,
-                                           Set<UriRef> includeFields,
+    public Collection<? extends Entity> lookup(IRI field,
+                                           Set<IRI> includeFields,
                                            List<String> search,
                                            String[] languages,
                                            Integer limit, Integer offset) throws IllegalStateException {
@@ -156,7 +156,7 @@
     }
     
     @Override
-    public Map<UriRef,Collection<Resource>> getOriginInformation() {
+    public Map<IRI,Collection<RDFTerm>> getOriginInformation() {
         return originInfo;
     }
 }
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/Entity.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/Entity.java
index 9038d6e..2d78ca8 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/Entity.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/Entity.java
@@ -19,12 +19,11 @@
 import java.util.Iterator;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.commons.collections.Predicate;
 import org.apache.commons.collections.PredicateUtils;
 import org.apache.commons.collections.Transformer;
@@ -46,44 +45,44 @@
             return ((Triple)input).getObject();
         }
     };
-    protected static final Predicate PLAIN_LITERALS = PredicateUtils.instanceofPredicate(PlainLiteral.class);
-    protected static final Predicate TYPED_LITERALS = PredicateUtils.instanceofPredicate(TypedLiteral.class);
-    protected static final Predicate REFERENCES = PredicateUtils.instanceofPredicate(UriRef.class);
+    protected static final Predicate PLAIN_LITERALS = PredicateUtils.instanceofPredicate(Literal.class);
+    //protected static final Predicate TYPED_LITERALS = PredicateUtils.instanceofPredicate(TypedLiteral.class);
+    protected static final Predicate REFERENCES = PredicateUtils.instanceofPredicate(IRI.class);
     /**
      * The URI of the Entity
      */
-     protected final UriRef uri;
+     protected final IRI uri;
     /**
      * The data of the Entity. The graph is expected to contain all information
      * of the entity by containing {@link Triple}s that use the {@link #uri} as
      * {@link Triple#getSubject() subject}
      */
-    protected final TripleCollection data;
+    protected final Graph data;
     
     /**
      * Constructs a new Entity
      * @param uri
      * @param data
      */
-    public Entity(UriRef uri, TripleCollection data) {
+    public Entity(IRI uri, Graph data) {
         this.uri = uri;
         this.data = data;
     }
-    public final UriRef getUri() {
+    public final IRI getUri() {
         return uri;
     }
     public final String getId(){
         return uri.getUnicodeString();
     }
-    public final TripleCollection getData() {
+    public final Graph getData() {
         return data;
     }
     @SuppressWarnings("unchecked")
-    public Iterator<PlainLiteral> getText(UriRef field) {
+    public Iterator<Literal> getText(IRI field) {
         return new FilterIterator(new TransformIterator(data.filter(uri, field, null), TRIPLE2OBJECT), PLAIN_LITERALS);
     }
     @SuppressWarnings("unchecked")
-    public Iterator<UriRef> getReferences(UriRef field){
+    public Iterator<IRI> getReferences(IRI field){
         return new FilterIterator(new TransformIterator(data.filter(uri, field, null), TRIPLE2OBJECT), REFERENCES);
     }
     
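
Note the semantic shift hidden in the predicate change above: with PlainLiteral and TypedLiteral collapsed into the single Literal type, PLAIN_LITERALS now matches every literal, so getText also yields typed literals, and callers that need language-tagged text must inspect getLanguage() themselves. A small usage sketch with made-up data:

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.apache.stanbol.enhancer.engines.entitylinking.Entity;

    public class EntityTextSketch {
        public static void main(String[] args) {
            IRI uri = new IRI("urn:example:entity");
            IRI label = new IRI("http://www.w3.org/2000/01/rdf-schema#label");
            Graph data = new IndexedGraph();
            data.add(new TripleImpl(uri, label, new PlainLiteralImpl("example")));
            Entity entity = new Entity(uri, data);
            for (Iterator<Literal> it = entity.getText(label); it.hasNext();) {
                Literal l = it.next();
                // getLanguage() may be null for plain as well as typed literals
                System.out.println(l.getLexicalForm() + " @" + l.getLanguage());
            }
        }
    }
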
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/EntitySearcher.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/EntitySearcher.java
index 71303b9..187543c 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/EntitySearcher.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/EntitySearcher.java
@@ -21,9 +21,8 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Interface used to search for Entities (e.g. as defined by a Controlled
@@ -52,7 +51,7 @@
-     * @throws IllegalArgumentException if the parsed field is <code>null</code>;
-     * the list with the search terms is <code>null</code> or empty;
+     * @throws IllegalArgumentException if the parsed field is <code>null</code>,
+     * or if the list with the search terms is <code>null</code> or empty
      */
-    Collection<? extends Entity> lookup(UriRef field, Set<UriRef> selectedFields, 
+    Collection<? extends Entity> lookup(IRI field, Set<IRI> selectedFields, 
         List<String> search, String[] languages, Integer limit, Integer offset) 
                 throws EntitySearcherException;
     /**
@@ -67,7 +66,7 @@
      * Entity with the parsed Id
      * @throws IllegalArgumentException if the parsed id is <code>null</code>
      */
-    Entity get(UriRef id,Set<UriRef> selectedFields, String...languages) throws EntitySearcherException;
+    Entity get(IRI id,Set<IRI> selectedFields, String...languages) throws EntitySearcherException;
     /**
      * Returns <code>true</code> if this EntitySearcher can operate without
-     * dependencies to remote services. This is important because Stanbol can
+     * dependencies on remote services. This is important because Stanbol can
@@ -90,5 +89,5 @@
      * @return the predicate[1..1] -> predicate[1..*] tuples added to any 
      * 'fise:EntityAnnotation'.
      */
-    Map<UriRef,Collection<Resource>> getOriginInformation();
+    Map<IRI,Collection<RDFTerm>> getOriginInformation();
 }
\ No newline at end of file
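
For reference, callers of the reworked interface now pass IRI fields throughout. A sketch, assuming some EntitySearcher implementation is at hand; the rdfs:label IRI and the paging values are illustrative:

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.Collections;
    import java.util.Set;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
    import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
    import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcherException;

    public class SearcherUsageSketch {
        static Collection<? extends Entity> findByLabel(EntitySearcher searcher, String term)
                throws EntitySearcherException {
            IRI nameField = new IRI("http://www.w3.org/2000/01/rdf-schema#label");
            Set<IRI> selected = Collections.singleton(nameField);
            // search for one term, English labels only, first 10 results
            return searcher.lookup(nameField, selected, Arrays.asList(term),
                new String[]{"en"}, 10, 0);
        }
    }
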
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/config/EntityLinkerConfig.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/config/EntityLinkerConfig.java
index 7f21aad..caa94aa 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/config/EntityLinkerConfig.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/config/EntityLinkerConfig.java
@@ -29,7 +29,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.namespaceprefix.NamespaceMappingUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
@@ -260,17 +260,17 @@
     /**
      * Default value for {@link #getNameField()} (rdfs:label)
      */
-    public static final UriRef DEFAULT_NAME_FIELD = new UriRef(
+    public static final IRI DEFAULT_NAME_FIELD = new IRI(
         "http://www.w3.org/2000/01/rdf-schema#label");
     /**
      * Default value for {@link #getTypeField()} (rdf:type)
      */
-    public static final UriRef DEFAULT_TYPE_FIELD = new UriRef(
+    public static final IRI DEFAULT_TYPE_FIELD = new IRI(
         "http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
     /**
      * Default value for {@link #getRedirectField()} (rdf:seeAlso)
      */
-    public static final UriRef DEFAULT_REDIRECT_FIELD = new UriRef(
+    public static final IRI DEFAULT_REDIRECT_FIELD = new IRI(
         "http://www.w3.org/2000/01/rdf-schema#seeAlso");
     /**
      * The default language used to search for labels regardless of the language
@@ -300,41 +300,41 @@
      * Default mapping for Concept types to dc:type values added for
      * TextAnnotations.
      */
-    public static final Map<UriRef,UriRef> DEFAULT_ENTITY_TYPE_MAPPINGS;
+    public static final Map<IRI,IRI> DEFAULT_ENTITY_TYPE_MAPPINGS;
     
     static { //the default mappings for the three types used by the Stanbol Enhancement Structure
-        Map<UriRef,UriRef> mappings = new HashMap<UriRef,UriRef>();
+        Map<IRI,IRI> mappings = new HashMap<IRI,IRI>();
         mappings.put(OntologicalClasses.DBPEDIA_ORGANISATION, OntologicalClasses.DBPEDIA_ORGANISATION);
-        mappings.put(new UriRef("http://dbpedia.org/ontology/Newspaper"), OntologicalClasses.DBPEDIA_ORGANISATION);
-        mappings.put(new UriRef("http://schema.org/Organization"), OntologicalClasses.DBPEDIA_ORGANISATION);
+        mappings.put(new IRI("http://dbpedia.org/ontology/Newspaper"), OntologicalClasses.DBPEDIA_ORGANISATION);
+        mappings.put(new IRI("http://schema.org/Organization"), OntologicalClasses.DBPEDIA_ORGANISATION);
 //        mappings.put(NamespaceEnum.dailymed+"organization",OntologicalClasses.DBPEDIA_ORGANISATION);
         
         mappings.put(OntologicalClasses.DBPEDIA_PERSON, OntologicalClasses.DBPEDIA_PERSON);
-        mappings.put(new UriRef("http://xmlns.com/foaf/0.1/Person"), OntologicalClasses.DBPEDIA_PERSON);
-        mappings.put(new UriRef("http://schema.org/Person"), OntologicalClasses.DBPEDIA_PERSON);
+        mappings.put(new IRI("http://xmlns.com/foaf/0.1/Person"), OntologicalClasses.DBPEDIA_PERSON);
+        mappings.put(new IRI("http://schema.org/Person"), OntologicalClasses.DBPEDIA_PERSON);
 
         mappings.put(OntologicalClasses.DBPEDIA_PLACE, OntologicalClasses.DBPEDIA_PLACE);
-        mappings.put(new UriRef("http://schema.org/Place"), OntologicalClasses.DBPEDIA_PLACE);
-        mappings.put(new UriRef("http://www.opengis.net/gml/_Feature"), OntologicalClasses.DBPEDIA_PLACE);
+        mappings.put(new IRI("http://schema.org/Place"), OntologicalClasses.DBPEDIA_PLACE);
+        mappings.put(new IRI("http://www.opengis.net/gml/_Feature"), OntologicalClasses.DBPEDIA_PLACE);
 
         mappings.put(OntologicalClasses.SKOS_CONCEPT, OntologicalClasses.SKOS_CONCEPT);
 
-//        UriRef DRUG = new UriRef(NamespaceEnum.drugbank+"drugs");
+//        IRI DRUG = new IRI(NamespaceEnum.drugbank+"drugs");
 //        mappings.put(DRUG.getUnicodeString(), DRUG);
 //        mappings.put(NamespaceEnum.dbpediaOnt+"Drug", DRUG);
 //        mappings.put(NamespaceEnum.dailymed+"drugs", DRUG);
 //        mappings.put(NamespaceEnum.sider+"drugs", DRUG);
 //        mappings.put(NamespaceEnum.tcm+"Medicine", DRUG);
 //        
-//        UriRef DISEASE = new UriRef(NamespaceEnum.diseasome+"diseases");
+//        IRI DISEASE = new IRI(NamespaceEnum.diseasome+"diseases");
 //        mappings.put(DISEASE.getUnicodeString(), DISEASE);
 //        mappings.put(NamespaceEnum.linkedct+"condition", DISEASE);
 //        mappings.put(NamespaceEnum.tcm+"Disease", DISEASE);
 //
-//        UriRef SIDE_EFFECT = new UriRef(NamespaceEnum.sider+"side_effects");
+//        IRI SIDE_EFFECT = new IRI(NamespaceEnum.sider+"side_effects");
 //        mappings.put(SIDE_EFFECT.getUnicodeString(), SIDE_EFFECT);
 //        
-//        UriRef INGREDIENT = new UriRef(NamespaceEnum.dailymed+"ingredients");
+//        IRI INGREDIENT = new IRI(NamespaceEnum.dailymed+"ingredients");
 //        mappings.put(INGREDIENT.getUnicodeString(), INGREDIENT);
                 
         DEFAULT_ENTITY_TYPE_MAPPINGS = Collections.unmodifiableMap(mappings);
@@ -403,8 +403,8 @@
      * Holds the mappings of rdf:type used by concepts to dc:type values used
      * by TextAnnotations. 
      */
-    private Map<UriRef,UriRef> typeMappings;
-    private Map<UriRef, UriRef> unmodTypeMappings;
+    private Map<IRI,IRI> typeMappings;
+    private Map<IRI, IRI> unmodTypeMappings;
     /**
      * The mode on how to process redirect for Entities. 
      */
@@ -412,16 +412,16 @@
     /**
      * the default DC Type
      */
-    private UriRef defaultDcType;
-    private UriRef nameField;
-    private UriRef redirectField;
-    private UriRef typeField;
-    private Map<UriRef,Integer> blacklistedTypes = new HashMap<UriRef,Integer>();
-    private Map<UriRef,Integer> whitelistedTypes = new HashMap<UriRef,Integer>();
+    private IRI defaultDcType;
+    private IRI nameField;
+    private IRI redirectField;
+    private IRI typeField;
+    private Map<IRI,Integer> blacklistedTypes = new HashMap<IRI,Integer>();
+    private Map<IRI,Integer> whitelistedTypes = new HashMap<IRI,Integer>();
     private Boolean defaultWhitelistTypes = null;
-    private Set<UriRef> dereferencedFields = new HashSet<UriRef>();
+    private Set<IRI> dereferencedFields = new HashSet<IRI>();
 
-    private Set<UriRef> __selectedFields;
+    private Set<IRI> __selectedFields;
     /**
      * The language always included in searches (regardless of the language
-     * detected for the text.
+     * detected for the text).
@@ -513,7 +513,7 @@
         setMaxSuggestions(DEFAULT_SUGGESTIONS);
         setMaxSearchTokens(DEFAULT_MAX_SEARCH_TOKENS);
         setRedirectProcessingMode(DEFAULT_REDIRECT_PROCESSING_MODE);
-        typeMappings = new HashMap<UriRef,UriRef>(DEFAULT_ENTITY_TYPE_MAPPINGS);
+        typeMappings = new HashMap<IRI,IRI>(DEFAULT_ENTITY_TYPE_MAPPINGS);
         unmodTypeMappings = Collections.unmodifiableMap(typeMappings);
         setDefaultDcType(typeMappings.remove(null));
         setNameField(DEFAULT_NAME_FIELD);
@@ -559,7 +559,7 @@
             if(value.toString().isEmpty()){
                 throw new ConfigurationException(NAME_FIELD,"The configured name field MUST NOT be empty");
             }
-            linkerConfig.setNameField(new UriRef(
+            linkerConfig.setNameField(new IRI(
                 getFullName(prefixService,NAME_FIELD,value.toString())));
         }
         
@@ -577,7 +577,7 @@
             if(value.toString().isEmpty()){
-                throw new ConfigurationException(TYPE_FIELD,"The configured name field MUST NOT be empty");
+                throw new ConfigurationException(TYPE_FIELD,"The configured type field MUST NOT be empty");
             }
-            linkerConfig.setTypeField(new UriRef(
+            linkerConfig.setTypeField(new IRI(
                 getFullName(prefixService, TYPE_FIELD, value.toString())));
         }
         
@@ -587,7 +587,7 @@
             if(value.toString().isEmpty()){
-                throw new ConfigurationException(NAME_FIELD,"The configured name field MUST NOT be empty");
+                throw new ConfigurationException(REDIRECT_FIELD,"The configured redirect field MUST NOT be empty");
             }
-            linkerConfig.setRedirectField(new UriRef(
+            linkerConfig.setRedirectField(new IRI(
                 getFullName(prefixService,REDIRECT_FIELD,value.toString())));
         }
         
@@ -846,13 +846,13 @@
                             sourceTypes[0],o);
                         continue configs;
                     }
-                    UriRef targetUri = new UriRef(targetType);
+                    IRI targetUri = new IRI(targetType);
                     for(String sourceType : sourceTypes){
                         if(!sourceType.isEmpty()){
                             sourceType = getFullName(prefixService,TYPE_MAPPINGS,sourceType.trim()); //support for ns:localName
                             try { //validate
                                 new URI(sourceType);
-                                UriRef old = linkerConfig.setTypeMapping(sourceType, targetUri);
+                                IRI old = linkerConfig.setTypeMapping(sourceType, targetUri);
                                 if(old == null){
                                     log.info(" > add type mapping {} > {}", sourceType,targetType);
                                 } else {
@@ -887,20 +887,20 @@
                 for(String field : (String[])value){
                     if(field != null && !field.isEmpty()){
                         linkerConfig.getDereferencedFields().add(
-                            new UriRef(getFullName(prefixService,DEREFERENCE_ENTITIES_FIELDS,field)));
+                            new IRI(getFullName(prefixService,DEREFERENCE_ENTITIES_FIELDS,field)));
                     }
                 }
             } else if(value instanceof Collection<?>){
                 for(Object field : (Collection<?>)value){
                     if(field != null && !field.toString().isEmpty()){
                         linkerConfig.getDereferencedFields().add(
-                            new UriRef(getFullName(prefixService,DEREFERENCE_ENTITIES_FIELDS,field.toString())));
+                            new IRI(getFullName(prefixService,DEREFERENCE_ENTITIES_FIELDS,field.toString())));
                     }
                 }
             } else if(value instanceof String){
                 if(!value.toString().isEmpty()){
                     linkerConfig.getDereferencedFields().add(
-                        new UriRef(getFullName(prefixService,DEREFERENCE_ENTITIES_FIELDS,value.toString())));
+                        new IRI(getFullName(prefixService,DEREFERENCE_ENTITIES_FIELDS,value.toString())));
                 }
             } else if(value != null){
                 throw new ConfigurationException(DEREFERENCE_ENTITIES_FIELDS, 
@@ -980,7 +980,7 @@
-                    throw new ConfigurationException(ENTITY_TYPES, "The list of whitelisted/blacklisted "
+                    throw new ConfigurationException(ENTITY_TYPES, "The list of whitelisted/blacklisted types "
                         + "MUST NOT contain '!' (configured: "+entityTypesConfig+")!");
                 }
-                UriRef uri = new UriRef(getFullName(prefixService, ENTITY_TYPES, 
+                IRI uri = new IRI(getFullName(prefixService, ENTITY_TYPES, 
                     blacklisted ? type.substring(1) : type));
                 if(blacklisted){
                     linkerConfig.addBlacklistType(uri, Integer.valueOf(i));
@@ -1026,7 +1026,7 @@
      * (e.g. rdfs:label, skos:prefLabel). Needs to return the full URI
-     * @return the field used for the names of in the Taxonomy.
+     * @return the field used for the names of entities in the Taxonomy.
      */
-    public final UriRef getNameField() {
+    public final IRI getNameField() {
         return nameField;
     }
     /**
@@ -1034,7 +1034,7 @@
      * (e.g. rdfs:label, skos:prefLabel).
      * @param nameField the nameField to set
      */
-    public final void setNameField(UriRef nameField) {
+    public final void setNameField(IRI nameField) {
         this.nameField = nameField;
         __selectedFields = null;
     }
@@ -1043,21 +1043,21 @@
-     * set that allows to configure the fields that should be dereferenced
-     * @return
+     * set that allows configuring the fields that should be dereferenced
+     * @return the set of dereferenced fields
      */
-    public final Set<UriRef> getDereferencedFields(){
+    public final Set<IRI> getDereferencedFields(){
         return dereferencedFields;
     }
     /**
      * The field used to follow redirects (typically rdf:seeAlso)
      * @return the redirect field
      */
-    public final UriRef getRedirectField() {
+    public final IRI getRedirectField() {
         return redirectField;
     }
     /**
      * The field used to follow redirects (typically rdf:seeAlso)
      * @param redirectField the redirectField to set
      */
-    public final void setRedirectField(UriRef redirectField) {
+    public final void setRedirectField(IRI redirectField) {
         this.redirectField = redirectField;
         __selectedFields = null;
     }
@@ -1065,14 +1065,14 @@
      * The field used to lookup the types (typically rdf:type)
      * @return the field name used to lookup types
      */
-    public final UriRef getTypeField() {
+    public final IRI getTypeField() {
         return typeField;
     }
     /**
      * The field used to lookup the types (typically rdf:type)
      * @param typeField the typeField to set
      */
-    public final void setTypeField(UriRef typeField) {
+    public final void setTypeField(IRI typeField) {
         this.typeField = typeField;
         __selectedFields = null;
     }
@@ -1175,28 +1175,28 @@
      * @param conceptType the concept type to remove the mapping
      * @return the previously mapped dc:type value or <code>null</code> if
      * no mapping for the parsed concept type was present
-    public UriRef removeTypeMapping(UriRef conceptType){
+    public IRI removeTypeMapping(IRI conceptType){
         return typeMappings.remove(conceptType);
     }
      */
     /**
      * 
      * @param conceptType the type of the concept or <code>null</code> to
-     * add the default dc:type mapping. See also {@link #setDefaultDcType(UriRef)}
+     * add the default dc:type mapping. See also {@link #setDefaultDcType(IRI)}
      * @param dcType the dc:type for the parsed concept type
      * @return the previously mapped dc:type value if an existing mapping
      * was updated or <code>null</code> if a new mapping was added.
      */
-    public UriRef setTypeMapping(String conceptType, UriRef dcType){
+    public IRI setTypeMapping(String conceptType, IRI dcType){
         if(dcType == null) {
-            return typeMappings.remove(conceptType == null ? null : new UriRef(conceptType));
+            return typeMappings.remove(conceptType == null ? null : new IRI(conceptType));
         } else {
             if(conceptType == null){ //handle setting of the default dc:type value
-                UriRef oldDefault = getDefaultDcType();
+                IRI oldDefault = getDefaultDcType();
                 setDefaultDcType(dcType);
                 return oldDefault;
             }
-            return typeMappings.put(new UriRef(conceptType), dcType);
+            return typeMappings.put(new IRI(conceptType), dcType);
         }
     }
     
@@ -1207,7 +1207,7 @@
      * cases.
      * @param defaultDcType the defaultDcType to set
      */
-    public void setDefaultDcType(UriRef defaultDcType) {
+    public void setDefaultDcType(IRI defaultDcType) {
         this.defaultDcType = defaultDcType;
     }
     /**
@@ -1216,7 +1216,7 @@
      * explicit mapping exists
      * @return the defaultDcType
      */
-    public UriRef getDefaultDcType() {
+    public IRI getDefaultDcType() {
         return defaultDcType;
     }
     /**
@@ -1238,7 +1238,7 @@
-     * Getter for the read only mappings of type mappings
+     * Getter for the read-only view of the type mappings
      * @return the type mappings (read only)
      */
-    public Map<UriRef,UriRef> getTypeMappings() {
+    public Map<IRI,IRI> getTypeMappings() {
         return unmodTypeMappings;
     }
     /**
@@ -1502,9 +1502,9 @@
      * @return the selected fields for queries against the linked vocabulary.
      * @deprecated Use a Dereference Engine instead (STANBOL-336)
      */
-    public Set<UriRef> getSelectedFields() {
+    public Set<IRI> getSelectedFields() {
         if(__selectedFields == null){
-            Set<UriRef> fields = new HashSet<UriRef>();
+            Set<IRI> fields = new HashSet<IRI>();
             fields.add(nameField);
             fields.add(typeField);
             if(redirectProcessingMode != RedirectProcessingMode.IGNORE){
@@ -1568,7 +1568,7 @@
     /**
-     * Adds an type to the blacklist
+     * Adds a type to the blacklist
      */
-    public final void addBlacklistType(UriRef type, Integer order) {
+    public final void addBlacklistType(IRI type, Integer order) {
         if(type != null && order != null){
             blacklistedTypes.put(type, order);
         }
@@ -1576,7 +1576,7 @@
     /**
-     * Adds an type to the blacklist
+     * Adds a type to the whitelist
      */
-    public final void addWhitelistType(UriRef type, Integer order) {
+    public final void addWhitelistType(IRI type, Integer order) {
         if(type != null && order != null){
             whitelistedTypes.put(type, order);
         }
@@ -1600,7 +1600,7 @@
     /**
-     * @param ignoredTypes the ignoredTypes to set
+     * @return the blacklisted types
      */
-    public final Map<UriRef, Integer> getBlacklistedTypes() {
+    public final Map<IRI, Integer> getBlacklistedTypes() {
         return blacklistedTypes;
     }
     
@@ -1608,7 +1608,7 @@
     /**
-     * @param ignoredTypes the ignoredTypes to set
+     * @return the whitelisted types
      */
-    public final Map<UriRef, Integer> getWhitelistedTypes() {
+    public final Map<IRI, Integer> getWhitelistedTypes() {
         return whitelistedTypes;
     }
     /**
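
setTypeMapping accepts the concept type as a string and the dc:type target as an IRI; passing null as the concept type routes to the default dc:type instead. A sketch, assuming a no-argument constructor that initializes the defaults listed above (the dbpedia and schema.org IRIs mirror the default mappings):

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.engines.entitylinking.config.EntityLinkerConfig;

    public class TypeMappingSketch {
        public static void main(String[] args) {
            EntityLinkerConfig config = new EntityLinkerConfig();
            IRI dbpPerson = new IRI("http://dbpedia.org/ontology/Person");
            // map an additional concept type onto the dc:type used for TextAnnotations;
            // returns the previously mapped value, or null if the mapping is new
            IRI old = config.setTypeMapping("http://schema.org/Person", dbpPerson);
            // a null concept type sets the default dc:type instead
            config.setTypeMapping(null, dbpPerson);
        }
    }
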
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngine.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngine.java
index 330148c..5309a56 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngine.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngine.java
@@ -34,17 +34,16 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.clerezza.rdf.core.impl.TypedLiteralImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TypedLiteralImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.felix.scr.annotations.ReferenceCardinality;
 import org.apache.felix.scr.annotations.ReferencePolicy;
@@ -98,9 +97,9 @@
      */
     public static final Integer DEFAULT_ORDER = ServiceProperties.ORDERING_DEFAULT - 10;
     
-    private static final UriRef XSD_DOUBLE = new UriRef("http://www.w3.org/2001/XMLSchema#double");
+    private static final IRI XSD_DOUBLE = new IRI("http://www.w3.org/2001/XMLSchema#double");
     
-    private static final UriRef ENHANCER_ENTITY_RANKING = new UriRef(NamespaceEnum.fise + "entity-ranking");
+    private static final IRI ENHANCER_ENTITY_RANKING = new IRI(NamespaceEnum.fise + "entity-ranking");
     
     /**
      * The name of this engine
@@ -293,23 +292,23 @@
         if(language != null && !language.isEmpty()){
             languageObject = new Language(language);
         }
-        Set<UriRef> dereferencedEntitis = new HashSet<UriRef>();
+        Set<IRI> dereferencedEntitis = new HashSet<IRI>();
         
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         for(LinkedEntity linkedEntity : linkedEntities){
-            Collection<UriRef> textAnnotations = new ArrayList<UriRef>(linkedEntity.getOccurrences().size());
+            Collection<IRI> textAnnotations = new ArrayList<IRI>(linkedEntity.getOccurrences().size());
             //first create the TextAnnotations for the Occurrences
             for(Occurrence occurrence : linkedEntity.getOccurrences()){
                 Literal startLiteral = literalFactory.createTypedLiteral(occurrence.getStart());
                 Literal endLiteral = literalFactory.createTypedLiteral(occurrence.getEnd());
                 //search for existing text annotation
                 Iterator<Triple> it = metadata.filter(null, ENHANCER_START, startLiteral);
-                UriRef textAnnotation = null;
+                IRI textAnnotation = null;
                 while(it.hasNext()){
                     Triple t = it.next();
                     if(metadata.filter(t.getSubject(), ENHANCER_END, endLiteral).hasNext() &&
                             metadata.filter(t.getSubject(), RDF_TYPE, ENHANCER_TEXTANNOTATION).hasNext()){
-                        textAnnotation = (UriRef)t.getSubject();
+                        textAnnotation = (IRI)t.getSubject();
                         break;
                     }
                 }
@@ -335,7 +334,7 @@
                         new PlainLiteralImpl(this.getClass().getName())));
                 }
                 //add dc:types (even to existing)
-                for(UriRef dcType : linkedEntity.getTypes()){
+                for(IRI dcType : linkedEntity.getTypes()){
                     metadata.add(new TripleImpl(
                         textAnnotation, Properties.DC_TYPE, dcType));
                 }
@@ -343,26 +342,26 @@
             }
             //now the EntityAnnotations for the Suggestions
             for(Suggestion suggestion : linkedEntity.getSuggestions()){
-                UriRef entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
+                IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
                 //should we use the label used for the match, or search the
-                //representation for the best label ... currently its the matched one
+                //representation for the best label ... currently it's the matched one
-                PlainLiteral label = suggestion.getBestLabel(linkerConfig.getNameField(),language);
+                Literal label = suggestion.getBestLabel(linkerConfig.getNameField(),language);
                 Entity entity = suggestion.getEntity();
                 metadata.add(new TripleImpl(entityAnnotation, Properties.ENHANCER_ENTITY_LABEL, label));
                 metadata.add(new TripleImpl(entityAnnotation,ENHANCER_ENTITY_REFERENCE, entity.getUri()));
-                Iterator<UriRef> suggestionTypes = entity.getReferences(linkerConfig.getTypeField());
+                Iterator<IRI> suggestionTypes = entity.getReferences(linkerConfig.getTypeField());
                 while(suggestionTypes.hasNext()){
                     metadata.add(new TripleImpl(entityAnnotation, 
                         Properties.ENHANCER_ENTITY_TYPE, suggestionTypes.next()));
                 }
                 metadata.add(new TripleImpl(entityAnnotation,
                     Properties.ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(suggestion.getScore())));
-                for(UriRef textAnnotation : textAnnotations){
+                for(IRI textAnnotation : textAnnotations){
                     metadata.add(new TripleImpl(entityAnnotation, Properties.DC_RELATION, textAnnotation));
                 }
-                //add origin information of the EntiySearcher
+                //add origin information of the EntitySearcher
-                for(Entry<UriRef,Collection<Resource>> originInfo : entitySearcher.getOriginInformation().entrySet()){
-                    for(Resource value : originInfo.getValue()){
+                for(Entry<IRI,Collection<RDFTerm>> originInfo : entitySearcher.getOriginInformation().entrySet()){
+                    for(RDFTerm value : originInfo.getValue()){
                         metadata.add(new TripleImpl(entityAnnotation, 
                             originInfo.getKey(),value));
                     }
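
Each suggestion above becomes one fise:EntityAnnotation whose label, reference, type, confidence, and origin triples are written into the ContentItem metadata. A reduced sketch of the label and reference triples, spelling out the fise property IRIs that the real code takes from the Properties constants; the annotation URI is a placeholder for what EnhancementEngineHelper.createEntityEnhancement returns:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    public class EntityAnnotationSketch {
        public static void main(String[] args) {
            Graph metadata = new IndexedGraph();
            IRI entityAnnotation = new IRI("urn:enhancement:entity-annotation-1"); // placeholder
            IRI entityLabel = new IRI("http://fise.iks-project.eu/ontology/entity-label");
            IRI entityReference = new IRI("http://fise.iks-project.eu/ontology/entity-reference");
            metadata.add(new TripleImpl(entityAnnotation, entityLabel,
                new PlainLiteralImpl("Patrick Marshall")));
            metadata.add(new TripleImpl(entityAnnotation, entityReference,
                new IRI("urn:test:PatrickMarshall")));
        }
    }
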
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/EntityLinker.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/EntityLinker.java
index 06ac3e1..6528af2 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/EntityLinker.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/EntityLinker.java
@@ -31,11 +31,11 @@
 import java.util.Set;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
@@ -496,22 +496,22 @@
-     * @param conceptTypes The list of suggestions
+     * @param suggestions the list of suggestions
      * @return the types values for the {@link LinkedEntity}
      */
-    private Set<UriRef> getLinkedEntityTypes(Collection<Suggestion> suggestions){
-        Collection<UriRef> conceptTypes = new HashSet<UriRef>();
+    private Set<IRI> getLinkedEntityTypes(Collection<Suggestion> suggestions){
+        Collection<IRI> conceptTypes = new HashSet<IRI>();
         double score = -1; //only consider types of the best ranked Entities
         for(Suggestion suggestion : suggestions){
             double actScore = suggestion.getScore();
             if(actScore < score){
                 break;
             }
-            for(Iterator<UriRef> types = 
+            for(Iterator<IRI> types = 
                 suggestion.getEntity().getReferences(linkerConfig.getTypeField()); 
                 types.hasNext();conceptTypes.add(types.next()));
         }
-        Map<UriRef,UriRef> typeMappings = linkerConfig.getTypeMappings();
-        Set<UriRef> dcTypes = new HashSet<UriRef>();
-        for(UriRef conceptType : conceptTypes){
-            UriRef dcType = typeMappings.get(conceptType);
+        Map<IRI,IRI> typeMappings = linkerConfig.getTypeMappings();
+        Set<IRI> dcTypes = new HashSet<IRI>();
+        for(IRI conceptType : conceptTypes){
+            IRI dcType = typeMappings.get(conceptType);
             if(dcType != null){
                 dcTypes.add(dcType);
             }
@@ -541,13 +541,13 @@
             return; //Redirects for ResultMatch are already processed ... ignore
         }
         Entity result = suggestion.getResult();
-        Iterator<UriRef> redirects = result.getReferences(linkerConfig.getRedirectField());
+        Iterator<IRI> redirects = result.getReferences(linkerConfig.getRedirectField());
         switch (linkerConfig.getRedirectProcessingMode()) {
             case ADD_VALUES:
-                TripleCollection entityData = result.getData();
-                UriRef entityUri = result.getUri();
+                Graph entityData = result.getData();
+                IRI entityUri = result.getUri();
                 while(redirects.hasNext()){
-                    UriRef redirect = redirects.next();
+                    IRI redirect = redirects.next();
                     if(redirect != null){
                         Entity redirectedEntity = entitySearcher.get(redirect,
                             linkerConfig.getSelectedFields());
@@ -564,7 +564,7 @@
                 }
             case FOLLOW:
                 while(redirects.hasNext()){
-                    UriRef redirect = redirects.next();
+                    IRI redirect = redirects.next();
                     if(redirect != null){
                         Entity redirectedEntity = entitySearcher.get(redirect,
                             linkerConfig.getSelectedFields());
@@ -734,13 +734,13 @@
         }
     }
     
-    public boolean filterEntity(Iterator<UriRef> entityTypes){
-        Map<UriRef, Integer> whiteList = linkerConfig.getWhitelistedTypes();
-        Map<UriRef, Integer> blackList = linkerConfig.getBlacklistedTypes();
+    public boolean filterEntity(Iterator<IRI> entityTypes){
+        Map<IRI, Integer> whiteList = linkerConfig.getWhitelistedTypes();
+        Map<IRI, Integer> blackList = linkerConfig.getBlacklistedTypes();
         Integer w = null;
         Integer b = null;
         while(entityTypes.hasNext()){
-            UriRef type = entityTypes.next();
+            IRI type = entityTypes.next();
             Integer act = whiteList.get(type);
             if(act != null){
                 if(w == null || act.compareTo(w) < 0){
@@ -789,22 +789,22 @@
         String curLang = documentLang; //language of the current sentence
         String defLang = defaultLang; //configured default language 
         String mainLang = documentMainLang;
-        Collection<PlainLiteral> mainLangLabels;
+        Collection<Literal> mainLangLabels;
         if(documentMainLang != null){
             mainLang = documentMainLang;
-            mainLangLabels = new ArrayList<PlainLiteral>();
+            mainLangLabels = new ArrayList<Literal>();
         } else {
             mainLang = documentLang;
             mainLangLabels = Collections.emptyList();
         }
-        Iterator<PlainLiteral> labels = entity.getText(linkerConfig.getNameField());
+        Iterator<Literal> labels = entity.getText(linkerConfig.getNameField());
         Suggestion match = new Suggestion(entity);
-        Collection<PlainLiteral> defaultLabels = new ArrayList<PlainLiteral>();
+        Collection<Literal> defaultLabels = new ArrayList<Literal>();
         boolean matchedLangLabel = false;
-        //avoid matching multiple labels with the exact same lexical.
+        //avoid matching multiple labels with the exact same lexical form.
         Set<String> matchedLabels = new HashSet<String>();
         while(labels.hasNext()){
-            PlainLiteral label = labels.next();
+            Literal label = labels.next();
             //numLabels++;
             String lang = label.getLanguage() != null ? label.getLanguage().toString() : null;
             String text = label.getLexicalForm();
@@ -831,7 +831,7 @@
         }
         //try to match main language labels
         if(!matchedLangLabel || match.getMatch() == MATCH.NONE){
-            for(PlainLiteral mainLangLabel : mainLangLabels){
+            for(Literal mainLangLabel : mainLangLabels){
                 if(!matchedLabels.contains(mainLangLabel.getLexicalForm())){
                     matchLabel(searchTokens, match, mainLangLabel);
                     matchedLabels.add(mainLangLabel.getLexicalForm());
@@ -843,7 +843,7 @@
         // * no label in the current language or
         // * no MATCH was found in the current language
         if(!matchedLangLabel || match.getMatch() == MATCH.NONE){
-            for(PlainLiteral defaultLangLabel : defaultLabels){
+            for(Literal defaultLangLabel : defaultLabels){
                 if(!matchedLabels.contains(defaultLangLabel.getLexicalForm())){
                     matchLabel(searchTokens, match, defaultLangLabel);
                     matchedLabels.add(defaultLangLabel.getLexicalForm());
@@ -857,7 +857,7 @@
      * @param suggestion
      * @param label
      */
-    private void matchLabel(List<TokenData> searchTokens, Suggestion suggestion, PlainLiteral label) {
+    private void matchLabel(List<TokenData> searchTokens, Suggestion suggestion, Literal label) {
 //        test.begin();
         String text = label.getLexicalForm();
         String lang = label.getLanguage() == null ? null : label.getLanguage().toString();
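
The label matching above works in tiers: labels in the current sentence language are tried first, then — only if nothing matched — labels in the document's main language, then labels in the configured default language. A condensed sketch of that fallback policy over plain strings; method and parameter names are invented for illustration:

    import java.util.List;

    public class LabelFallbackSketch {
        /** Pick the first non-empty tier: current language, main language, default language. */
        static List<String> selectTier(List<String> currentLang, List<String> mainLang,
                                       List<String> defaultLang) {
            if (!currentLang.isEmpty()) {
                return currentLang;   // labels in the sentence language win
            }
            if (!mainLang.isEmpty()) {
                return mainLang;      // then labels in the document's main language
            }
            return defaultLang;       // finally the configured default language
        }
    }
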
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LabelMatch.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LabelMatch.java
index 03a6b12..0b8fb1a 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LabelMatch.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LabelMatch.java
@@ -17,8 +17,8 @@
 package org.apache.stanbol.enhancer.engines.entitylinking.impl;
 
 import java.util.Comparator;
+import org.apache.clerezza.commons.rdf.Literal;
 
-import org.apache.clerezza.rdf.core.PlainLiteral;
 import org.apache.stanbol.enhancer.engines.entitylinking.impl.Suggestion.MATCH;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -35,7 +35,7 @@
     private int start = 0;
     private int span = 0;
     private int processableMatchCount = 0;
-    private PlainLiteral label;
+    private Literal label;
     private int labelTokenCount = 0;
     private double score;
     /**
@@ -56,11 +56,11 @@
      * @param start
      * @param span
      */
-    protected LabelMatch(int start, int span, PlainLiteral label){
+    protected LabelMatch(int start, int span, Literal label){
         this(start,span,span,span,1f,label,span,span);
     }
     
-    protected LabelMatch(int start, int span,int processableMatchCount, int matchCount, float tokenMatchScore,PlainLiteral label,int labelTokenCount, int coveredLabelTokenCount){
+    protected LabelMatch(int start, int span,int processableMatchCount, int matchCount, float tokenMatchScore,Literal label,int labelTokenCount, int coveredLabelTokenCount){
         if(start < 0){
             throw new IllegalArgumentException("parsed start position MUST BE >= 0!");
         }
@@ -136,7 +136,7 @@
      * based match for the given search tokens.
      * @return the label
      */
-    public PlainLiteral getMatchedLabel() {
+    public Literal getMatchedLabel() {
         return label;
     }
     /**
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LinkedEntity.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LinkedEntity.java
index 4f180e7..aa099cf 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LinkedEntity.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/LinkedEntity.java
@@ -22,7 +22,7 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.model.Section;
 import org.apache.stanbol.enhancer.nlp.model.Token;
 
@@ -128,7 +128,7 @@
         }
     }
     private final String selectedText;
-    private final Set<UriRef> types;
+    private final Set<IRI> types;
     private final List<Suggestion> suggestions;
     private final Collection<Occurrence> occurrences = new ArrayList<Occurrence>();
     private final Collection<Occurrence> unmodOccurrences = Collections.unmodifiableCollection(occurrences);
@@ -138,7 +138,7 @@
      * @param suggestions the entity suggestions
      * @param types the types of the linked entity. 
      */
-    protected LinkedEntity(String selectedText, List<Suggestion> suggestions, Set<UriRef> types) {
+    protected LinkedEntity(String selectedText, List<Suggestion> suggestions, Set<IRI> types) {
         this.suggestions = Collections.unmodifiableList(suggestions);
         this.selectedText = selectedText;
         this.types = Collections.unmodifiableSet(types);
@@ -152,7 +152,7 @@
      * @param types the types of the linked entity. 
      */
     protected LinkedEntity(Section section,Token startToken,Token endToken, 
-                           List<Suggestion> suggestions, Set<UriRef> types) {
+                           List<Suggestion> suggestions, Set<IRI> types) {
         this(startToken.getSpan().substring(startToken.getStart(), endToken.getEnd()),
             suggestions,types);
         addOccurrence(section, startToken,endToken);
@@ -169,7 +169,7 @@
-     * Getter for read only list of types
+     * Getter for the read-only set of types
      * @return the types
      */
-    public Set<UriRef> getTypes() {
+    public Set<IRI> getTypes() {
         return types;
     }
     /**
diff --git a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/Suggestion.java b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/Suggestion.java
index 369e0a5..88c8370 100644
--- a/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/Suggestion.java
+++ b/enhancement-engines/entitylinking/engine/src/main/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/Suggestion.java
@@ -27,9 +27,9 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
 
@@ -106,18 +106,18 @@
      * @param language the language
-     * @return the best match or {@link Suggestion#getMatchedLabel()} if non is found
+     * @return the best match or {@link Suggestion#getMatchedLabel()} if none is found
      */
-    public PlainLiteral getBestLabel(UriRef nameField, String language){
+    public Literal getBestLabel(IRI nameField, String language){
         Entity rep = getEntity();
         //start with the matched label -> so if we do not find a better one
         //we will use the matched!
-        PlainLiteral matchedLabel = getMatchedLabel();
-        PlainLiteral label = matchedLabel;
+        Literal matchedLabel = getMatchedLabel();
+        Literal label = matchedLabel;
-        // 1. check if the returned Entity does has a label -> if not return null
-        // add labels (set only a single label. Use "en" if available!
+        // 1. check if the returned Entity has a label -> if not return null
+        // add labels (set only a single label; use "en" if available)
-        Iterator<PlainLiteral> labels = rep.getText(nameField);
+        Iterator<Literal> labels = rep.getText(nameField);
         boolean matchFound = false;
         while (labels.hasNext() && !matchFound) {
-            PlainLiteral actLabel = labels.next();
+            Literal actLabel = labels.next();
             if(label == null){
                 label = actLabel;
             }
@@ -142,7 +142,7 @@
      * Shorthand for {@link #getLabelMatch()}.getMatchedLabel()
      * @return the label or <code>null</code> if {@link MATCH#NONE}
      */
-    public PlainLiteral getMatchedLabel() {
+    public Literal getMatchedLabel() {
         return getLabelMatch().getMatchedLabel();
     }
     protected void setMatch(MATCH matchType) {
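
A typical call site prefers the best display label and falls back to the entity id when the entity carries no labels at all; a sketch with an assumed rdfs:label name field:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.stanbol.enhancer.engines.entitylinking.impl.Suggestion;

    public class BestLabelSketch {
        static String bestLabelOrId(Suggestion suggestion) {
            IRI nameField = new IRI("http://www.w3.org/2000/01/rdf-schema#label");
            Literal label = suggestion.getBestLabel(nameField, "en");
            // fall back to the entity id when no label exists in any language
            return label != null ? label.getLexicalForm() : suggestion.getEntity().getId();
        }
    }
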
diff --git a/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngineTest.java b/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngineTest.java
index b4dd52f..11a8661 100644
--- a/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngineTest.java
+++ b/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/engine/EntityLinkingEngineTest.java
@@ -40,15 +40,15 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.engines.entitylinking.LabelTokenizer;
@@ -119,58 +119,58 @@
     
     static TestSearcherImpl searcher;
     
-    public static final UriRef NAME = new UriRef(NamespaceEnum.rdfs+"label");
-    public static final UriRef TYPE = new UriRef(NamespaceEnum.rdf+"type");
-    public static final UriRef REDIRECT = new UriRef(NamespaceEnum.rdfs+"seeAlso");
+    public static final IRI NAME = new IRI(NamespaceEnum.rdfs+"label");
+    public static final IRI TYPE = new IRI(NamespaceEnum.rdf+"type");
+    public static final IRI REDIRECT = new IRI(NamespaceEnum.rdfs+"seeAlso");
 
     @BeforeClass
     public static void setUpServices() throws IOException {
         searcher = new TestSearcherImpl(TEST_REFERENCED_SITE_NAME,NAME,new SimpleLabelTokenizer());
         //add some terms to the searcher
-        MGraph graph = new IndexedMGraph();
-        UriRef uri = new UriRef("urn:test:PatrickMarshall");
+        Graph graph = new IndexedGraph();
+        IRI uri = new IRI("urn:test:PatrickMarshall");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Patrick Marshall")));
         graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PERSON));
         searcher.addEntity(new Entity(uri, graph));
         
-        uri = new UriRef("urn:test:Geologist");
+        uri = new IRI("urn:test:Geologist");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Geologist")));
-        graph.add(new TripleImpl(uri, TYPE, new UriRef(NamespaceEnum.skos+"Concept")));
-        graph.add(new TripleImpl(uri, REDIRECT, new UriRef("urn:test:redirect:Geologist")));
+        graph.add(new TripleImpl(uri, TYPE, new IRI(NamespaceEnum.skos+"Concept")));
+        graph.add(new TripleImpl(uri, REDIRECT, new IRI("urn:test:redirect:Geologist")));
         searcher.addEntity(new Entity(uri, graph));
         //a redirect
-        uri = new UriRef("urn:test:redirect:Geologist");
+        uri = new IRI("urn:test:redirect:Geologist");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Geologe (redirect)")));
-        graph.add(new TripleImpl(uri, TYPE, new UriRef(NamespaceEnum.skos+"Concept")));
+        graph.add(new TripleImpl(uri, TYPE, new IRI(NamespaceEnum.skos+"Concept")));
         searcher.addEntity(new Entity(uri, graph));
 
-        uri = new UriRef("urn:test:NewZealand");
+        uri = new IRI("urn:test:NewZealand");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("New Zealand")));
         graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PLACE));
         searcher.addEntity(new Entity(uri, graph));
 
-        uri = new UriRef("urn:test:UniversityOfOtago");
+        uri = new IRI("urn:test:UniversityOfOtago");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("University of Otago")));
         graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_ORGANISATION));
         searcher.addEntity(new Entity(uri, graph));
         
-        uri = new UriRef("urn:test:University");
+        uri = new IRI("urn:test:University");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("University")));
-        graph.add(new TripleImpl(uri, TYPE, new UriRef(NamespaceEnum.skos+"Concept")));
+        graph.add(new TripleImpl(uri, TYPE, new IRI(NamespaceEnum.skos+"Concept")));
         searcher.addEntity(new Entity(uri, graph));
 
-        uri = new UriRef("urn:test:Otago");
+        uri = new IRI("urn:test:Otago");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Otago")));
         graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PLACE));
         searcher.addEntity(new Entity(uri, graph));
        //add a 2nd Otago (Place and University)
-        uri = new UriRef("urn:test:Otago_Texas");
+        uri = new IRI("urn:test:Otago_Texas");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Otago (Texas)")));
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("Otago")));
         graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_PLACE));
         searcher.addEntity(new Entity(uri, graph));
 
-        uri = new UriRef("urn:test:UniversityOfOtago_Texas");
+        uri = new IRI("urn:test:UniversityOfOtago_Texas");
         graph.add(new TripleImpl(uri, NAME, new PlainLiteralImpl("University of Otago (Texas)")));
         graph.add(new TripleImpl(uri, TYPE, OntologicalClasses.DBPEDIA_ORGANISATION));
         searcher.addEntity(new Entity(uri, graph));
@@ -254,7 +254,7 @@
     }
 
     public static ContentItem getContentItem(final String id, final String text) throws IOException {
-        return ciFactory.createContentItem(new UriRef(id),new StringSource(text));
+        return ciFactory.createContentItem(new IRI(id),new StringSource(text));
     }
     /**
      * This tests the EntityLinker functionality (if the expected Entities
@@ -396,7 +396,7 @@
         //compute the enhancements
         engine.computeEnhancements(ci);
         //validate the enhancement results
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(DC_CREATOR,LiteralFactory.getInstance().createTypedLiteral(
             engine.getClass().getName()));
@@ -410,18 +410,18 @@
         assertEquals("Five fise:EntityAnnotations are expected by this Test", 5, numEntityAnnotations);
     }
     /**
-     * Similar to {@link EnhancementStructureHelper#validateAllEntityAnnotations(org.apache.clerezza.rdf.core.TripleCollection, Map)}
+     * Similar to {@link EnhancementStructureHelper#validateAllEntityAnnotations(org.apache.clerezza.commons.rdf.Graph, Map)}
      * but in addition checks fise:confidence [0..1] and entityhub:site properties
      * @param ci
      * @param expectedValues
      * @return
      */
-    private static int validateAllEntityAnnotations(ContentItem ci, Map<UriRef,Resource> expectedValues){
+    private static int validateAllEntityAnnotations(ContentItem ci, Map<IRI,RDFTerm> expectedValues){
         Iterator<Triple> entityAnnotationIterator = ci.getMetadata().filter(null,
                 RDF_TYPE, ENHANCER_ENTITYANNOTATION);
         int entityAnnotationCount = 0;
         while (entityAnnotationIterator.hasNext()) {
-            UriRef entityAnnotation = (UriRef) entityAnnotationIterator.next().getSubject();
+            IRI entityAnnotation = (IRI) entityAnnotationIterator.next().getSubject();
             // test if selected Text is added
             validateEntityAnnotation(ci.getMetadata(), entityAnnotation, expectedValues);
             //validate also that the confidence is between [0..1]
@@ -438,12 +438,12 @@
 //                    +"',entityAnnotation "+entityAnnotation+")",
 //                    0.0 <= confidence.doubleValue());
             //Test the entityhub:site property (STANBOL-625)
-            UriRef ENTITYHUB_SITE = new UriRef(NamespaceEnum.entityhub+"site");
+            IRI ENTITYHUB_SITE = new IRI(NamespaceEnum.entityhub+"site");
             Iterator<Triple> entitySiteIterator = ci.getMetadata().filter(entityAnnotation, 
                 ENTITYHUB_SITE, null);
             assertTrue("Expected entityhub:site value is missing (entityAnnotation "
                     +entityAnnotation+")",entitySiteIterator.hasNext());
-            Resource siteResource = entitySiteIterator.next().getObject();
+            RDFTerm siteResource = entitySiteIterator.next().getObject();
             assertTrue("entityhub:site values MUST BE Literals", siteResource instanceof Literal);
             assertEquals("'"+TEST_REFERENCED_SITE_NAME+"' is expected as "
                 + "entityhub:site value", TEST_REFERENCED_SITE_NAME, 
diff --git a/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/TestSearcherImpl.java b/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/TestSearcherImpl.java
index e0b5ade..2033901 100644
--- a/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/TestSearcherImpl.java
+++ b/enhancement-engines/entitylinking/engine/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/impl/TestSearcherImpl.java
@@ -29,10 +29,10 @@
 import java.util.SortedMap;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.stanbol.enhancer.engines.entitylinking.Entity;
 import org.apache.stanbol.enhancer.engines.entitylinking.EntitySearcher;
 import org.apache.stanbol.enhancer.engines.entitylinking.LabelTokenizer;
@@ -40,29 +40,29 @@
 
 public class TestSearcherImpl implements EntitySearcher {
 
-    private final UriRef nameField;
+    private final IRI nameField;
     private final LabelTokenizer tokenizer;
     
     private SortedMap<String,Collection<Entity>> data = new TreeMap<String,Collection<Entity>>(String.CASE_INSENSITIVE_ORDER);
-    private Map<UriRef,Entity> entities = new HashMap<UriRef,Entity>();
-    private Map<UriRef,Collection<Resource>> originInfo;
+    private Map<IRI,Entity> entities = new HashMap<IRI,Entity>();
+    private Map<IRI,Collection<RDFTerm>> originInfo;
 
     
-    public TestSearcherImpl(String siteId,UriRef nameField, LabelTokenizer tokenizer) {
+    public TestSearcherImpl(String siteId,IRI nameField, LabelTokenizer tokenizer) {
         this.nameField = nameField;
         this.tokenizer = tokenizer;
         this.originInfo = Collections.singletonMap(
-            new UriRef(NamespaceEnum.entityhub+"site"), 
-            (Collection<Resource>)Collections.singleton(
-                (Resource)new PlainLiteralImpl(siteId)));
+            new IRI(NamespaceEnum.entityhub+"site"), 
+            (Collection<RDFTerm>)Collections.singleton(
+                (RDFTerm)new PlainLiteralImpl(siteId)));
     }
     
     
     public void addEntity(Entity rep){
         entities.put(rep.getUri(), rep);
-        Iterator<PlainLiteral> labels = rep.getText(nameField);
+        Iterator<Literal> labels = rep.getText(nameField);
         while(labels.hasNext()){
-            PlainLiteral label = labels.next();
+            Literal label = labels.next();
             for(String token : tokenizer.tokenize(label.getLexicalForm(),null)){
                 Collection<Entity> values = data.get(token);
                 if(values == null){
@@ -76,13 +76,13 @@
     }
     
     @Override
-    public Entity get(UriRef id, Set<UriRef> includeFields, String...lanuages) throws IllegalStateException {
+    public Entity get(IRI id, Set<IRI> includeFields, String...languages) throws IllegalStateException {
         return entities.get(id);
     }
 
     @Override
-    public Collection<? extends Entity> lookup(UriRef field,
-                                           Set<UriRef> includeFields,
+    public Collection<? extends Entity> lookup(IRI field,
+                                           Set<IRI> includeFields,
                                            List<String> search,
                                            String[] languages,Integer numResults, Integer offset) throws IllegalStateException {
         if(field.equals(nameField)){
@@ -124,7 +124,7 @@
     }
 
     @Override
-    public Map<UriRef,Collection<Resource>> getOriginInformation() {
+    public Map<IRI,Collection<RDFTerm>> getOriginInformation() {
         return originInfo;
     }
 }
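
The searcher above files each entity under every token of its name label, and the backing TreeMap compares keys with String.CASE_INSENSITIVE_ORDER, so lookups match regardless of case. A hypothetical usage sketch: TestSearcherImpl and SimpleLabelTokenizer are assumed to sit in the same test package, "testSite" is a placeholder site id, and the optional lookup arguments are assumed to tolerate the values shown:

    import java.util.Collection;
    import java.util.Collections;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.apache.stanbol.enhancer.engines.entitylinking.Entity;

    public class SearcherSketch {
        public static void main(String[] args) {
            IRI name = new IRI("http://www.w3.org/2000/01/rdf-schema#label");
            // "testSite" becomes the entityhub:site literal in the origin info map
            TestSearcherImpl searcher = new TestSearcherImpl("testSite", name,
                    new SimpleLabelTokenizer());

            Graph data = new IndexedGraph();
            IRI uri = new IRI("urn:test:Example");
            data.add(new TripleImpl(uri, name, new PlainLiteralImpl("Example Entity")));
            searcher.addEntity(new Entity(uri, data));

            // addEntity indexed the entity under the tokens "Example" and "Entity",
            // compared case-insensitively, so either token should find it
            Collection<? extends Entity> hits = searcher.lookup(name, null,
                    Collections.singletonList("entity"), new String[]{"en"},
                    Integer.valueOf(10), Integer.valueOf(0));
            System.out.println(hits.size() + " hit(s)");
        }
    }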
diff --git a/enhancement-engines/entitylinking/labeltokenizer-opennlp/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/labeltokenizer/opennlp/ClasspathDataFileProvider.java b/enhancement-engines/entitylinking/labeltokenizer-opennlp/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/labeltokenizer/opennlp/ClasspathDataFileProvider.java
index 5631cb4..7f36b72 100644
--- a/enhancement-engines/entitylinking/labeltokenizer-opennlp/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/labeltokenizer/opennlp/ClasspathDataFileProvider.java
+++ b/enhancement-engines/entitylinking/labeltokenizer-opennlp/src/test/java/org/apache/stanbol/enhancer/engines/entitylinking/labeltokenizer/opennlp/ClasspathDataFileProvider.java
@@ -69,7 +69,7 @@
         // load default OpenNLP models from classpath (embedded in the defaultdata bundle)
         final String resourcePath = RESOURCE_BASE_PATH + filename;
         final URL dataFile = getClass().getClassLoader().getResource(resourcePath);
-        //log.debug("Resource {} found: {}", (in == null ? "NOT" : ""), resourcePath);
+        //log.debug("RDFTerm {} found: {}", (in == null ? "NOT" : ""), resourcePath);
         return dataFile;
     }
 }
diff --git a/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/EnhancementRDFUtils.java b/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/EnhancementRDFUtils.java
index 1943166..a4e4c52 100644
--- a/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/EnhancementRDFUtils.java
+++ b/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/EnhancementRDFUtils.java
@@ -26,14 +26,14 @@
 import java.util.Collection;
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
 import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
 import org.apache.stanbol.entityhub.servicesapi.model.Reference;
@@ -59,7 +59,7 @@
      * @param literalFactory
      *            the LiteralFactory to use
      * @param graph
-     *            the MGraph to use
+     *            the Graph to use
      * @param contentItemId
      *            the contentItemId the enhancement is extracted from
      * @param relatedEnhancements
@@ -69,11 +69,11 @@
      * @param nameField the field used to extract the name
      * @param lang the preferred language to include or <code>null</code> if none
      */
-    public static UriRef writeEntityAnnotation(EnhancementEngine engine,
+    public static IRI writeEntityAnnotation(EnhancementEngine engine,
                                                LiteralFactory literalFactory,
-                                               MGraph graph,
-                                               UriRef contentItemId,
-                                               Collection<NonLiteral> relatedEnhancements,
+                                               Graph graph,
+                                               IRI contentItemId,
+                                               Collection<BlankNodeOrIRI> relatedEnhancements,
                                                Suggestion suggestion,
                                                String nameField, 
                                                String lang) {
@@ -104,13 +104,13 @@
             literal = new PlainLiteralImpl(label.getText(), new Language(label.getLanguage()));
         }
         // Now create the entityAnnotation
-        UriRef entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(graph, engine,
+        IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(graph, engine,
             contentItemId);
         // first relate this entity annotation to the text annotation(s)
-        for (NonLiteral enhancement : relatedEnhancements) {
+        for (BlankNodeOrIRI enhancement : relatedEnhancements) {
             graph.add(new TripleImpl(entityAnnotation, DC_RELATION, enhancement));
         }
-        UriRef entityUri = new UriRef(rep.getId());
+        IRI entityUri = new IRI(rep.getId());
         // add the link to the referred entity
         graph.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_REFERENCE, entityUri));
         // add the label parsed above
@@ -122,13 +122,13 @@
 
         Iterator<Reference> types = rep.getReferences(RDF_TYPE.getUnicodeString());
         while (types.hasNext()) {
-            graph.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_TYPE, new UriRef(types.next()
+            graph.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_TYPE, new IRI(types.next()
                     .getReference())));
         }
         //add the name of the ReferencedSite that manages the Entity
         if(suggestion.getEntity().getSite() != null){
             graph.add(new TripleImpl(entityAnnotation, 
-                new UriRef(RdfResourceEnum.site.getUri()), 
+                new IRI(RdfResourceEnum.site.getUri()), 
                 new PlainLiteralImpl(suggestion.getEntity().getSite())));
         }
         
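
A sketch of a typical call site for writeEntityAnnotation, assuming a ContentItem, a Suggestion and the related text annotations are already at hand (ContentItem is the servicesapi type whose getMetadata()/getUri() accessors appear in the engine hunks below; the WriteAnnotationSketch wrapper is hypothetical):

    import java.util.Collection;

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.core.LiteralFactory;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;

    public class WriteAnnotationSketch {
        // Writes one fise:EntityAnnotation for the suggestion and links it to
        // every related text annotation via dc:relation.
        static IRI annotate(EnhancementEngine engine, ContentItem ci,
                Collection<BlankNodeOrIRI> relatedTextAnnotations,
                Suggestion suggestion, String nameField, String lang) {
            return EnhancementRDFUtils.writeEntityAnnotation(engine,
                    LiteralFactory.getInstance(), ci.getMetadata(), ci.getUri(),
                    relatedTextAnnotations, suggestion, nameField, lang);
        }
    }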
diff --git a/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntity.java b/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntity.java
index 7adbf9c..f7dce8c 100644
--- a/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntity.java
+++ b/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntity.java
@@ -19,9 +19,9 @@
 import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_TYPE;
 import static org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_SELECTED_TEXT;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.lang.StringUtils;
 import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
 import org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses;
@@ -30,10 +30,10 @@
 
 public final class NamedEntity {
     private static final Logger log = LoggerFactory.getLogger(NamedEntity.class);
-    private final NonLiteral entity;
+    private final BlankNodeOrIRI entity;
     private final String name;
-    private final UriRef type;
-    private NamedEntity(NonLiteral entity, String name, UriRef type) {
+    private final IRI type;
+    private NamedEntity(BlankNodeOrIRI entity, String name, IRI type) {
         this.entity = entity;
         this.name = name;
         this.type = type;
@@ -42,7 +42,7 @@
      * Getter for the Node providing the information about that entity
      * @return the entity
      */
-    public final NonLiteral getEntity() {
+    public final BlankNodeOrIRI getEntity() {
         return entity;
     }
     /**
@@ -56,7 +56,7 @@
      * Getter for the type
      * @return the type
      */
-    public final UriRef getType() {
+    public final IRI getType() {
         return type;
     }
     @Override
@@ -79,7 +79,7 @@
      * @return the {@link NamedEntity} or <code>null</code> if the parsed
      * text annotation is missing required information.
      */
-    public static NamedEntity createFromTextAnnotation(TripleCollection graph, NonLiteral textAnnotation){
+    public static NamedEntity createFromTextAnnotation(Graph graph, BlankNodeOrIRI textAnnotation){
         String selected = EnhancementEngineHelper.getString(graph, textAnnotation, ENHANCER_SELECTED_TEXT);
         if (selected == null) {
             log.debug("Unable to create NamedEntity for TextAnnotation {} "
@@ -100,7 +100,7 @@
                     textAnnotation, selected);
             return null;
         }
-        UriRef type = EnhancementEngineHelper.getReference(graph, textAnnotation, DC_TYPE);
+        IRI type = EnhancementEngineHelper.getReference(graph, textAnnotation, DC_TYPE);
         if (type == null) {
             log.warn("Unable to process TextAnnotation {} because property {}"
                      + " is not present!",textAnnotation, DC_TYPE);
diff --git a/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntityTaggingEngine.java b/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntityTaggingEngine.java
index 11eaa17..6032169 100644
--- a/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntityTaggingEngine.java
+++ b/enhancement-engines/entitytagging/src/main/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/NamedEntityTaggingEngine.java
@@ -30,10 +30,10 @@
 import java.util.Map.Entry;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.lang.StringUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -338,10 +338,10 @@
         } else { // null indicates to use the Entityhub to lookup Entities
             site = null;
         }
-        MGraph graph = ci.getMetadata();
+        Graph graph = ci.getMetadata();
         LiteralFactory literalFactory = LiteralFactory.getInstance();
         // Retrieve the existing text annotations (requires read lock)
-        Map<NamedEntity,List<UriRef>> textAnnotations = new HashMap<NamedEntity,List<UriRef>>();
+        Map<NamedEntity,List<IRI>> textAnnotations = new HashMap<NamedEntity,List<IRI>>();
         // the language extracted for the parsed content or NULL if not
         // available
         String contentLanguage;
@@ -350,7 +350,7 @@
             contentLanguage = EnhancementEngineHelper.getLanguage(ci);
             for (Iterator<Triple> it = graph.filter(null, RDF_TYPE, TechnicalClasses.ENHANCER_TEXTANNOTATION); it
                     .hasNext();) {
-                UriRef uri = (UriRef) it.next().getSubject();
+                IRI uri = (IRI) it.next().getSubject();
                 if (graph.filter(uri, Properties.DC_RELATION, null).hasNext()) {
                     // this is not the most specific occurrence of this name:
                     // skip
@@ -360,10 +360,10 @@
                 if (namedEntity != null) {
                     // This is a first occurrence, collect any subsumed
                     // annotations
-                    List<UriRef> subsumed = new ArrayList<UriRef>();
+                    List<IRI> subsumed = new ArrayList<IRI>();
                     for (Iterator<Triple> it2 = graph.filter(null, Properties.DC_RELATION, uri); it2
                             .hasNext();) {
-                        subsumed.add((UriRef) it2.next().getSubject());
+                        subsumed.add((IRI) it2.next().getSubject());
                     }
                     textAnnotations.put(namedEntity, subsumed);
                 }
@@ -374,7 +374,7 @@
         // search the suggestions
         Map<NamedEntity,List<Suggestion>> suggestions = new HashMap<NamedEntity,List<Suggestion>>(
                 textAnnotations.size());
-        for (Entry<NamedEntity,List<UriRef>> entry : textAnnotations.entrySet()) {
+        for (Entry<NamedEntity,List<IRI>> entry : textAnnotations.entrySet()) {
             try {
                List<Suggestion> entitySuggestions = computeEntityRecommendations(site, entry.getKey(),
                    entry.getValue(), contentLanguage);
@@ -391,8 +391,8 @@
             RdfValueFactory factory = RdfValueFactory.getInstance();
             Map<String,Representation> entityData = new HashMap<String,Representation>();
             for (Entry<NamedEntity,List<Suggestion>> entitySuggestions : suggestions.entrySet()) {
-                List<UriRef> subsumed = textAnnotations.get(entitySuggestions.getKey());
-                List<NonLiteral> annotationsToRelate = new ArrayList<NonLiteral>(subsumed);
+                List<IRI> subsumed = textAnnotations.get(entitySuggestions.getKey());
+                List<BlankNodeOrIRI> annotationsToRelate = new ArrayList<BlankNodeOrIRI>(subsumed);
                 annotationsToRelate.add(entitySuggestions.getKey().getEntity());
                 for (Suggestion suggestion : entitySuggestions.getValue()) {
                     log.debug("Add Suggestion {} for {}", suggestion.getEntity().getId(),
@@ -443,7 +443,7 @@
      */
     protected final List<Suggestion> computeEntityRecommendations(Site site,
                                                                   NamedEntity namedEntity,
-                                                                  List<UriRef> subsumedAnnotations,
+                                                                  List<IRI> subsumedAnnotations,
                                                                   String language) throws EntityhubException {
         // First get the required properties for the parsed textAnnotation
         // ... and check the values
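
The dc:relation convention driving the collection loop above, restated as a small hypothetical predicate (Properties is the servicesapi constants class already imported by the engine):

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;

    final class MostSpecificCheck {
        // An annotation with an outgoing dc:relation is itself subsumed by the
        // more specific annotation it points to and is skipped; the engine later
        // collects the subsumed ones through the reverse filter(null, DC_RELATION, uri).
        static boolean isMostSpecific(Graph graph, BlankNodeOrIRI textAnnotation) {
            return !graph.filter(textAnnotation, Properties.DC_RELATION, null).hasNext();
        }
    }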
diff --git a/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/DbPediaDataFileProvider.java b/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/DbPediaDataFileProvider.java
index 62caf39..7f178f7 100644
--- a/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/DbPediaDataFileProvider.java
+++ b/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/DbPediaDataFileProvider.java
@@ -45,7 +45,7 @@
         if(resourceUri != null){
             return resourceUri.openStream();
         } else {
             throw new IOException("Resource '"+resource+"' not found");
         }
     }
 
diff --git a/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/TestEntityLinkingEnhancementEngine.java b/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/TestEntityLinkingEnhancementEngine.java
index 13ddfa7..4e5e3dd 100644
--- a/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/TestEntityLinkingEnhancementEngine.java
+++ b/enhancement-engines/entitytagging/src/test/java/org/apache/stanbol/enhancer/engines/entitytagging/impl/TestEntityLinkingEnhancementEngine.java
@@ -43,15 +43,14 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntityFactory;
@@ -148,7 +147,7 @@
      */
     private ContentItem initContentItem() throws IOException {
         ContentItem ci = ciFactory.createContentItem(
-            new UriRef("urn:iks-project:enhancer:text:content-item:person"),
+            new IRI("urn:iks-project:enhancer:text:content-item:person"),
             new StringSource(CONTEXT));
         //add three text annotations to be consumed by this test
         getTextAnnotation(ci, PERSON, CONTEXT, DBPEDIA_PERSON);
@@ -159,7 +158,7 @@
         return ci;
     }
 
-    public static void getTextAnnotation(ContentItem ci, String name,String context,UriRef type){
+    public static void getTextAnnotation(ContentItem ci, String name,String context,IRI type){
         String content;
         try {
             content = IOUtils.toString(ci.getStream(),"UTF-8");
@@ -169,8 +168,8 @@
         }
         RdfEntityFactory factory = RdfEntityFactory.createInstance(ci.getMetadata());
         TextAnnotation textAnnotation = factory.getProxy(
-                new UriRef("urn:iks-project:enhancer:test:text-annotation:"+randomUUID()), TextAnnotation.class);
-        textAnnotation.setCreator(new UriRef("urn:iks-project:enhancer:test:dummyEngine"));
+                new IRI("urn:iks-project:enhancer:test:text-annotation:"+randomUUID()), TextAnnotation.class);
+        textAnnotation.setCreator(new IRI("urn:iks-project:enhancer:test:dummyEngine"));
         textAnnotation.setCreated(new Date());
         textAnnotation.setSelectedText(name);
         textAnnotation.setSelectionContext(context);
@@ -229,7 +228,7 @@
     }
 
     private static int validateAllEntityAnnotations(NamedEntityTaggingEngine entityLinkingEngine, ContentItem ci){
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(DC_CREATOR,LiteralFactory.getInstance().createTypedLiteral(
             entityLinkingEngine.getClass().getName()));
@@ -239,7 +238,7 @@
         expectedValues.put(Properties.ENHANCER_CONFIDENCE, null);
         int entityAnnotationCount = 0;
         while (entityAnnotationIterator.hasNext()) {
-            UriRef entityAnnotation = (UriRef) entityAnnotationIterator.next().getSubject();
+            IRI entityAnnotation = (IRI) entityAnnotationIterator.next().getSubject();
             // test if selected Text is added
             validateEntityAnnotation(ci.getMetadata(), entityAnnotation, expectedValues);
             //fise:confidence now checked by EnhancementStructureHelper (STANBOL-630)
@@ -255,12 +254,12 @@
 //                    +"',entityAnnotation "+entityAnnotation+")",
 //                    0.0 <= confidence.doubleValue());
             //Test the entityhub:site property (STANBOL-625)
-            UriRef ENTITYHUB_SITE = new UriRef(RdfResourceEnum.site.getUri());
+            IRI ENTITYHUB_SITE = new IRI(RdfResourceEnum.site.getUri());
             Iterator<Triple> entitySiteIterator = ci.getMetadata().filter(entityAnnotation, 
                 ENTITYHUB_SITE, null);
             assertTrue("Expected entityhub:site value is missing (entityAnnotation "
                     +entityAnnotation+")",entitySiteIterator.hasNext());
-            Resource siteResource = entitySiteIterator.next().getObject();
+            RDFTerm siteResource = entitySiteIterator.next().getObject();
             assertTrue("entityhub:site values MUST BE Literals", siteResource instanceof Literal);
             assertEquals("'dbpedia' is expected as entityhub:site value", "dbpedia", ((Literal)siteResource).getLexicalForm());
             assertFalse("entityhub:site MUST HAVE only a single value", entitySiteIterator.hasNext());
diff --git a/enhancement-engines/geonames/src/main/java/org/apache/stanbol/enhancer/engines/geonames/impl/LocationEnhancementEngine.java b/enhancement-engines/geonames/src/main/java/org/apache/stanbol/enhancer/engines/geonames/impl/LocationEnhancementEngine.java
index 57a3ced..fee6465 100644
--- a/enhancement-engines/geonames/src/main/java/org/apache/stanbol/enhancer/engines/geonames/impl/LocationEnhancementEngine.java
+++ b/enhancement-engines/geonames/src/main/java/org/apache/stanbol/enhancer/engines/geonames/impl/LocationEnhancementEngine.java
@@ -44,12 +44,12 @@
 import java.util.Set;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -96,9 +96,9 @@
     /**
      * This maps geonames.org feature classes to dbpedia.org ontology classes
      */
-    public static final Map<FeatureClass, Collection<UriRef>> FEATURE_CLASS_CONCEPT_MAPPINGS;
+    public static final Map<FeatureClass, Collection<IRI>> FEATURE_CLASS_CONCEPT_MAPPINGS;
 
-    public static final Map<String, Collection<UriRef>> FEATURE_TYPE_CONCEPT_MAPPINGS;
+    public static final Map<String, Collection<IRI>> FEATURE_TYPE_CONCEPT_MAPPINGS;
 
     private static final Logger log = LoggerFactory.getLogger(LocationEnhancementEngine.class);
 
@@ -129,7 +129,7 @@
     @Property(doubleValue = DEFAULT_MIN_HIERARCHY_SCORE)
     public static final String MIN_HIERARCHY_SCORE = "org.apache.stanbol.enhancer.engines.geonames.locationEnhancementEngine.min-hierarchy-score";
 
-    public static final UriRef CONCEPT_GEONAMES_FEATURE = new UriRef(NamespaceEnum.geonames.toString() + "Feature");
+    public static final IRI CONCEPT_GEONAMES_FEATURE = new IRI(NamespaceEnum.geonames.toString() + "Feature");
     @Property(value = GeonamesAPIWrapper.DEFAULT_GEONAMES_ORG_WEBSERVICE_URL)
     public static final String GEONAMES_SERVER_URL = "org.apache.stanbol.enhancer.engines.geonames.locationEnhancementEngine.serverURL";
     /**
@@ -156,28 +156,28 @@
     protected GeonamesAPIWrapper geonamesService;
 
     static {
-        Map<FeatureClass, Collection<UriRef>> mappings = new EnumMap<FeatureClass, Collection<UriRef>>(FeatureClass.class);
+        Map<FeatureClass, Collection<IRI>> mappings = new EnumMap<FeatureClass, Collection<IRI>>(FeatureClass.class);
         //first add the concepts of the geonames ontology
         for (FeatureClass fc : FeatureClass.values()) {
-            List<UriRef> conceptMappings = new ArrayList<UriRef>();
+            List<IRI> conceptMappings = new ArrayList<IRI>();
             conceptMappings.add(CONCEPT_GEONAMES_FEATURE); //all things are features
             conceptMappings.add(DBPEDIA_PLACE); //all things are dbpedia places
             mappings.put(fc, conceptMappings);
         }
         //now add additional mappings to the dbpedia Ontology
-        UriRef populatedPlace = new UriRef(dbpedia_ont + "PopulatedPlace");
-        mappings.get(FeatureClass.P).addAll(Arrays.asList(populatedPlace, new UriRef(dbpedia_ont + "Settlement")));
-        mappings.get(FeatureClass.A).addAll(Arrays.asList(populatedPlace, new UriRef(dbpedia_ont + "AdministrativeRegion")));
-        mappings.get(FeatureClass.H).add(new UriRef(dbpedia_ont + "BodyOfWater"));
-        mappings.get(FeatureClass.R).add(new UriRef(dbpedia_ont + "Infrastructure"));
-        mappings.get(FeatureClass.S).add(new UriRef(dbpedia_ont + "Building"));
-        mappings.get(FeatureClass.T).add(new UriRef(dbpedia_ont + "Mountain"));
+        IRI populatedPlace = new IRI(dbpedia_ont + "PopulatedPlace");
+        mappings.get(FeatureClass.P).addAll(Arrays.asList(populatedPlace, new IRI(dbpedia_ont + "Settlement")));
+        mappings.get(FeatureClass.A).addAll(Arrays.asList(populatedPlace, new IRI(dbpedia_ont + "AdministrativeRegion")));
+        mappings.get(FeatureClass.H).add(new IRI(dbpedia_ont + "BodyOfWater"));
+        mappings.get(FeatureClass.R).add(new IRI(dbpedia_ont + "Infrastructure"));
+        mappings.get(FeatureClass.S).add(new IRI(dbpedia_ont + "Building"));
+        mappings.get(FeatureClass.T).add(new IRI(dbpedia_ont + "Mountain"));
         //now write the unmodifiable static final constant
         FEATURE_CLASS_CONCEPT_MAPPINGS = Collections.unmodifiableMap(mappings);
 
         //Mappings for known FeatureTypes
-        Map<String, Collection<UriRef>> typeMappings = new HashMap<String, Collection<UriRef>>();
-        Collection<UriRef> lakeTypes = Arrays.asList(new UriRef(dbpedia_ont + "Lake"));
+        Map<String, Collection<IRI>> typeMappings = new HashMap<String, Collection<IRI>>();
+        Collection<IRI> lakeTypes = Arrays.asList(new IRI(dbpedia_ont + "Lake"));
         typeMappings.put("H.LK", lakeTypes);
         typeMappings.put("H.LKS", lakeTypes);
         typeMappings.put("H.LKI", lakeTypes);
@@ -195,8 +195,8 @@
         typeMappings.put("H.LKSNI", lakeTypes);
         typeMappings.put("H.RSV", lakeTypes);
 
-        UriRef stream = new UriRef(dbpedia_ont + " Stream");
-        Collection<UriRef> canalTypes = Arrays.asList(stream, new UriRef(dbpedia_ont + "Canal"));
+        IRI stream = new IRI(dbpedia_ont + "Stream");
+        Collection<IRI> canalTypes = Arrays.asList(stream, new IRI(dbpedia_ont + "Canal"));
         typeMappings.put("H.CNL", canalTypes);
         typeMappings.put("H.CNLA", canalTypes);
         typeMappings.put("H.CNLB", canalTypes);
@@ -207,7 +207,7 @@
         typeMappings.put("H.CNLQ", canalTypes);
         typeMappings.put("H.CNLX", canalTypes);
 
-        Collection<UriRef> riverTypes = Arrays.asList(stream, new UriRef(dbpedia_ont + "River"));
+        Collection<IRI> riverTypes = Arrays.asList(stream, new IRI(dbpedia_ont + "River"));
         typeMappings.put("H.STM", riverTypes);
         typeMappings.put("H.STMI", riverTypes);
         typeMappings.put("H.STMB", riverTypes);
@@ -225,18 +225,18 @@
         typeMappings.put("H.STM", riverTypes);
         typeMappings.put("H.STM", riverTypes);
 
-        Collection<UriRef> caveTypes = Arrays.asList(new UriRef(dbpedia_ont + "Cave"));
+        Collection<IRI> caveTypes = Arrays.asList(new IRI(dbpedia_ont + "Cave"));
         typeMappings.put("H.LKSB", caveTypes);
         typeMappings.put("R.TNLN", caveTypes);
         typeMappings.put("S.CAVE", caveTypes);
         typeMappings.put("S.BUR", caveTypes);
 
-        Collection<UriRef> countryTypes = Arrays.asList(new UriRef(dbpedia_ont + "Country"));
+        Collection<IRI> countryTypes = Arrays.asList(new IRI(dbpedia_ont + "Country"));
         typeMappings.put("A.PCLI", countryTypes);
 
-        UriRef settlement = new UriRef(dbpedia_ont + "Settlement");
-        Collection<UriRef> cityTypes = Arrays.asList(settlement, new UriRef(dbpedia_ont + "City"));
-        Collection<UriRef> villageTypes = Arrays.asList(settlement, new UriRef(dbpedia_ont + "Village"));
+        IRI settlement = new IRI(dbpedia_ont + "Settlement");
+        Collection<IRI> cityTypes = Arrays.asList(settlement, new IRI(dbpedia_ont + "City"));
+        Collection<IRI> villageTypes = Arrays.asList(settlement, new IRI(dbpedia_ont + "Village"));
         typeMappings.put("P.PPLG", cityTypes);
         typeMappings.put("P.PPLC", cityTypes);
         typeMappings.put("P.PPLF", villageTypes);
@@ -314,8 +314,8 @@
 
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        UriRef contentItemId = ci.getUri();
-        MGraph graph = ci.getMetadata();
+        IRI contentItemId = ci.getUri();
+        Graph graph = ci.getMetadata();
         LiteralFactory literalFactory = LiteralFactory.getInstance();
         //get all the textAnnotations
         /*
@@ -324,10 +324,10 @@
          * this map is used to avoid multiple lookups for text annotations
          * selecting the same name.
          */
-        Map<String, Collection<NonLiteral>> name2placeEnhancementMap = new HashMap<String, Collection<NonLiteral>>();
+        Map<String, Collection<BlankNodeOrIRI>> name2placeEnhancementMap = new HashMap<String, Collection<BlankNodeOrIRI>>();
         Iterator<Triple> iterator = graph.filter(null, DC_TYPE, DBPEDIA_PLACE);
         while (iterator.hasNext()) {
-            NonLiteral placeEnhancement = iterator.next().getSubject(); //the enhancement annotating an place
+            BlankNodeOrIRI placeEnhancement = iterator.next().getSubject(); //the enhancement annotating a place
             //this can still be a TextAnnotation or an EntityAnnotation
             //so we need to filter for TextAnnotations
             Triple isTextAnnotation = new TripleImpl(placeEnhancement, RDF_TYPE, ENHANCER_TEXTANNOTATION);
@@ -338,9 +338,9 @@
                     log.warn("Unable to process TextAnnotation " + placeEnhancement
                             + " because property" + ENHANCER_SELECTED_TEXT + " is not present");
                 } else {
-                    Collection<NonLiteral> placeEnhancements = name2placeEnhancementMap.get(name);
+                    Collection<BlankNodeOrIRI> placeEnhancements = name2placeEnhancementMap.get(name);
                     if (placeEnhancements == null) {
-                        placeEnhancements = new ArrayList<NonLiteral>();
+                        placeEnhancements = new ArrayList<BlankNodeOrIRI>();
                         name2placeEnhancementMap.put(name, placeEnhancements);
                     }
                     placeEnhancements.add(placeEnhancement);
@@ -355,7 +355,7 @@
         if (getMaxLocationEnhancements() != null) {
             requestParams.put(SearchRequestPropertyEnum.maxRows, Collections.singleton(getMaxLocationEnhancements().toString()));
         }
-        for (Map.Entry<String, Collection<NonLiteral>> entry : name2placeEnhancementMap.entrySet()) {
+        for (Map.Entry<String, Collection<BlankNodeOrIRI>> entry : name2placeEnhancementMap.entrySet()) {
             List<Toponym> results;
             try {
                 requestParams.put(SearchRequestPropertyEnum.name, Collections.singleton(entry.getKey()));
@@ -391,7 +391,7 @@
                          */
                     }
                     //write the enhancement!
-                    NonLiteral locationEnhancement = writeEntityEnhancement(
+                    BlankNodeOrIRI locationEnhancement = writeEntityEnhancement(
                             contentItemId, graph, literalFactory, result, entry.getValue(), null, score);
                     log.debug("  > {}  >= {}",score,minHierarchyScore);
                     if (score != null && score >= minHierarchyScore) {
@@ -475,24 +475,24 @@
      * used to parse the score of the Toponym if this method is used to add a
      * parent Toponym.
      *
-     * @return The UriRef of the created entity enhancement
+     * @return The IRI of the created entity enhancement
      */
-    private UriRef writeEntityEnhancement(UriRef contentItemId, MGraph graph,
+    private IRI writeEntityEnhancement(IRI contentItemId, Graph graph,
             LiteralFactory literalFactory, Toponym toponym,
-            Collection<NonLiteral> relatedEnhancements, Collection<NonLiteral> requiresEnhancements,
+            Collection<BlankNodeOrIRI> relatedEnhancements, Collection<BlankNodeOrIRI> requiresEnhancements,
             Double score) {
-        UriRef entityRef = new UriRef("http://sws.geonames.org/" + toponym.getGeoNameId() + '/');
+        IRI entityRef = new IRI("http://sws.geonames.org/" + toponym.getGeoNameId() + '/');
         FeatureClass featureClass = toponym.getFeatureClass();
         log.debug("  > featureClass " + featureClass);
-        UriRef entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(graph, this, contentItemId);
+        IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(graph, this, contentItemId);
         // first relate this entity annotation to the text annotation(s)
         if (relatedEnhancements != null) {
-            for (NonLiteral related : relatedEnhancements) {
+            for (BlankNodeOrIRI related : relatedEnhancements) {
                 graph.add(new TripleImpl(entityAnnotation, DC_RELATION, related));
             }
         }
         if (requiresEnhancements != null) {
-            for (NonLiteral requires : requiresEnhancements) {
+            for (BlankNodeOrIRI requires : requiresEnhancements) {
                 graph.add(new TripleImpl(entityAnnotation, DC_REQUIRES, requires));
                 //STANBOL-767: also add dc:relation link
                 graph.add(new TripleImpl(entityAnnotation, DC_RELATION, requires));
@@ -505,22 +505,22 @@
             graph.add(new TripleImpl(entityAnnotation, ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(score)));
         }
         //now get all the entity types for the results
-        Set<UriRef> entityTypes = new HashSet<UriRef>();
+        Set<IRI> entityTypes = new HashSet<IRI>();
         //first based on the feature class
-        Collection<UriRef> featureClassTypes = FEATURE_CLASS_CONCEPT_MAPPINGS.get(featureClass);
+        Collection<IRI> featureClassTypes = FEATURE_CLASS_CONCEPT_MAPPINGS.get(featureClass);
         if (featureClassTypes != null) {
             entityTypes.addAll(featureClassTypes);
         }
         //second for the feature Code
         String featureCode = toponym.getFeatureCode();
-        Collection<UriRef> featureCodeTypes = FEATURE_TYPE_CONCEPT_MAPPINGS.get(featureCode);
+        Collection<IRI> featureCodeTypes = FEATURE_TYPE_CONCEPT_MAPPINGS.get(featureCode);
         if (featureCodeTypes != null) {
             entityTypes.addAll(featureCodeTypes);
         }
         //third add the feature Code as additional type
-        entityTypes.add(new UriRef(NamespaceEnum.geonames + featureClass.name() + '.' + featureCode));
+        entityTypes.add(new IRI(NamespaceEnum.geonames + featureClass.name() + '.' + featureCode));
         //finally add the type triples to the enhancement
-        for (UriRef entityType : entityTypes) {
+        for (IRI entityType : entityTypes) {
             graph.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_TYPE, entityType));
         }
         return entityAnnotation;
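
The type resolution above proceeds in three steps: feature-class mappings, then feature-code mappings, then the raw geonames code as an extra type. A compact sketch of the first two steps, assuming FeatureClass comes from the org.geonames client library the engine already uses; resolveTypes and ToponymTypeSketch are hypothetical:

    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Set;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.geonames.FeatureClass;

    public class ToponymTypeSketch {
        // E.g. feature class P with the code key "P.PPLC" should yield
        // geonames:Feature, dbpedia:Place, dbpedia:PopulatedPlace,
        // dbpedia:Settlement and dbpedia:City.
        static Set<IRI> resolveTypes(FeatureClass featureClass, String featureCode) {
            Set<IRI> entityTypes = new HashSet<IRI>();
            Collection<IRI> byClass =
                    LocationEnhancementEngine.FEATURE_CLASS_CONCEPT_MAPPINGS.get(featureClass);
            if (byClass != null) {
                entityTypes.addAll(byClass);
            }
            Collection<IRI> byCode =
                    LocationEnhancementEngine.FEATURE_TYPE_CONCEPT_MAPPINGS.get(featureCode);
            if (byCode != null) {
                entityTypes.addAll(byCode);
            }
            return entityTypes;
        }
    }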
diff --git a/enhancement-engines/geonames/src/test/java/org/apache/stanbol/enhancer/engines/geonames/impl/TestLocationEnhancementEngine.java b/enhancement-engines/geonames/src/test/java/org/apache/stanbol/enhancer/engines/geonames/impl/TestLocationEnhancementEngine.java
index 5281f83..aabb694 100644
--- a/enhancement-engines/geonames/src/test/java/org/apache/stanbol/enhancer/engines/geonames/impl/TestLocationEnhancementEngine.java
+++ b/enhancement-engines/geonames/src/test/java/org/apache/stanbol/enhancer/engines/geonames/impl/TestLocationEnhancementEngine.java
@@ -33,8 +33,8 @@
 import java.util.Map;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntityFactory;
@@ -102,10 +102,10 @@
 
     public static ContentItem getContentItem(final String id,
             final String text) throws IOException {
-    	return ciFactory.createContentItem(new UriRef(id), new StringSource(text));
+    	return ciFactory.createContentItem(new IRI(id), new StringSource(text));
     }
 
-    public static void getTextAnnotation(ContentItem ci, String name, String context, UriRef type) {
+    public static void getTextAnnotation(ContentItem ci, String name, String context, IRI type) {
         String content;
         try {
             content = IOUtils.toString(ci.getStream(),"UTF-8");
@@ -114,8 +114,8 @@
             content = "";
         }
         RdfEntityFactory factory = RdfEntityFactory.createInstance(ci.getMetadata());
-        TextAnnotation testAnnotation = factory.getProxy(new UriRef("urn:org.apache:stanbol.enhancer:test:text-annotation:person"), TextAnnotation.class);
-        testAnnotation.setCreator(new UriRef("urn:org.apache:stanbol.enhancer:test:dummyEngine"));
+        TextAnnotation testAnnotation = factory.getProxy(new IRI("urn:org.apache:stanbol.enhancer:test:text-annotation:person"), TextAnnotation.class);
+        testAnnotation.setCreator(new IRI("urn:org.apache:stanbol.enhancer:test:dummyEngine"));
         testAnnotation.setCreated(new Date());
         testAnnotation.setSelectedText(name);
         testAnnotation.setSelectionContext(context);
@@ -144,7 +144,7 @@
             RemoteServiceHelper.checkServiceUnavailable(e, "overloaded with requests");
             return;
         }
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
             locationEnhancementEngine.getClass().getName()));
diff --git a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/HtmlExtractorEngine.java b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/HtmlExtractorEngine.java
index 84510d5..bf873d7 100644
--- a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/HtmlExtractorEngine.java
+++ b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/HtmlExtractorEngine.java
@@ -25,9 +25,9 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
@@ -153,7 +153,7 @@
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
         HtmlExtractor extractor = new HtmlExtractor(htmlExtractorRegistry, htmlParser);
-        MGraph model = new SimpleMGraph();
+        Graph model = new SimpleGraph();
         ci.getLock().readLock().lock();
         try {
             extractor.extract(ci.getUri().getUnicodeString(), ci.getStream(),null, ci.getMimeType(), model);
@@ -166,7 +166,7 @@
         ClerezzaRDFUtils.urifyBlankNodes(model);
         // make the model single rooted
         if (singleRootRdf) {
-            ClerezzaRDFUtils.makeConnected(model,ci.getUri(),new UriRef(NIE_NS+"contains"));
+            ClerezzaRDFUtils.makeConnected(model,ci.getUri(),new IRI(NIE_NS+"contains"));
         }
         //add the extracted triples to the metadata of the ContentItem
         ci.getLock().writeLock().lock();
diff --git a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/BundleURIResolver.java b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/BundleURIResolver.java
index 698c108..c88f035 100644
--- a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/BundleURIResolver.java
+++ b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/BundleURIResolver.java
@@ -55,7 +55,7 @@
                 String path = baseURI.getPath();
                 resource = path.substring(1, path.lastIndexOf('/') + 1) + href;
                 newUrl = BUNDLE.getEntry(resource);
                 LOG.debug("Resource: " + resource);
                 if (newUrl != null) {
                     return new StreamSource(newUrl.openStream(), newUrl.toString());
                 } else {
diff --git a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/ClerezzaRDFUtils.java b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/ClerezzaRDFUtils.java
index 08a3b4f..081c50c 100644
--- a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/ClerezzaRDFUtils.java
+++ b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/ClerezzaRDFUtils.java
@@ -23,14 +23,14 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -50,30 +50,30 @@
 
     private static final Logger LOG = LoggerFactory.getLogger(ClerezzaRDFUtils.class);
     
-    public static void urifyBlankNodes(MGraph model) {
-        HashMap<BNode,UriRef> blankNodeMap = new HashMap<BNode,UriRef>();
-        MGraph remove = new SimpleMGraph();
-        MGraph add = new SimpleMGraph();
+    public static void urifyBlankNodes(Graph model) {
+        HashMap<BlankNode,IRI> blankNodeMap = new HashMap<BlankNode,IRI>();
+        Graph remove = new SimpleGraph();
+        Graph add = new SimpleGraph();
         for (Triple t: model) {
-            NonLiteral subj = t.getSubject();
-            Resource obj = t.getObject();
-            UriRef pred = t.getPredicate();
+            BlankNodeOrIRI subj = t.getSubject();
+            RDFTerm obj = t.getObject();
+            IRI pred = t.getPredicate();
             boolean match = false;
-            if (subj instanceof BNode) {
+            if (subj instanceof BlankNode) {
                 match = true;
-                UriRef ru = blankNodeMap.get(subj);
+                IRI ru = blankNodeMap.get(subj);
                 if (ru == null) {
                     ru = createRandomUri();
-                    blankNodeMap.put((BNode)subj, ru);
+                    blankNodeMap.put((BlankNode)subj, ru);
                 }
                 subj = ru;
             }
-            if (obj instanceof BNode)  {
+            if (obj instanceof BlankNode)  {
                 match = true;
-                UriRef ru = blankNodeMap.get(obj);
+                IRI ru = blankNodeMap.get(obj);
                 if (ru == null) {
                     ru = createRandomUri();
-                    blankNodeMap.put((BNode)obj, ru);
+                    blankNodeMap.put((BlankNode)obj, ru);
                 }
                 obj = ru;
             }
@@ -86,31 +86,31 @@
         model.addAll(add);
     }
     
-    public static UriRef createRandomUri() {
-        return new UriRef("urn:rnd:"+randomUUID());
+    public static IRI createRandomUri() {
+        return new IRI("urn:rnd:"+randomUUID());
     }
     
-    public static void makeConnected(MGraph model, NonLiteral root, UriRef property) {
-        Set<NonLiteral> roots = findRoots(model);
+    public static void makeConnected(Graph model, BlankNodeOrIRI root, IRI property) {
+        Set<BlankNodeOrIRI> roots = findRoots(model);
         LOG.debug("Roots: {}",roots.size());
         boolean found = roots.remove(root);
         //connect all hanging roots to root by property
-        for (NonLiteral n: roots) {
+        for (BlankNodeOrIRI n: roots) {
             model.add(new TripleImpl(root,property,n));
         }
     }
     
-    public static Set<NonLiteral> findRoots(MGraph model) {
-        Set<NonLiteral> roots = new HashSet<NonLiteral>();
-        Set<NonLiteral> visited = new HashSet<NonLiteral>();
+    public static Set<BlankNodeOrIRI> findRoots(Graph model) {
+        Set<BlankNodeOrIRI> roots = new HashSet<BlankNodeOrIRI>();
+        Set<BlankNodeOrIRI> visited = new HashSet<BlankNodeOrIRI>();
         for (Triple t: model) {
-            NonLiteral subj = t.getSubject();
+            BlankNodeOrIRI subj = t.getSubject();
             findRoot(model, subj, roots, visited);
         }
         return roots;
     }
    
-    private static void findRoot(MGraph model, NonLiteral node, Set<NonLiteral> roots, Set<NonLiteral> visited) {
+    private static void findRoot(Graph model, BlankNodeOrIRI node, Set<BlankNodeOrIRI> roots, Set<BlankNodeOrIRI> visited) {
         if (visited.contains(node)) {
             return;
         }
@@ -124,7 +124,7 @@
         }
         while (it.hasNext()) {
             Triple t = it.next();
-            NonLiteral subj = t.getSubject();
+            BlankNodeOrIRI subj = t.getSubject();
             findRoot(model, subj, roots, visited);
         }
     }
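
Together the two helpers above normalise extractor output: blank nodes become stable urn:rnd: IRIs, and any remaining hanging roots are attached to a chosen document node. A usage sketch (RootingSketch and the urn:test IRIs are placeholders; ClerezzaRDFUtils is assumed to be on the classpath of the same package):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class RootingSketch {
        public static void main(String[] args) {
            Graph model = new SimpleGraph();
            IRI root = new IRI("urn:test:document");
            // ... fill model with extracted triples, possibly containing blank nodes ...

            // every BlankNode is replaced by a generated urn:rnd:<uuid> IRI
            ClerezzaRDFUtils.urifyBlankNodes(model);
            // nodes without incoming links count as roots; each one left over
            // is linked from the document node via the given property
            ClerezzaRDFUtils.makeConnected(model, root, new IRI("urn:test:contains"));
        }
    }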
diff --git a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractionComponent.java b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractionComponent.java
index 0a65faf..906e851 100644
--- a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractionComponent.java
+++ b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractionComponent.java
@@ -18,7 +18,7 @@
 
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.w3c.dom.Document;
 
 /**
@@ -29,7 +29,7 @@
  */
 public interface HtmlExtractionComponent {
 
-    void extract(String id, Document doc, Map<String, Object> params, MGraph result)
+    void extract(String id, Document doc, Map<String, Object> params, Graph result)
             throws ExtractorException;
 
 }
diff --git a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractor.java b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractor.java
index 511a6d2..ea46441 100644
--- a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractor.java
+++ b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/HtmlExtractor.java
@@ -26,9 +26,9 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
@@ -72,7 +72,7 @@
 
     public void extract(String id,
             InputStream input, Charset charset, String mimeType,
-            MGraph result)
+            Graph result)
             throws ExtractorException {
         if (registry == null)
             return;
@@ -121,8 +121,8 @@
             InputStream input = new FileInputStream(file);
             Charset charset = Charset.forName("UTF-8");
             String mimeType = "text/html";
-            UriRef uri = new UriRef(file.toURI().toString());
-            MGraph container = new SimpleMGraph();
+            IRI uri = new IRI(file.toURI().toString());
+            Graph container = new SimpleGraph();
             inst.extract(uri.getUnicodeString(), input, charset, mimeType, container);
             System.out.println("Model for " + args[i]);
             //TODO
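A minimal caller-side sketch of the migrated extract() signature above; the HtmlExtractor construction is omitted and passed in, and the document URI is made up:

    import java.io.ByteArrayInputStream;
    import java.nio.charset.Charset;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.enhancer.engines.htmlextractor.impl.ExtractorException;
    import org.apache.stanbol.enhancer.engines.htmlextractor.impl.HtmlExtractor;

    public class ExtractSketch {
        public static Graph extract(HtmlExtractor extractor, byte[] html) throws ExtractorException {
            Graph container = new SimpleGraph();   // the result container is now a mutable Graph
            IRI uri = new IRI("urn:test:doc");     // made-up document URI
            extractor.extract(uri.getUnicodeString(), new ByteArrayInputStream(html),
                    Charset.forName("UTF-8"), "text/html", container);
            return container;
        }
    }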
diff --git a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/XsltExtractor.java b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/XsltExtractor.java
index 6fc58fe..119bd4d 100644
--- a/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/XsltExtractor.java
+++ b/enhancement-engines/htmlextractor/src/main/java/org/apache/stanbol/enhancer/engines/htmlextractor/impl/XsltExtractor.java
@@ -36,8 +36,8 @@
 import javax.xml.transform.stream.StreamResult;
 import javax.xml.transform.stream.StreamSource;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -126,7 +126,7 @@
     }
 
     public synchronized void extract(String id, Document doc, Map<String, Object> params,
-            MGraph result)
+            Graph result)
             throws ExtractorException {
 
         if (params == null) {
@@ -145,7 +145,7 @@
             }
             InputStream reader = new ByteArrayInputStream(writer.toByteArray());
             Parser rdfParser = Parser.getInstance();
-            Graph graph = rdfParser.parse(reader, this.syntax);
+            ImmutableGraph graph = rdfParser.parse(reader, this.syntax);
             result.addAll(graph);
         } catch (TransformerException e) {
             throw new ExtractorException(e.getMessage(), e);
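The core of the change above is the split of the old Graph/MGraph pair: the parser now hands back a read-only ImmutableGraph whose triples are copied into the caller's mutable Graph. A standalone sketch under that reading; the RDF/XML format identifier is an assumption, where the engine passes its configured this.syntax:

    import java.io.ByteArrayInputStream;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.core.serializedform.Parser;

    public class ParseSketch {
        public static Graph parse(byte[] rdfXml) {
            // parse() now returns the read-only ImmutableGraph (the old clerezza Graph)
            ImmutableGraph parsed = Parser.getInstance().parse(
                    new ByteArrayInputStream(rdfXml), "application/rdf+xml");
            Graph result = new SimpleGraph();  // the old MGraph is now simply Graph
            result.addAll(parsed);             // Graph is a Collection<Triple>
            return result;
        }
    }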
diff --git a/enhancement-engines/htmlextractor/src/test/java/org/apache/stanbol/enhancer/engines/htmlextractor/TestHtmlExtractor.java b/enhancement-engines/htmlextractor/src/test/java/org/apache/stanbol/enhancer/engines/htmlextractor/TestHtmlExtractor.java
index 91aeb37..2b14cb4 100644
--- a/enhancement-engines/htmlextractor/src/test/java/org/apache/stanbol/enhancer/engines/htmlextractor/TestHtmlExtractor.java
+++ b/enhancement-engines/htmlextractor/src/test/java/org/apache/stanbol/enhancer/engines/htmlextractor/TestHtmlExtractor.java
@@ -24,11 +24,11 @@
 import java.io.InputStream;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.stanbol.enhancer.engines.htmlextractor.impl.ClerezzaRDFUtils;
 import org.apache.stanbol.enhancer.engines.htmlextractor.impl.ExtractorException;
 import org.apache.stanbol.enhancer.engines.htmlextractor.impl.HtmlExtractionRegistry;
@@ -78,7 +78,7 @@
     @Test
     public void testRdfaExtraction() throws Exception {
         HtmlExtractor extractor = new HtmlExtractor(registry, parser);
-        MGraph model = new SimpleMGraph();
+        Graph model = new SimpleGraph();
         String testFile = "test-rdfa.html";
         // extract text from RDFa annotated html
         InputStream in = getResourceAsStream(testFile);
@@ -91,7 +91,7 @@
         LOG.debug("RDFa triples: {}",tripleCounter);
         printTriples(model);
         assertEquals(8, tripleCounter);
-        ClerezzaRDFUtils.makeConnected(model, new UriRef("file://" + testFile), new UriRef(NIE_NS+"contains"));
+        ClerezzaRDFUtils.makeConnected(model, new IRI("file://" + testFile), new IRI(NIE_NS+"contains"));
     }
     
     /** This tests some Microformat extraction
@@ -102,7 +102,7 @@
     @Test
     public void testMFExtraction() throws Exception {
         HtmlExtractor extractor = new HtmlExtractor(registry, parser);
-        MGraph model = new SimpleMGraph();
+        Graph model = new SimpleGraph();
         String testFile = "test-MF.html";
 
         // extract text from RDFa annotated html
@@ -116,7 +116,7 @@
         LOG.debug("Microformat triples: {}",tripleCounter);
         printTriples(model);
         assertEquals(127, tripleCounter);
-        ClerezzaRDFUtils.makeConnected(model, new UriRef("file://" + testFile), new UriRef(NIE_NS+"contains"));
+        ClerezzaRDFUtils.makeConnected(model, new IRI("file://" + testFile), new IRI(NIE_NS+"contains"));
     }
 
     /** This test some extraction of microdata from an HTML-5 document
@@ -126,7 +126,7 @@
     @Test
     public void testMicrodataExtraction() throws Exception {
       HtmlExtractor extractor = new HtmlExtractor(registry, parser);
-      MGraph model = new SimpleMGraph();
+      Graph model = new SimpleGraph();
       String testFile = "test-microdata.html";
 
       // extract text from RDFa annotated html
@@ -140,7 +140,7 @@
       LOG.debug("Microdata triples: {}",tripleCounter);
       printTriples(model);
       assertEquals(91, tripleCounter);
-      ClerezzaRDFUtils.makeConnected(model, new UriRef("file://" + testFile), new UriRef(NIE_NS+"contains"));
+      ClerezzaRDFUtils.makeConnected(model, new IRI("file://" + testFile), new IRI(NIE_NS+"contains"));
     }
     
     /** This tests the merging of disconnected graphs under a single root
@@ -150,7 +150,7 @@
     @Test
     public void testRootExtraction() throws Exception {
         HtmlExtractor extractor = new HtmlExtractor(registry, parser);
-        MGraph model = new SimpleMGraph();
+        Graph model = new SimpleGraph();
         String testFile = "test-MultiRoot.html";
 
         // extract text from RDFa annotated html
@@ -163,9 +163,9 @@
         int tripleCounter = model.size();
         LOG.debug("Triples: {}",tripleCounter);
         printTriples(model);
-        Set<NonLiteral> roots = ClerezzaRDFUtils.findRoots(model);
+        Set<BlankNodeOrIRI> roots = ClerezzaRDFUtils.findRoots(model);
         assertTrue(roots.size() > 1);
-        ClerezzaRDFUtils.makeConnected(model, new UriRef("file://" + testFile), new UriRef(NIE_NS+"contains"));
+        ClerezzaRDFUtils.makeConnected(model, new IRI("file://" + testFile), new IRI(NIE_NS+"contains"));
         roots = ClerezzaRDFUtils.findRoots(model);
         assertEquals(1,roots.size());
     }
@@ -175,7 +175,7 @@
                 testResultFile);
     }
 
-    private void printTriples(MGraph model) {
+    private void printTriples(Graph model) {
         for (Triple t: model) {
             LOG.debug(t.toString());
         }
diff --git a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngine.java b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngine.java
index b51f1dc..1b79894 100644
--- a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngine.java
+++ b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngine.java
@@ -30,13 +30,13 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -350,7 +350,7 @@
         if(isOfflineMode() && !entitySearcher.supportsOfflineMode()){
             throw new EngineException("Offline mode is not supported by the Component used to lookup Entities");
         }
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if(contentPart == null){
             throw new IllegalStateException("No ContentPart with a supported Mime Type"
                     + "found for ContentItem "+ci.getUri()+"(supported: '"
@@ -416,12 +416,12 @@
         if(language != null && !language.isEmpty()){
             languageObject = new Language(language);
         }
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         for(LinkedEntity linkedEntity : linkedEntities){
-            Collection<UriRef> textAnnotations = new ArrayList<UriRef>(linkedEntity.getOccurrences().size());
+            Collection<IRI> textAnnotations = new ArrayList<IRI>(linkedEntity.getOccurrences().size());
             //first create the TextAnnotations for the Occurrences
             for(Occurrence occurrence : linkedEntity.getOccurrences()){
-                UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
                 textAnnotations.add(textAnnotation);
                 metadata.add(new TripleImpl(textAnnotation, 
                     Properties.ENHANCER_START, 
@@ -438,14 +438,14 @@
                 metadata.add(new TripleImpl(textAnnotation, 
                     Properties.ENHANCER_CONFIDENCE, 
                     literalFactory.createTypedLiteral(linkedEntity.getScore())));
-                for(UriRef dcType : linkedEntity.getTypes()){
+                for(IRI dcType : linkedEntity.getTypes()){
                     metadata.add(new TripleImpl(
                         textAnnotation, Properties.DC_TYPE, dcType));
                 }
             }
             //now the EntityAnnotations for the Suggestions
             for(Suggestion suggestion : linkedEntity.getSuggestions()){
-                UriRef entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
+                IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
                 //should we use the label used for the match, or search the
                 //representation for the best label ... currently its the matched one
                 Text label = suggestion.getBestLabel(linkerConfig.getNameField(),language);
@@ -457,21 +457,21 @@
                                     new Language(label.getLanguage()))));
                 metadata.add(new TripleImpl(entityAnnotation, 
                     Properties.ENHANCER_ENTITY_REFERENCE, 
-                    new UriRef(suggestion.getRepresentation().getId())));
+                    new IRI(suggestion.getRepresentation().getId())));
                 Iterator<Reference> suggestionTypes = suggestion.getRepresentation().getReferences(linkerConfig.getTypeField());
                 while(suggestionTypes.hasNext()){
                     metadata.add(new TripleImpl(entityAnnotation, 
-                        Properties.ENHANCER_ENTITY_TYPE, new UriRef(suggestionTypes.next().getReference())));
+                        Properties.ENHANCER_ENTITY_TYPE, new IRI(suggestionTypes.next().getReference())));
                 }
                 metadata.add(new TripleImpl(entityAnnotation,
                     Properties.ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(suggestion.getScore())));
-                for(UriRef textAnnotation : textAnnotations){
+                for(IRI textAnnotation : textAnnotations){
                     metadata.add(new TripleImpl(entityAnnotation, 
                         Properties.DC_RELATION, textAnnotation));
                 }
                 //add the name of the ReferencedSite providing this suggestion
                 metadata.add(new TripleImpl(entityAnnotation, 
-                    new UriRef(RdfResourceEnum.site.getUri()), 
+                    new IRI(RdfResourceEnum.site.getUri()), 
                     new PlainLiteralImpl(referencedSiteName)));
                 //in case dereferencing of Entities is enabled we need also to
                 //add the RDF data for entities
@@ -493,7 +493,7 @@
     private String extractLanguage(ContentItem ci) {
         String lang = EnhancementEngineHelper.getLanguage(ci);
 //        if(lang != null){
-//        MGraph metadata = ci.getMetadata();
+//        Graph metadata = ci.getMetadata();
 //        Iterator<Triple> langaugeEnhancementCreatorTriples = 
 //            metadata.filter(null, Properties.DC_CREATOR, LANG_ID_ENGINE_NAME);
 //        if(langaugeEnhancementCreatorTriples.hasNext()){
@@ -867,14 +867,14 @@
                             sourceTypes[0],o);
                         continue configs;
                     }
-                    UriRef targetUri = new UriRef(targetType);
+                    IRI targetUri = new IRI(targetType);
                     for(String sourceType : sourceTypes){
                         if(!sourceType.isEmpty()){
                             sourceType = NamespaceMappingUtils.getConfiguredUri(
                                 nsPrefixService,TYPE_MAPPINGS,sourceType.trim()); //support for ns:localName
                             try { //validate
                                 new URI(sourceType);
-                                UriRef old = linkerConfig.setTypeMapping(sourceType, targetUri);
+                                IRI old = linkerConfig.setTypeMapping(sourceType, targetUri);
                                 if(old == null){
                                     log.info(" > add type mapping {} > {}", sourceType,targetType);
                                 } else {
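The writing pattern migrated above, reduced to a standalone sketch: enhancement metadata is a mutable Graph and values are added as TripleImpl instances. The annotation and property IRIs are written out by hand here for illustration, where the engine mints the annotation via EnhancementEngineHelper and uses the Properties constants:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.core.LiteralFactory;

    public class AnnotationSketch {
        public static void main(String[] args) {
            Graph metadata = new SimpleGraph();                  // stands in for ci.getMetadata()
            IRI entityAnnotation = new IRI("urn:enhancement:1"); // normally minted by the helper
            // fise property IRIs, spelled out for the sketch
            IRI entityLabel = new IRI("http://fise.iks-project.eu/ontology/entity-label");
            IRI confidence = new IRI("http://fise.iks-project.eu/ontology/confidence");
            metadata.add(new TripleImpl(entityAnnotation, entityLabel,
                    new PlainLiteralImpl("Paris", new Language("en"))));
            metadata.add(new TripleImpl(entityAnnotation, confidence,
                    LiteralFactory.getInstance().createTypedLiteral(0.87)));
        }
    }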
diff --git a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinker.java b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinker.java
index 5fa0b29..4c5d2e4 100644
--- a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinker.java
+++ b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinker.java
@@ -27,7 +27,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.opennlp.TextAnalyzer.AnalysedText.Token;
 import org.apache.stanbol.enhancer.engines.keywordextraction.impl.EntityLinkerConfig.RedirectProcessingMode;
 import org.apache.stanbol.enhancer.engines.keywordextraction.impl.Suggestion.MATCH;
@@ -205,17 +205,17 @@
      * @param conceptTypes The list of suggestions
      * @return the types values for the {@link LinkedEntity}
      */
-    private Set<UriRef> getLinkedEntityTypes(Collection<Suggestion> suggestions){
+    private Set<IRI> getLinkedEntityTypes(Collection<Suggestion> suggestions){
         Collection<String> conceptTypes = new HashSet<String>();
         for(Suggestion suggestion : suggestions){
             for(Iterator<Reference> types = 
                 suggestion.getRepresentation().getReferences(config.getTypeField()); 
                 types.hasNext();conceptTypes.add(types.next().getReference()));
         }
-        Map<String,UriRef> typeMappings = config.getTypeMappings();
-        Set<UriRef> dcTypes = new HashSet<UriRef>();
+        Map<String,IRI> typeMappings = config.getTypeMappings();
+        Set<IRI> dcTypes = new HashSet<IRI>();
         for(String conceptType : conceptTypes){
-            UriRef dcType = typeMappings.get(conceptType);
+            IRI dcType = typeMappings.get(conceptType);
             if(dcType != null){
                 dcTypes.add(dcType);
             }
diff --git a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinkerConfig.java b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinkerConfig.java
index c9c24da..a76a029 100644
--- a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinkerConfig.java
+++ b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/EntityLinkerConfig.java
@@ -25,7 +25,7 @@
 import opennlp.tools.chunker.Chunker;
 import opennlp.tools.postag.POSTagger;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.opennlp.TextAnalyzer.AnalysedText;
 import org.apache.stanbol.commons.opennlp.TextAnalyzer.AnalysedText.Chunk;
 import org.apache.stanbol.commons.opennlp.TextAnalyzer.AnalysedText.Token;
@@ -95,10 +95,10 @@
      * Default mapping for Concept types to dc:type values added for
      * TextAnnotations.
      */
-    public static final Map<String,UriRef> DEFAULT_ENTITY_TYPE_MAPPINGS;
+    public static final Map<String,IRI> DEFAULT_ENTITY_TYPE_MAPPINGS;
     
     static { //the default mappings for the three types used by the Stanbol Enhancement Structure
-        Map<String,UriRef> mappings = new HashMap<String,UriRef>();
+        Map<String,IRI> mappings = new HashMap<String,IRI>();
         mappings.put(OntologicalClasses.DBPEDIA_ORGANISATION.getUnicodeString(), 
             OntologicalClasses.DBPEDIA_ORGANISATION);
         mappings.put("http://dbpedia.org/ontology/Newspaper", OntologicalClasses.DBPEDIA_ORGANISATION);
@@ -120,22 +120,22 @@
 
         mappings.put(OntologicalClasses.DBPEDIA_ORGANISATION.getUnicodeString(), 
             OntologicalClasses.DBPEDIA_ORGANISATION);
-//        UriRef DRUG = new UriRef(NamespaceEnum.drugbank+"drugs");
+//        IRI DRUG = new IRI(NamespaceEnum.drugbank+"drugs");
 //        mappings.put(DRUG.getUnicodeString(), DRUG);
 //        mappings.put(NamespaceEnum.dbpediaOnt+"Drug", DRUG);
 //        mappings.put(NamespaceEnum.dailymed+"drugs", DRUG);
 //        mappings.put(NamespaceEnum.sider+"drugs", DRUG);
 //        mappings.put(NamespaceEnum.tcm+"Medicine", DRUG);
 //        
-//        UriRef DISEASE = new UriRef(NamespaceEnum.diseasome+"diseases");
+//        IRI DISEASE = new IRI(NamespaceEnum.diseasome+"diseases");
 //        mappings.put(DISEASE.getUnicodeString(), DISEASE);
 //        mappings.put(NamespaceEnum.linkedct+"condition", DISEASE);
 //        mappings.put(NamespaceEnum.tcm+"Disease", DISEASE);
 //
-//        UriRef SIDE_EFFECT = new UriRef(NamespaceEnum.sider+"side_effects");
+//        IRI SIDE_EFFECT = new IRI(NamespaceEnum.sider+"side_effects");
 //        mappings.put(SIDE_EFFECT.getUnicodeString(), SIDE_EFFECT);
 //        
-//        UriRef INGREDIENT = new UriRef(NamespaceEnum.dailymed+"ingredients");
+//        IRI INGREDIENT = new IRI(NamespaceEnum.dailymed+"ingredients");
 //        mappings.put(INGREDIENT.getUnicodeString(), INGREDIENT);
                 
         DEFAULT_ENTITY_TYPE_MAPPINGS = Collections.unmodifiableMap(mappings);
@@ -198,8 +198,8 @@
      * Holds the mappings of rdf:type used by concepts to dc:type values used
      * by TextAnnotations. 
      */
-    private Map<String,UriRef> typeMappings;
-    private Map<String, UriRef> unmodTypeMappings;
+    private Map<String,IRI> typeMappings;
+    private Map<String, IRI> unmodTypeMappings;
     /**
      * The mode on how to process redirect for Entities. 
      */
@@ -207,7 +207,7 @@
     /**
      * the default DC Type
      */
-    private UriRef defaultDcType;
+    private IRI defaultDcType;
     private String nameField;
     private String redirectField;
     private String typeField;
@@ -265,7 +265,7 @@
         setMaxSuggestions(DEFAULT_SUGGESTIONS);
         setMaxSearchTokens(DEFAULT_MAX_SEARCH_TOKENS);
         setRedirectProcessingMode(DEFAULT_REDIRECT_PROCESSING_MODE);
-        typeMappings = new HashMap<String,UriRef>(DEFAULT_ENTITY_TYPE_MAPPINGS);
+        typeMappings = new HashMap<String,IRI>(DEFAULT_ENTITY_TYPE_MAPPINGS);
         unmodTypeMappings = Collections.unmodifiableMap(typeMappings);
         setDefaultDcType(typeMappings.remove(null));
         setNameField(DEFAULT_NAME_FIELD);
@@ -445,23 +445,23 @@
      * @return the previously mapped dc:type value or <code>null</code> if
      * no mapping for the parsed concept type was present
      */
-    public UriRef removeTypeMapping(String conceptType){
+    public IRI removeTypeMapping(String conceptType){
         return typeMappings.remove(conceptType);
     }
     /**
      * 
      * @param conceptType the type of the concept or <code>null</code> to
-     * add the default dc:type mapping. See also {@link #setDefaultDcType(UriRef)}
+     * add the default dc:type mapping. See also {@link #setDefaultDcType(IRI)}
      * @param dcType the dc:type for the parsed concept type
      * @return the previously mapped dc:type value if an existing mapping
      * was updated or <code>null</code> if a new mapping was added.
      */
-    public UriRef setTypeMapping(String conceptType, UriRef dcType){
+    public IRI setTypeMapping(String conceptType, IRI dcType){
         if(dcType == null) {
             throw new IllegalArgumentException("The parsed dc:type URI MUST NOT be NULL!");
         }
         if(conceptType == null){ //handle setting of the default dc:type value
-            UriRef oldDefault = getDefaultDcType();
+            IRI oldDefault = getDefaultDcType();
             setDefaultDcType(dcType);
             return oldDefault;
         }
@@ -475,7 +475,7 @@
      * cases.
      * @param defaultDcType the defaultDcType to set
      */
-    public void setDefaultDcType(UriRef defaultDcType) {
+    public void setDefaultDcType(IRI defaultDcType) {
         this.defaultDcType = defaultDcType;
     }
     /**
@@ -484,7 +484,7 @@
      * explicit mapping exists
      * @return the defaultDcType
      */
-    public UriRef getDefaultDcType() {
+    public IRI getDefaultDcType() {
         return defaultDcType;
     }
     /**
@@ -505,7 +505,7 @@
      * Getter for the read only mappings of type mappings
      * @return the type mappings (read only)
      */
-    public Map<String,UriRef> getTypeMappings() {
+    public Map<String,IRI> getTypeMappings() {
         return unmodTypeMappings;
     }
     /**
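The mapping tables above now key dc:type values as IRI instead of UriRef; a minimal sketch of the lookup the linker performs, with two example type URIs that are not part of the default mappings:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.clerezza.commons.rdf.IRI;

    public class TypeMappingSketch {
        public static void main(String[] args) {
            Map<String,IRI> typeMappings = new HashMap<String,IRI>();
            typeMappings.put("http://xmlns.com/foaf/0.1/Person",
                    new IRI("http://dbpedia.org/ontology/Person"));
            Map<String,IRI> unmodTypeMappings = Collections.unmodifiableMap(typeMappings);
            // map a concept's rdf:type to the dc:type used for the TextAnnotation
            IRI dcType = unmodTypeMappings.get("http://xmlns.com/foaf/0.1/Person");
            System.out.println(dcType == null ? "unmapped" : dcType.getUnicodeString());
        }
    }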
diff --git a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/LinkedEntity.java b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/LinkedEntity.java
index 46f6d05..cadd48a 100644
--- a/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/LinkedEntity.java
+++ b/enhancement-engines/keywordextraction/src/main/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/LinkedEntity.java
@@ -24,7 +24,7 @@
 
 import opennlp.tools.util.Span;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.opennlp.TextAnalyzer.AnalysedText;
 
 /**
@@ -131,7 +131,7 @@
         }
     }
     private final String selectedText;
-    private final Set<UriRef> types;
+    private final Set<IRI> types;
     private final List<Suggestion> suggestions;
     private final Collection<Occurrence> occurrences = new ArrayList<Occurrence>();
     private final Collection<Occurrence> unmodOccurrences = Collections.unmodifiableCollection(occurrences);
@@ -141,7 +141,7 @@
      * @param suggestions the entity suggestions
      * @param types the types of the linked entity. 
      */
-    protected LinkedEntity(String selectedText, List<Suggestion> suggestions, Set<UriRef> types) {
+    protected LinkedEntity(String selectedText, List<Suggestion> suggestions, Set<IRI> types) {
         this.suggestions = Collections.unmodifiableList(suggestions);
         this.selectedText = selectedText;
         this.types = Collections.unmodifiableSet(types);
@@ -155,7 +155,7 @@
      * @param types the types of the linked entity. 
      */
     protected LinkedEntity(AnalysedText sentence,int startToken,int tokenSpan, 
-                           List<Suggestion> suggestions, Set<UriRef> types) {
+                           List<Suggestion> suggestions, Set<IRI> types) {
         this(sentence.getText().substring(
             sentence.getTokens().get(startToken).getStart(), 
             sentence.getTokens().get(tokenSpan).getEnd()),suggestions,types);
@@ -173,7 +173,7 @@
      * Getter for read only list of types
      * @return the types
      */
-    public Set<UriRef> getTypes() {
+    public Set<IRI> getTypes() {
         return types;
     }
     /**
diff --git a/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngineTest.java b/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngineTest.java
index 44bc372..da65a32 100644
--- a/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngineTest.java
+++ b/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/engine/KeywordLinkingEngineTest.java
@@ -39,14 +39,13 @@
 
 import opennlp.tools.tokenize.SimpleTokenizer;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.commons.opennlp.OpenNLP;
 import org.apache.stanbol.commons.opennlp.TextAnalyzer.TextAnalyzerConfig;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
@@ -168,7 +167,7 @@
     }
 
     public static ContentItem getContentItem(final String id, final String text) throws IOException {
-        return ciFactory.createContentItem(new UriRef(id),new StringSource(text));
+        return ciFactory.createContentItem(new IRI(id),new StringSource(text));
     }
     /**
      * This tests the EntityLinker functionality (if the expected Entities
@@ -237,7 +236,7 @@
         //compute the enhancements
         engine.computeEnhancements(ci);
         //validate the enhancement results
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(DC_CREATOR,LiteralFactory.getInstance().createTypedLiteral(
             engine.getClass().getName()));
@@ -251,18 +250,18 @@
         assertEquals("Five fise:EntityAnnotations are expected by this Test", 5, numEntityAnnotations);
     }
     /**
-     * Similar to {@link EnhancementStructureHelper#validateAllEntityAnnotations(org.apache.clerezza.rdf.core.TripleCollection, Map)}
+     * Similar to {@link EnhancementStructureHelper#validateAllEntityAnnotations(org.apache.clerezza.commons.rdf.Graph, Map)}
      * but in addition checks fise:confidence [0..1] and entityhub:site properties
      * @param ci
      * @param expectedValues
      * @return
      */
-    private static int validateAllEntityAnnotations(ContentItem ci, Map<UriRef,Resource> expectedValues){
+    private static int validateAllEntityAnnotations(ContentItem ci, Map<IRI,RDFTerm> expectedValues){
         Iterator<Triple> entityAnnotationIterator = ci.getMetadata().filter(null,
                 RDF_TYPE, ENHANCER_ENTITYANNOTATION);
         int entityAnnotationCount = 0;
         while (entityAnnotationIterator.hasNext()) {
-            UriRef entityAnnotation = (UriRef) entityAnnotationIterator.next().getSubject();
+            IRI entityAnnotation = (IRI) entityAnnotationIterator.next().getSubject();
             // test if selected Text is added
             validateEntityAnnotation(ci.getMetadata(), entityAnnotation, expectedValues);
             //validate also that the confidence is between [0..1]
@@ -279,12 +278,12 @@
 //                    +"',entityAnnotation "+entityAnnotation+")",
 //                    0.0 <= confidence.doubleValue());
             //Test the entityhub:site property (STANBOL-625)
-            UriRef ENTITYHUB_SITE = new UriRef(RdfResourceEnum.site.getUri());
+            IRI ENTITYHUB_SITE = new IRI(RdfResourceEnum.site.getUri());
             Iterator<Triple> entitySiteIterator = ci.getMetadata().filter(entityAnnotation, 
                 ENTITYHUB_SITE, null);
             assertTrue("Expected entityhub:site value is missing (entityAnnotation "
                     +entityAnnotation+")",entitySiteIterator.hasNext());
-            Resource siteResource = entitySiteIterator.next().getObject();
+            RDFTerm siteResource = entitySiteIterator.next().getObject();
             assertTrue("entityhub:site values MUST BE Literals", siteResource instanceof Literal);
             assertEquals("'"+TEST_REFERENCED_SITE_NAME+"' is expected as "
                 + "entityhub:site value", TEST_REFERENCED_SITE_NAME, 
diff --git a/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/ClasspathDataFileProvider.java b/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/ClasspathDataFileProvider.java
index c403190..ff895fb 100644
--- a/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/ClasspathDataFileProvider.java
+++ b/enhancement-engines/keywordextraction/src/test/java/org/apache/stanbol/enhancer/engines/keywordextraction/impl/ClasspathDataFileProvider.java
@@ -68,7 +68,7 @@
         // load default OpenNLP models from classpath (embedded in the defaultdata bundle)
         final String resourcePath = RESOURCE_BASE_PATH + filename;
         final URL dataFile = getClass().getClassLoader().getResource(resourcePath);
-        //log.debug("Resource {} found: {}", (in == null ? "NOT" : ""), resourcePath);
+        //log.debug("RDFTerm {} found: {}", (in == null ? "NOT" : ""), resourcePath);
         return dataFile;
     }
 }
diff --git a/enhancement-engines/kuromoji-nlp/src/main/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/KuromojiNlpEngine.java b/enhancement-engines/kuromoji-nlp/src/main/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/KuromojiNlpEngine.java
index d50909b..0c254e7 100644
--- a/enhancement-engines/kuromoji-nlp/src/main/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/KuromojiNlpEngine.java
+++ b/enhancement-engines/kuromoji-nlp/src/main/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/KuromojiNlpEngine.java
@@ -37,12 +37,12 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.input.CharSequenceReader;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -185,7 +185,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
@@ -323,12 +323,12 @@
             } catch (IOException e) {/* ignore */}
         }
         //finally write the NER annotations to the metadata of the ContentItem
-        final MGraph metadata = ci.getMetadata();
+        final Graph metadata = ci.getMetadata();
         ci.getLock().writeLock().lock();
         try {
             Language lang = new Language("ja");
             for(NerData nerData : nerList){
-                UriRef ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                IRI ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
                 metadata.add(new TripleImpl(ta, ENHANCER_SELECTED_TEXT, new PlainLiteralImpl(
                     at.getSpan().substring(nerData.start, nerData.end),lang)));
                 metadata.add(new TripleImpl(ta, DC_TYPE, nerData.tag.getType()));
diff --git a/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/ClasspathDataFileProvider.java b/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/ClasspathDataFileProvider.java
index f3bb350..c13e4a7 100644
--- a/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/ClasspathDataFileProvider.java
+++ b/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/ClasspathDataFileProvider.java
@@ -73,7 +73,7 @@
         // load default OpenNLP models from classpath (embedded in the defaultdata bundle)
         final String resourcePath = RESOURCE_BASE_PATH + filename;
         final URL dataFile = getClass().getClassLoader().getResource(resourcePath);
-        //log.debug("Resource {} found: {}", (in == null ? "NOT" : ""), resourcePath);
+        //log.debug("RDFTerm {} found: {}", (in == null ? "NOT" : ""), resourcePath);
         return dataFile;
     }
 }
diff --git a/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/TestKuromojiNlpEngine.java b/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/TestKuromojiNlpEngine.java
index b9de7f2..1fcc005 100644
--- a/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/TestKuromojiNlpEngine.java
+++ b/enhancement-engines/kuromoji-nlp/src/test/java/org/apache/stanbol/enhancer/engines/kuromoji/impl/TestKuromojiNlpEngine.java
@@ -24,10 +24,10 @@
 import java.util.Map;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.commons.solr.utils.DataFileResourceLoader;
 import org.apache.stanbol.commons.stanboltools.datafileprovider.DataFileProvider;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
@@ -62,7 +62,7 @@
 
     private static ContentItemFactory contentItemFactory;
     
-    private static UriRef id = new UriRef("http://www.example.org/contentItem1");
+    private static IRI id = new IRI("http://www.example.org/contentItem1");
     /**
      * Test text taken from the <a href ="http://ja.wikipedia.org/wiki/%E3%83%AD%E3%83%B3%E3%83%89%E3%83%B3">
      * Japanese wikipedia side for London</a>.
@@ -107,7 +107,7 @@
         Assert.assertEquals(EnhancementEngine.ENHANCE_ASYNC, engine.canEnhance(contentItem));
         engine.computeEnhancements(contentItem);
         //assert the results
-        Map<UriRef,Resource> expected = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expected = new HashMap<IRI,RDFTerm>();
         expected.put(Properties.DC_CREATOR, lf.createTypedLiteral(engine.getClass().getName()));
         expected.put(Properties.ENHANCER_EXTRACTED_FROM,contentItem.getUri());
         Assert.assertEquals(16, EnhancementStructureHelper.validateAllTextAnnotations(
diff --git a/enhancement-engines/langdetect/src/main/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEnhancementEngine.java b/enhancement-engines/langdetect/src/main/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEnhancementEngine.java
index 82a2fb5..bfa9550 100644
--- a/enhancement-engines/langdetect/src/main/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEnhancementEngine.java
+++ b/enhancement-engines/langdetect/src/main/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEnhancementEngine.java
@@ -32,10 +32,10 @@
 import java.util.Set;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
@@ -201,7 +201,7 @@
     }
 
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
         if(contentPart == null){
             throw new IllegalStateException("No ContentPart with Mimetype '"
                     + TEXT_PLAIN_MIMETYPE+"' found for ContentItem "+ci.getUri()
@@ -253,13 +253,13 @@
         
         // add language to metadata
         if (languages != null) {
-            MGraph g = ci.getMetadata();
+            Graph g = ci.getMetadata();
             ci.getLock().writeLock().lock();
             try {
                 for(int i=0;i<maxSuggestedLanguages && i<languages.size();i++){
                     // add a hypothesis
                     Language hypothesis = languages.get(i);
-                    UriRef textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                    IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
                     g.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(hypothesis.lang)));
                     g.add(new TripleImpl(textEnhancement, ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(hypothesis.prob)));
                     g.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
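Both language engines guard the metadata update with the ContentItem's write lock. The same pattern detached from ContentItem so it runs standalone; the dc:language IRI is written out by hand and the enhancement IRI is made up:

    import java.util.concurrent.locks.ReadWriteLock;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class LockedWriteSketch {
        public static void main(String[] args) {
            Graph g = new SimpleGraph();                        // stands in for ci.getMetadata()
            ReadWriteLock lock = new ReentrantReadWriteLock();  // stands in for ci.getLock()
            IRI textEnhancement = new IRI("urn:enhancement:lang");
            IRI dcLanguage = new IRI("http://purl.org/dc/terms/language");
            lock.writeLock().lock();
            try {
                g.add(new TripleImpl(textEnhancement, dcLanguage, new PlainLiteralImpl("en")));
            } finally {
                lock.writeLock().unlock();
            }
        }
    }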
diff --git a/enhancement-engines/langdetect/src/test/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEngineTest.java b/enhancement-engines/langdetect/src/test/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEngineTest.java
index 12ee34c..3b264df 100644
--- a/enhancement-engines/langdetect/src/test/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEngineTest.java
+++ b/enhancement-engines/langdetect/src/test/java/org/apache/stanbol/enhancer/engines/langdetect/LanguageDetectionEngineTest.java
@@ -30,8 +30,8 @@
 
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -119,7 +119,7 @@
         langIdEngine.activate(context);
         ContentItem ci = ciFactory.createContentItem(new StringSource(text));
         langIdEngine.computeEnhancements(ci);
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
             langIdEngine.getClass().getName()));
diff --git a/enhancement-engines/langid/src/main/java/org/apache/stanbol/enhancer/engines/langid/LangIdEnhancementEngine.java b/enhancement-engines/langid/src/main/java/org/apache/stanbol/enhancer/engines/langid/LangIdEnhancementEngine.java
index dd863f5..6679958 100644
--- a/enhancement-engines/langid/src/main/java/org/apache/stanbol/enhancer/engines/langid/LangIdEnhancementEngine.java
+++ b/enhancement-engines/langid/src/main/java/org/apache/stanbol/enhancer/engines/langid/LangIdEnhancementEngine.java
@@ -27,10 +27,10 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -134,7 +134,7 @@
     }
 
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMTYPES);
         if(contentPart == null){
             throw new IllegalStateException("No ContentPart with Mimetype '"
                     + TEXT_PLAIN_MIMETYPE+"' found for ContentItem "+ci.getUri()
@@ -164,10 +164,10 @@
         log.info("language identified as " + language);
 
         // add language to metadata
-        MGraph g = ci.getMetadata();
+        Graph g = ci.getMetadata();
         ci.getLock().writeLock().lock();
         try {
-            UriRef textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
+            IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
             g.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(language)));
             g.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
         } finally {
diff --git a/enhancement-engines/langid/src/test/java/org/apache/stanbol/enhancer/engines/langid/LangIdEngineTest.java b/enhancement-engines/langid/src/test/java/org/apache/stanbol/enhancer/engines/langid/LangIdEngineTest.java
index d36a7ed..1e09fd6 100644
--- a/enhancement-engines/langid/src/test/java/org/apache/stanbol/enhancer/engines/langid/LangIdEngineTest.java
+++ b/enhancement-engines/langid/src/test/java/org/apache/stanbol/enhancer/engines/langid/LangIdEngineTest.java
@@ -28,8 +28,8 @@
 
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.engines.langid.LangIdEnhancementEngine;
@@ -98,7 +98,7 @@
         langIdEngine.activate(context);
         ContentItem ci = ciFactory.createContentItem(new StringSource(text));
         langIdEngine.computeEnhancements(ci);
-        HashMap<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        HashMap<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
             langIdEngine.getClass().getName()));
diff --git a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngine.java b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngine.java
index d359042..26694cb 100644
--- a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngine.java
+++ b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngine.java
@@ -39,14 +39,14 @@
 import java.util.Set;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.input.CharSequenceReader;
 import org.apache.commons.lang.StringUtils;
 import org.apache.lucene.analysis.TokenStream;
@@ -89,9 +89,9 @@
     private static final Map<String,Object> SERVICE_PROPERTIES = Collections.unmodifiableMap(Collections
             .singletonMap(ServiceProperties.ENHANCEMENT_ENGINE_ORDERING, (Object) ENGINE_ORDERING));
 
-    private static final UriRef ENHANCER_ENTITY_RANKING = new UriRef(NamespaceEnum.fise + "entity-ranking");
+    private static final IRI ENHANCER_ENTITY_RANKING = new IRI(NamespaceEnum.fise + "entity-ranking");
 
-    public static final UriRef FISE_ORIGIN = new UriRef(NamespaceEnum.fise + "origin");
+    public static final IRI FISE_ORIGIN = new IRI(NamespaceEnum.fise + "origin");
 
     private final LiteralFactory literalFactory = LiteralFactory.getInstance();
     
@@ -419,7 +419,7 @@
      * @param neTypes the types of the named entity
      * @return
      */
-    private boolean filterByNamedEntityType(Iterator<UriRef> eTypes, Set<String> neTypes) {
+    private boolean filterByNamedEntityType(Iterator<IRI> eTypes, Set<String> neTypes) {
         //first collect the allowed entity types
         Set<String> entityTypes = new HashSet<String>();
         for(String neType : neTypes){
@@ -440,7 +440,7 @@
         }
         //second check the actual entity types against the allowed
         while(eTypes.hasNext()){
-            UriRef typeUri = eTypes.next();
+            IRI typeUri = eTypes.next();
             if(typeUri != null && entityTypes.contains(typeUri.getUnicodeString())){
                 return false; //we found an match .. do not filter
             }
@@ -454,13 +454,13 @@
      * @param entityTypes
      * @return
      */
-    private boolean filterEntityByType(Iterator<UriRef> entityTypes){
-        Map<UriRef, Integer> whiteList = elConfig.getWhitelistedTypes();
-        Map<UriRef, Integer> blackList = elConfig.getBlacklistedTypes();
+    private boolean filterEntityByType(Iterator<IRI> entityTypes){
+        Map<IRI, Integer> whiteList = elConfig.getWhitelistedTypes();
+        Map<IRI, Integer> blackList = elConfig.getBlacklistedTypes();
         Integer w = null;
         Integer b = null;
         while(entityTypes.hasNext()){
-            UriRef type = entityTypes.next();
+            IRI type = entityTypes.next();
             Integer act = whiteList.get(type);
             if(act != null){
                 if(w == null || act.compareTo(w) < 0){
@@ -670,20 +670,20 @@
             languageObject = new Language(language);
         }
         
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         for(Tag tag : tags){
-            Collection<UriRef> textAnnotations = new ArrayList<UriRef>(tags.size());
+            Collection<IRI> textAnnotations = new ArrayList<IRI>(tags.size());
             //first create the TextAnnotations for the Occurrences
             Literal startLiteral = literalFactory.createTypedLiteral(tag.getStart());
             Literal endLiteral = literalFactory.createTypedLiteral(tag.getEnd());
             //search for existing text annotation
             Iterator<Triple> it = metadata.filter(null, ENHANCER_START, startLiteral);
-            UriRef textAnnotation = null;
+            IRI textAnnotation = null;
             while(it.hasNext()){
                 Triple t = it.next();
                 if(metadata.filter(t.getSubject(), ENHANCER_END, endLiteral).hasNext() &&
                         metadata.filter(t.getSubject(), RDF_TYPE, ENHANCER_TEXTANNOTATION).hasNext()){
-                    textAnnotation = (UriRef)t.getSubject();
+                    textAnnotation = (IRI)t.getSubject();
                     break;
                 }
             }
@@ -710,20 +710,20 @@
                     new PlainLiteralImpl(this.getClass().getName())));
             }
             //add dc:types (even to existing)
-            for(UriRef dcType : getDcTypes(tag.getSuggestions())){
+            for(IRI dcType : getDcTypes(tag.getSuggestions())){
                 metadata.add(new TripleImpl(
                     textAnnotation, Properties.DC_TYPE, dcType));
             }
             textAnnotations.add(textAnnotation);
             //now the EntityAnnotations for the Suggestions
             for(Match match : tag.getSuggestions()){
-                UriRef entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
+                IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
                 //should we use the label used for the match, or search the
                 //representation for the best label ... currently its the matched one
                 metadata.add(new TripleImpl(entityAnnotation, Properties.ENHANCER_ENTITY_LABEL, match.getMatchLabel()));
                 metadata.add(new TripleImpl(entityAnnotation,ENHANCER_ENTITY_REFERENCE, 
-                    new UriRef(match.getUri())));
-                for(UriRef type : match.getTypes()){
+                    new IRI(match.getUri())));
+                for(IRI type : match.getTypes()){
                     metadata.add(new TripleImpl(entityAnnotation, 
                         Properties.ENHANCER_ENTITY_TYPE, type));
                 }
@@ -736,8 +736,8 @@
                     metadata.add(new TripleImpl(entityAnnotation, FISE_ORIGIN, indexConfig.getOrigin()));
                 }
                 //TODO: add origin information of the EntiySearcher
-//                for(Entry<UriRef,Collection<Resource>> originInfo : entitySearcher.getOriginInformation().entrySet()){
-//                    for(Resource value : originInfo.getValue()){
+//                for(Entry<IRI,Collection<RDFTerm>> originInfo : entitySearcher.getOriginInformation().entrySet()){
+//                    for(RDFTerm value : originInfo.getValue()){
 //                        metadata.add(new TripleImpl(entityAnnotation, 
 //                            originInfo.getKey(),value));
 //                    }
@@ -773,11 +773,11 @@
      * @param conceptTypes The list of suggestions
      * @return the types values for the {@link LinkedEntity}
      */
-    private Set<UriRef> getDcTypes(List<Match> matches){
+    private Set<IRI> getDcTypes(List<Match> matches){
         if(matches == null || matches.isEmpty()){
             return Collections.emptySet();
         }
-        Collection<UriRef> conceptTypes = new HashSet<UriRef>();
+        Collection<IRI> conceptTypes = new HashSet<IRI>();
         double score = -1; //only consider types of the best ranked Entities
         for(Match match : matches){
             double actScore = match.getScore();
@@ -785,13 +785,13 @@
                 break;
             }
             score = actScore;
-            for(Iterator<UriRef> types = match.getTypes().iterator(); 
+            for(Iterator<IRI> types = match.getTypes().iterator(); 
                 types.hasNext(); conceptTypes.add(types.next()));
         }
-        Map<UriRef,UriRef> typeMappings = elConfig.getTypeMappings();
-        Set<UriRef> dcTypes = new HashSet<UriRef>();
-        for(UriRef conceptType : conceptTypes){
-            UriRef dcType = typeMappings.get(conceptType);
+        Map<IRI,IRI> typeMappings = elConfig.getTypeMappings();
+        Set<IRI> dcTypes = new HashSet<IRI>();
+        for(IRI conceptType : conceptTypes){
+            IRI dcType = typeMappings.get(conceptType);
             if(dcType != null){
                 dcTypes.add(dcType);
             }
diff --git a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineComponent.java b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineComponent.java
index c11a710..6dd5215 100644
--- a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineComponent.java
+++ b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineComponent.java
@@ -48,10 +48,10 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.lang.StringUtils;
@@ -177,7 +177,7 @@
      * The origin information for all Entities provided by the configured SolrCore and
     * FST. Origin information is added to all <code>fise:EntityAnnotation</code>
      * by using the <code>fise:origin</code> property. Configured values can be both
-     * {@link UriRef URI}s or {@link Literal}s. Configured Strings are checked if
+     * {@link IRI URI}s or {@link Literal}s. Configured Strings are checked if
     * they are valid {@link URI}s and {@link URI#isAbsolute() absolute}. If not,
      * a {@link Literal} is parsed.
      */
@@ -240,7 +240,7 @@
     /**
      * The origin information of Entities.
      */
-    private Resource origin;
+    private RDFTerm origin;
     
     /**
      * used to resolve '{prefix}:{local-name}' used within the engines configuration
@@ -470,13 +470,13 @@
         
         //(4) parse Origin information
         value = properties.get(ORIGIN);
-        if(value instanceof Resource){
-            origin = (Resource)origin;
+        if(value instanceof RDFTerm){
+            origin = (RDFTerm)value;
         } else if (value instanceof String){
             try {
                 URI originUri = new URI((String)value);
                 if(originUri.isAbsolute()){
-                    origin = new UriRef((String)value);
+                    origin = new IRI((String)value);
                 } else {
                     origin = new PlainLiteralImpl((String)value);
                 }
@@ -485,7 +485,7 @@
             }
             log.info(" - origin: {}", origin);
         } else if(value != null){
-            log.warn("Values of the {} property MUST BE of type Resource or String "
+            log.warn("Values of the {} property MUST BE of type RDFTerm or String "
                     + "(parsed: {} (type:{}))", new Object[]{ORIGIN,value,value.getClass()});
         } //else no ORIGIN information provided
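
The ORIGIN parsing above follows one rule: configured RDF terms are used as-is, and strings become an IRI when they are absolute URIs, otherwise a plain literal. A minimal standalone sketch of that rule (a hypothetical helper, assuming malformed URI strings are treated as plain labels):

    import java.net.URI;
    import java.net.URISyntaxException;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;

    public final class OriginParser {

        /** Maps a configuration value to the RDF term used for fise:origin. */
        public static RDFTerm parseOrigin(Object value) {
            if (value instanceof RDFTerm) {
                return (RDFTerm) value;               // configured RDF terms are used as-is
            } else if (value instanceof String) {
                String s = (String) value;
                try {
                    return new URI(s).isAbsolute()
                            ? new IRI(s)              // absolute URI -> entity reference
                            : new PlainLiteralImpl(s);// relative -> plain label
                } catch (URISyntaxException e) {
                    return new PlainLiteralImpl(s);   // assumption: treat as label
                }
            }
            return null; // unsupported type (the engine logs a warning instead)
        }
    }
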
         
diff --git a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/IndexConfiguration.java b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/IndexConfiguration.java
index 0ae469c..09a89bc 100644
--- a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/IndexConfiguration.java
+++ b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/IndexConfiguration.java
@@ -30,9 +30,9 @@
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
@@ -125,10 +125,10 @@
     /**
      * The origin is added to <code>fise:TextAnnotation</code> created for
      * linked Entities. It is intended to be used for providing a reference to
-     * dataset of the Entity. Both {@link UriRef URI}s and {@link Literal}s can
+     * the dataset of the Entity. Both {@link IRI URI}s and {@link Literal}s can
      * be used here.
      */
-    private Resource origin;
+    private RDFTerm origin;
 
     /**
      * If alternate tokens (<code>posInc == 0</code>) can be skipped or if such
@@ -421,7 +421,7 @@
         this.fstDirectory = fstDirectory;
     }
 
-    public void setOrigin(Resource origin) {
+    public void setOrigin(RDFTerm origin) {
         this.origin = origin;
     }
     /**
@@ -434,7 +434,7 @@
      * 
      * @return the origin or <code>null</code> if none is configured
      */
-    public Resource getOrigin() {
+    public RDFTerm getOrigin() {
         return origin;
     }
     
diff --git a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/Match.java b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/Match.java
index afcab96..575b0a1 100644
--- a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/Match.java
+++ b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/Match.java
@@ -22,8 +22,8 @@
 import java.util.Comparator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.engines.entitylinking.impl.Suggestion;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -77,11 +77,11 @@
         return getValues(FieldType.label);
     }
     
-    public Collection<UriRef> getTypes(){
+    public Collection<IRI> getTypes(){
         return getValues(FieldType.type);
     }
     
-    public Collection<UriRef> getRedirects(){
+    public Collection<IRI> getRedirects(){
         return getValues(FieldType.redirect);
     }
     public Double getRanking(){
@@ -157,8 +157,8 @@
     static enum FieldType {
         id(String.class),
         label(Literal.class, true), 
-        type(UriRef.class,true), 
-        redirect(UriRef.class,true), 
+        type(IRI.class,true), 
+        redirect(IRI.class,true), 
         ranking(Double.class);
         
         Class<?> valueType;
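
The FieldType enum above pairs each field with the Java type of its values, so a single generic accessor can serve labels, types, redirects and rankings. A reduced sketch of that pattern (names are illustrative, not from the patch):

    import java.util.EnumMap;
    import java.util.Map;

    enum Field {
        id(String.class),
        ranking(Double.class);

        final Class<?> valueType;

        Field(Class<?> valueType) {
            this.valueType = valueType;
        }
    }

    class FieldValues {
        private final Map<Field, Object> values = new EnumMap<>(Field.class);

        void put(Field field, Object value) {
            if (!field.valueType.isInstance(value)) { // reject values of the wrong type
                throw new IllegalArgumentException(field + " expects " + field.valueType);
            }
            values.put(field, value);
        }

        <T> T get(Field field, Class<T> type) {
            return type.cast(values.get(field));      // checked cast on read
        }
    }
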
diff --git a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/TaggingSession.java b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/TaggingSession.java
index 2714fff..cfc1b20 100644
--- a/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/TaggingSession.java
+++ b/enhancement-engines/lucenefstlinking/src/main/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/TaggingSession.java
@@ -33,10 +33,10 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Document;
@@ -572,17 +572,17 @@
                 values.put(FieldType.label, labels);
                 //load the types
                 if(typeField != null){
-                    Set<UriRef> types = new HashSet<UriRef>();
+                    Set<IRI> types = new HashSet<IRI>();
                     for(String type : doc.getValues(typeField)){
-                        types.add(new UriRef(type));
+                        types.add(new IRI(type));
                     }
                     values.put(FieldType.type, types);
                 }
                 //load the redirects
                 if(redirectField != null){
-                    Set<UriRef> redirects = new HashSet<UriRef>();
+                    Set<IRI> redirects = new HashSet<IRI>();
                     for(String redirect : doc.getValues(redirectField)){
-                        redirects.add(new UriRef(redirect));
+                        redirects.add(new IRI(redirect));
                     }
                     values.put(FieldType.redirect, redirects);
                 }
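
The hunk above wraps URI strings stored in Solr fields into Clerezza IRI instances, deduplicated in a Set. The same conversion in isolation (a sketch; the input is assumed to hold well-formed URI strings):

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.clerezza.commons.rdf.IRI;

    final class IriSets {

        /** Wraps stored URI strings (e.g. Solr field values) into a set of IRIs. */
        static Set<IRI> toIris(String[] uris) {
            Set<IRI> result = new HashSet<>();
            for (String uri : uris) {
                result.add(new IRI(uri)); // duplicates collapse via IRI equality
            }
            return result;
        }
    }
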
diff --git a/enhancement-engines/lucenefstlinking/src/test/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineTest.java b/enhancement-engines/lucenefstlinking/src/test/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineTest.java
index 81c56fb..9f3d4ef 100644
--- a/enhancement-engines/lucenefstlinking/src/test/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineTest.java
+++ b/enhancement-engines/lucenefstlinking/src/test/java/org/apache/stanbol/enhancer/engines/lucenefstlinking/FstLinkingEngineTest.java
@@ -48,13 +48,13 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.core.SolrCore;
@@ -158,9 +158,9 @@
      * Used with the {@link EnhancementStructureHelper} to validate Enhancement 
      * results
      */
-    private static Map<UriRef,Resource> EXPECTED_ENHANCEMENT_VALUES;
+    private static Map<IRI,RDFTerm> EXPECTED_ENHANCEMENT_VALUES;
     static{
-        EXPECTED_ENHANCEMENT_VALUES = new HashMap<UriRef,Resource>();
+        EXPECTED_ENHANCEMENT_VALUES = new HashMap<IRI,RDFTerm>();
         EXPECTED_ENHANCEMENT_VALUES.put(DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
             FstLinkingEngine.class.getName()));
         //adding null as expected for confidence makes it a required property
@@ -346,13 +346,13 @@
         //iterate over all fise:TextAnnotations
         //NOTE this assumes all textAnnotations are from the FST linking engine
         log.info("  ... validated fise:TextAnnotations:");
-        Map<UriRef,Resource> expected = new HashMap<UriRef,Resource>(EXPECTED_ENHANCEMENT_VALUES);
+        Map<IRI,RDFTerm> expected = new HashMap<IRI,RDFTerm>(EXPECTED_ENHANCEMENT_VALUES);
         expected.put(ENHANCER_EXTRACTED_FROM, ci.getUri());
         int[] num = new int[]{0,0};
         Iterator<Triple> textAnnotations = ci.getMetadata().filter(
             null, Properties.RDF_TYPE, TechnicalClasses.ENHANCER_TEXTANNOTATION);
         while(textAnnotations.hasNext()){
-            UriRef textAnnotation = (UriRef)textAnnotations.next().getSubject();
+            IRI textAnnotation = (IRI)textAnnotations.next().getSubject();
            //validate this text annotation against the Stanbol EnhancementStructure
             EnhancementStructureHelper.validateTextAnnotation(
                 ci.getMetadata(), textAnnotation, content, expected);
@@ -374,11 +374,11 @@
         Iterator<Triple> entityAnnotations = ci.getMetadata().filter(
             null, Properties.RDF_TYPE, TechnicalClasses.ENHANCER_ENTITYANNOTATION);
         while(entityAnnotations.hasNext()){
-            UriRef entityAnnotation = (UriRef)entityAnnotations.next().getSubject();
+            IRI entityAnnotation = (IRI)entityAnnotations.next().getSubject();
            //validate this entity annotation against the Stanbol EnhancementStructure
             EnhancementStructureHelper.validateEntityAnnotation(
                 ci.getMetadata(), entityAnnotation, expected);
-            UriRef entityUri = EnhancementEngineHelper.getReference(
+            IRI entityUri = EnhancementEngineHelper.getReference(
                 ci.getMetadata(), entityAnnotation, Properties.ENHANCER_ENTITY_REFERENCE);
             log.info(" {}. {}",num[1]+1,entityUri);
             Assert.assertNotNull(entityUri);
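
The test iterates all subjects of a given rdf:type via Graph.filter(subject, predicate, object), where null acts as a wildcard. A self-contained sketch of that lookup (the rdf:type constant is spelled out here; the test itself uses Properties.RDF_TYPE):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;

    final class TypedSubjects {
        static final IRI RDF_TYPE = new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");

        /** Collects all subjects with the given rdf:type; assumes IRI subjects. */
        static List<IRI> subjectsOfType(Graph graph, IRI type) {
            List<IRI> subjects = new ArrayList<>();
            Iterator<Triple> it = graph.filter(null, RDF_TYPE, type); // (?, rdf:type, type)
            while (it.hasNext()) {
                subjects.add((IRI) it.next().getSubject());
            }
            return subjects;
        }
    }
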
diff --git a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/MetaxaEngine.java b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/MetaxaEngine.java
index 1acb2cb..14771e7 100644
--- a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/MetaxaEngine.java
+++ b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/MetaxaEngine.java
@@ -30,15 +30,15 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+// org.apache.clerezza.commons.rdf.BlankNode is used fully qualified below to avoid a clash with RDF2Go's BlankNode
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.impl.TypedLiteralImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
@@ -98,7 +98,7 @@
     /**
      * Plain text content of a content item.
       */
-    public static final UriRef NIE_PLAINTEXTCONTENT = new UriRef(NamespaceEnum.nie + "plainTextContent");
+    public static final IRI NIE_PLAINTEXTCONTENT = new IRI(NamespaceEnum.nie + "plainTextContent");
     private static final URIImpl NIE_PLAINTEXT_PROPERTY = new URIImpl(NIE_PLAINTEXTCONTENT.getUnicodeString());
     /**
      * The default value for the Execution of this Engine. Currently set to
@@ -230,7 +230,7 @@
         } finally {
             ci.getLock().readLock().unlock();
         }
         // Convert the RDF2go model to a Clerezza Graph and also extract
         // the extracted plain text from the model
         if (null == m) {
             log.debug("Unable to preocess ContentItem {} (mime type {}) with Metaxa",
@@ -245,14 +245,14 @@
             throw new EngineException("Unable to initialise Blob for storing" +
             		"the plain text content",e);
         }
-        HashMap<BlankNode, BNode> blankNodeMap = new HashMap<BlankNode, BNode>();
+        HashMap<BlankNode, org.apache.clerezza.commons.rdf.BlankNode> blankNodeMap = new HashMap<BlankNode, org.apache.clerezza.commons.rdf.BlankNode>();
         RDF2GoUtils.urifyBlankNodes(m);
         ClosableIterator<Statement> it = m.iterator();
         BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
             plainTextSink.getOutputStream(), UTF8));
         boolean textExtracted = false; //used to detect if some text was extracted
         try {
-            MGraph g = new SimpleMGraph(); //first add to a temporary graph
+            Graph g = new SimpleGraph(); //first add to a temporary graph
             while (it.hasNext()) {
                 Statement oneStmt = it.next();
                 //we need to treat triples that provide the plain/text
@@ -271,16 +271,16 @@
                         }
                         textExtracted = true;
                         if (includeText) {
-                            NonLiteral subject = (NonLiteral) asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
-                            UriRef predicate = (UriRef) asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
-                            Resource object = asClerezzaResource(oneStmt.getObject(), blankNodeMap);
+                            BlankNodeOrIRI subject = (BlankNodeOrIRI) asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
+                            IRI predicate = (IRI) asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
+                            RDFTerm object = asClerezzaResource(oneStmt.getObject(), blankNodeMap);
                             g.add(new TripleImpl(subject, predicate, object));
                         }
                     }
                 } else { //add metadata to the metadata of the contentItem
-                    NonLiteral subject = (NonLiteral) asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
-                    UriRef predicate = (UriRef) asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
-                    Resource object = asClerezzaResource(oneStmt.getObject(), blankNodeMap);
+                    BlankNodeOrIRI subject = (BlankNodeOrIRI) asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
+                    IRI predicate = (IRI) asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
+                    RDFTerm object = asClerezzaResource(oneStmt.getObject(), blankNodeMap);
 
                     if (null != subject && null != predicate && null != object) {
                         Triple t = new TripleImpl(subject, predicate, object);
@@ -304,7 +304,7 @@
         }
         if(textExtracted){
             //add plain text to the content item
-            UriRef blobUri = new UriRef("urn:metaxa:plain-text:"+randomUUID());
+            IRI blobUri = new IRI("urn:metaxa:plain-text:"+randomUUID());
             ci.addPart(blobUri, plainTextSink.getBlob());
         }
     }
@@ -313,22 +313,22 @@
      * Converts the given RDF2Go node into a corresponding Clerezza object.
      *
      * @param node a {@link Node}
-     * @return a {@link Resource}
+     * @return a {@link RDFTerm}
      */
-    public static Resource asClerezzaResource(Node node, HashMap<BlankNode, BNode> blankNodeMap) {
+    public static RDFTerm asClerezzaResource(Node node, HashMap<BlankNode, org.apache.clerezza.commons.rdf.BlankNode> blankNodeMap) {
 
         if (node instanceof URI) {
-            return new UriRef(node.asURI().toString());
+            return new IRI(node.asURI().toString());
         } else if (node instanceof BlankNode) {
-            BNode bNode = blankNodeMap.get(node);
+            org.apache.clerezza.commons.rdf.BlankNode bNode = blankNodeMap.get(node);
             if (bNode == null) {
-                bNode = new BNode();
+                bNode = new org.apache.clerezza.commons.rdf.BlankNode();
                 blankNodeMap.put(node.asBlankNode(), bNode);
             }
             return bNode;
         } else if (node instanceof DatatypeLiteral) {
             DatatypeLiteral dtl = node.asDatatypeLiteral();
-            return new TypedLiteralImpl(dtl.getValue(), new UriRef(dtl.getDatatype().asURI().toString()));
+            return new TypedLiteralImpl(dtl.getValue(), new IRI(dtl.getDatatype().asURI().toString()));
         } else if (node instanceof PlainLiteral) {
             return new PlainLiteralImpl(node.asLiteral().getValue());
         }
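
Because both RDF2Go and Clerezza now expose a type named BlankNode, the map keeps RDF2Go blank nodes as keys and fully qualified Clerezza blank nodes as values. A usage sketch for the converter above, driven from an RDF2Go statement (mirrors the engine loop):

    import java.util.HashMap;

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.stanbol.enhancer.engines.metaxa.MetaxaEngine;
    import org.ontoware.rdf2go.model.Statement;
    import org.ontoware.rdf2go.model.node.BlankNode;

    final class StatementConverter {

        /** Converts one RDF2Go statement and adds it to the given Clerezza graph. */
        static void addStatement(Graph graph, Statement stmt,
                HashMap<BlankNode, org.apache.clerezza.commons.rdf.BlankNode> blankNodeMap) {
            BlankNodeOrIRI s = (BlankNodeOrIRI) MetaxaEngine.asClerezzaResource(stmt.getSubject(), blankNodeMap);
            IRI p = (IRI) MetaxaEngine.asClerezzaResource(stmt.getPredicate(), blankNodeMap);
            RDFTerm o = MetaxaEngine.asClerezzaResource(stmt.getObject(), blankNodeMap);
            if (s != null && p != null && o != null) { // unsupported node types map to null
                graph.add(new TripleImpl(s, p, o));
            }
        }
    }
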
diff --git a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/RDF2GoUtils.java b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/RDF2GoUtils.java
index cdf9077..b3ba669 100644
--- a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/RDF2GoUtils.java
+++ b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/RDF2GoUtils.java
@@ -27,7 +27,7 @@
 import org.ontoware.rdf2go.model.impl.URIGenerator;
 import org.ontoware.rdf2go.model.node.BlankNode;
 import org.ontoware.rdf2go.model.node.Node;
 import org.ontoware.rdf2go.model.node.Resource;
 import org.ontoware.rdf2go.model.node.URI;
 
 /**
@@ -45,7 +45,7 @@
         Model remove = RDF2Go.getModelFactory().createModel();
         remove.open();
         for (Statement stmt : model) {
             Resource subj = stmt.getSubject();
             URI pred = stmt.getPredicate();
             Node obj = stmt.getObject();
             boolean match = false;
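
Note that only Clerezza types are renamed by this migration; the RDF2Go node API used in this class keeps its own names. The two imports side by side:

    import org.ontoware.rdf2go.model.node.Resource;  // RDF2Go: still named Resource
    import org.apache.clerezza.commons.rdf.RDFTerm;  // Clerezza: Resource became RDFTerm
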
diff --git a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/html/BundleURIResolver.java b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/html/BundleURIResolver.java
index 72dbd1e..289c5a7 100644
--- a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/html/BundleURIResolver.java
+++ b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/html/BundleURIResolver.java
@@ -55,7 +55,7 @@
                 String path = baseURI.getPath();
                 resource = path.substring(1, path.lastIndexOf('/') + 1) + href;
                 newUrl = BUNDLE.getEntry(resource);
                 LOG.debug("Resource: " + resource);
                 if (newUrl != null) {
                     return new StreamSource(newUrl.openStream(), newUrl.toString());
                 } else {
diff --git a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/mp3/MP3FileExtractor.java b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/mp3/MP3FileExtractor.java
index 162bdae..467445f 100644
--- a/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/mp3/MP3FileExtractor.java
+++ b/enhancement-engines/metaxa/src/main/java/org/apache/stanbol/enhancer/engines/metaxa/core/mp3/MP3FileExtractor.java
@@ -21,7 +21,7 @@
 import java.nio.charset.Charset;
 
 import org.ontoware.rdf2go.model.Model;
 import org.ontoware.rdf2go.model.node.Resource;
 import org.ontoware.rdf2go.model.node.URI;
 import org.ontoware.rdf2go.vocabulary.RDF;
 import org.semanticdesktop.aperture.extractor.AbstractFileExtractor;
@@ -112,7 +112,7 @@
   
   protected void addSimpleContact(URI property, String fullname, RDFContainer container) {
     Model model = container.getModel();
     Resource resource = ModelUtil.generateRandomResource(model);
     model.addStatement(resource, RDF.type, NCO.Contact);
     model.addStatement(resource, NCO.fullname, fullname);
     model.addStatement(container.getDescribedUri(), property, resource);
diff --git a/enhancement-engines/metaxa/src/test/java/org/apache/stanbol/enhancer/engines/metaxa/core/TestMetaxaCore.java b/enhancement-engines/metaxa/src/test/java/org/apache/stanbol/enhancer/engines/metaxa/core/TestMetaxaCore.java
index 7cb0920..5215596 100644
--- a/enhancement-engines/metaxa/src/test/java/org/apache/stanbol/enhancer/engines/metaxa/core/TestMetaxaCore.java
+++ b/enhancement-engines/metaxa/src/test/java/org/apache/stanbol/enhancer/engines/metaxa/core/TestMetaxaCore.java
@@ -20,12 +20,12 @@
 import java.io.InputStream;
 import java.util.HashMap;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+// org.apache.clerezza.commons.rdf.BlankNode is used fully qualified below to avoid a clash with RDF2Go's BlankNode
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.engines.metaxa.MetaxaEngine;
 import org.junit.BeforeClass;
@@ -187,15 +187,15 @@
 
         int tripleCounter = 0;
 
-        HashMap<BlankNode, BNode> blankNodeMap = new HashMap<BlankNode, BNode>();
+        HashMap<BlankNode, org.apache.clerezza.commons.rdf.BlankNode> blankNodeMap = new HashMap<BlankNode, org.apache.clerezza.commons.rdf.BlankNode>();
 
         ClosableIterator<Statement> it = m.iterator();
         while (it.hasNext()) {
             Statement oneStmt = it.next();
 
-            NonLiteral subject = (NonLiteral) MetaxaEngine.asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
-            UriRef predicate = (UriRef) MetaxaEngine.asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
-            Resource object = MetaxaEngine.asClerezzaResource(oneStmt.getObject(), blankNodeMap);
+            BlankNodeOrIRI subject = (BlankNodeOrIRI) MetaxaEngine.asClerezzaResource(oneStmt.getSubject(), blankNodeMap);
+            IRI predicate = (IRI) MetaxaEngine.asClerezzaResource(oneStmt.getPredicate(), blankNodeMap);
+            RDFTerm object = MetaxaEngine.asClerezzaResource(oneStmt.getObject(), blankNodeMap);
 
             if (null != subject
                     && null != predicate
diff --git a/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java b/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java
index fcab7f5..2eef189 100644
--- a/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java
+++ b/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20Helper.java
@@ -30,13 +30,13 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.nlp.NlpAnnotations;
 import org.apache.stanbol.enhancer.nlp.model.AnalysedText;
@@ -62,9 +62,9 @@
 
     private Nif20Helper(){}
     
-    public static final Map<SpanTypeEnum,UriRef> SPAN_TYPE_TO_SSO_TYPE;
+    public static final Map<SpanTypeEnum,IRI> SPAN_TYPE_TO_SSO_TYPE;
     static {
-        Map<SpanTypeEnum,UriRef> mapping = new EnumMap<SpanTypeEnum,UriRef>(SpanTypeEnum.class);
+        Map<SpanTypeEnum,IRI> mapping = new EnumMap<SpanTypeEnum,IRI>(SpanTypeEnum.class);
         //mapping.put(SpanTypeEnum.Text, null);
         //mapping.put(SpanTypeEnum.TextSection, null);
         mapping.put(SpanTypeEnum.Sentence, Nif20.Sentence.getUri());
@@ -78,15 +78,15 @@
      * Concept representing the Phrase (e.g. {@link LexicalCategory#Noun} maps
      * to "<code>http://purl.org/olia/olia.owl#NounPhrase</code>").
      */
-    public static final Map<LexicalCategory,UriRef> LEXICAL_TYPE_TO_PHRASE_TYPE;
+    public static final Map<LexicalCategory,IRI> LEXICAL_TYPE_TO_PHRASE_TYPE;
     static {
         String olia = "http://purl.org/olia/olia.owl#";
-        Map<LexicalCategory,UriRef> mapping = new EnumMap<LexicalCategory,UriRef>(LexicalCategory.class);
-        mapping.put(LexicalCategory.Noun, new UriRef(olia+"NounPhrase"));
-        mapping.put(LexicalCategory.Verb, new UriRef(olia+"VerbPhrase"));
-        mapping.put(LexicalCategory.Adjective, new UriRef(olia+"AdjectivePhrase"));
-        mapping.put(LexicalCategory.Adverb, new UriRef(olia+"AdverbPhrase"));
-        mapping.put(LexicalCategory.Conjuction, new UriRef(olia+"ConjuctionPhrase"));
+        Map<LexicalCategory,IRI> mapping = new EnumMap<LexicalCategory,IRI>(LexicalCategory.class);
+        mapping.put(LexicalCategory.Noun, new IRI(olia+"NounPhrase"));
+        mapping.put(LexicalCategory.Verb, new IRI(olia+"VerbPhrase"));
+        mapping.put(LexicalCategory.Adjective, new IRI(olia+"AdjectivePhrase"));
+        mapping.put(LexicalCategory.Adverb, new IRI(olia+"AdverbPhrase"));
+        mapping.put(LexicalCategory.Conjuction, new IRI(olia+"ConjuctionPhrase"));
         LEXICAL_TYPE_TO_PHRASE_TYPE = Collections.unmodifiableMap(mapping);
     }    
     /**
@@ -97,10 +97,10 @@
      * @param end the end position or values &lt; 1 when open ended.
      * @return the NIF 2.0 Fragment URI
      * @throws IllegalArgumentException if <code>null</code> is parsed as base
-     * {@link UriRef} or the end position is &gt;=0 but &lt= the parsed start
+     * {@link IRI} or the end position is &gt;=0 but &lt;= the parsed start
      * position.
      */
-    public static final UriRef getNifFragmentURI(UriRef base, int start,int end){
+    public static final IRI getNifFragmentURI(IRI base, int start,int end){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -113,10 +113,10 @@
             }
             sb.append(end);
         } //else open ended ...
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
  
-    public static final UriRef getNifRFC5147URI(UriRef base, int start, int end){
+    public static final IRI getNifRFC5147URI(IRI base, int start, int end){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -128,7 +128,7 @@
         if(end >= 0){
             sb.append(',').append(end);
         } //else select the whole string ...
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
     
     public static final int NIF_HASH_CONTEXT_LENGTH = 10;
@@ -136,7 +136,7 @@
     
     public static final Charset UTF8 = Charset.forName("UTF8");
     
-    public static final UriRef getNifHashURI(UriRef base, int start, int end, String text){
+    public static final IRI getNifHashURI(IRI base, int start, int end, String text){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -161,7 +161,7 @@
         sb.append('_');
         sb.append(text.substring(start, 
             Math.min(end,start+NIF_HASH_MAX_STRING_LENGTH)));
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
 
     /**
@@ -212,7 +212,7 @@
      * @param segmentUri the URI of the resource representing the parsed 
      * annotated element in the graph
      */
-    public static void writePos(MGraph graph, Annotated annotated, UriRef segmentUri) {
+    public static void writePos(Graph graph, Annotated annotated, IRI segmentUri) {
         Value<PosTag> posTag = annotated.getAnnotation(NlpAnnotations.POS_ANNOTATION);
         if(posTag != null){
             if(posTag.value().isMapped()){
@@ -241,7 +241,7 @@
      * @param segmentUri
      * @param value
      */
-    private static void setOliaConf(MGraph graph, UriRef segmentUri,
+    private static void setOliaConf(Graph graph, IRI segmentUri,
             Value<?> value) {
         Iterator<Triple> existingConfValues = graph.filter(segmentUri, Nif20.oliaConf.getUri(), null);
         while(existingConfValues.hasNext()){
@@ -262,10 +262,10 @@
      * @param segmentUri the URI of the resource representing the parsed 
      * annotated element in the graph
      */
-    public static void writePhrase(MGraph graph, Annotated annotated, UriRef segmentUri) {
+    public static void writePhrase(Graph graph, Annotated annotated, IRI segmentUri) {
         Value<PhraseTag> phraseTag = annotated.getAnnotation(NlpAnnotations.PHRASE_ANNOTATION);
         if(phraseTag != null){
-            UriRef phraseTypeUri = LEXICAL_TYPE_TO_PHRASE_TYPE.get(phraseTag.value().getCategory());
+            IRI phraseTypeUri = LEXICAL_TYPE_TO_PHRASE_TYPE.get(phraseTag.value().getCategory());
             if(phraseTypeUri != null){ //add the oliaLink for the Phrase
                 graph.add(new TripleImpl(segmentUri, Nif20.oliaCategory.getUri(), phraseTypeUri));
                 setOliaConf(graph, segmentUri, phraseTag);
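
The helpers above derive NIF selector URIs from a base IRI plus character offsets; an end position below zero selects the whole text. A usage sketch (the base IRI is illustrative, not from the patch):

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.engines.nlp2rdf.engine.Nif20Helper;

    public class NifUriDemo {
        public static void main(String[] args) {
            IRI base = new IRI("urn:content-item:example");
            IRI selection = Nif20Helper.getNifRFC5147URI(base, 0, 42); // chars 0..42
            IRI context = Nif20Helper.getNifRFC5147URI(base, 0, -1);   // whole text
            System.out.println(selection.getUnicodeString());
            System.out.println(context.getUnicodeString());
        }
    }
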
diff --git a/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java b/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java
index 4701633..51fb343 100644
--- a/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java
+++ b/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nif20MetadataEngine.java
@@ -26,12 +26,12 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -126,7 +126,7 @@
     
     private final Logger log = LoggerFactory.getLogger(Nif20MetadataEngine.class);
    //TODO: replace this with a real ontology
-    private final static UriRef SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"sentiment-value");
+    private final static IRI SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"sentiment-value");
     private final LiteralFactory lf = LiteralFactory.getInstance();
     
     /**
@@ -184,24 +184,24 @@
         if(words){
             activeTypes.add(SpanTypeEnum.Token);
         }
-        MGraph metadata = ci.getMetadata();
-        UriRef base = ci.getUri();
+        Graph metadata = ci.getMetadata();
+        IRI base = ci.getUri();
         ci.getLock().writeLock().lock();
         try {
             //write the context
-            UriRef text = writeSpan(metadata, base, at, language, at);
+            IRI text = writeSpan(metadata, base, at, language, at);
             metadata.add(new TripleImpl(text, Nif20.sourceUrl.getUri(), ci.getUri()));
             
             Iterator<Span> spans = at.getEnclosed(activeTypes);
-            UriRef sentence = null;
-            UriRef phrase = null;
-            UriRef word = null;
+            IRI sentence = null;
+            IRI phrase = null;
+            IRI word = null;
             boolean firstWordInSentence = true;
             while(spans.hasNext()){
                 Span span = spans.next();
                 //TODO: filter Spans based on additional requirements
                 //(1) write generic information about the span
-                UriRef current = writeSpan(metadata, base, at, language, span);
+                IRI current = writeSpan(metadata, base, at, language, span);
                 //write the context
                 metadata.add(new TripleImpl(current, Nif20.referenceContext.getUri(), text));
                 //(2) add the relations between the different spans
@@ -282,11 +282,11 @@
      * @param text the {@link AnalysedText}
      * @param language the {@link Language} or <code>null</code> if not known
      * @param span the {@link Span} to write.
-     * @return the {@link UriRef} representing the parsed {@link Span} in the
+     * @return the {@link IRI} representing the parsed {@link Span} in the
      * graph
      */
-    public UriRef writeSpan(MGraph graph, UriRef base, AnalysedText text, Language language, Span span){
-        UriRef segment = Nif20Helper.getNifRFC5147URI(base, span.getStart(), 
+    public IRI writeSpan(Graph graph, IRI base, AnalysedText text, Language language, Span span){
+        IRI segment = Nif20Helper.getNifRFC5147URI(base, span.getStart(), 
                 span.getType() == SpanTypeEnum.Text ? -1 : span.getEnd());
         if(!contextOnlyUriScheme || span.getType() == SpanTypeEnum.Text){
             graph.add(new TripleImpl(segment, RDF_TYPE, Nif20.RFC5147String.getUri()));
diff --git a/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java b/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java
index 2dd7145..6f22040 100644
--- a/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java
+++ b/enhancement-engines/nlp2rdf/src/main/java/org/apache/stanbol/enhancer/engines/nlp2rdf/engine/Nlp2RdfMetadataEngine.java
@@ -28,11 +28,11 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -78,7 +78,7 @@
 
     private final Logger log = LoggerFactory.getLogger(Nlp2RdfMetadataEngine.class);
    //TODO: replace this with a real ontology
-    private final static UriRef SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"sentiment-value");
+    private final static IRI SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"sentiment-value");
     private final LiteralFactory lf = LiteralFactory.getInstance();
     
     /**
@@ -123,20 +123,20 @@
         if(words){
             activeTypes.add(SpanTypeEnum.Token);
         }
-        MGraph metadata = ci.getMetadata();
-        UriRef base = ci.getUri();
+        Graph metadata = ci.getMetadata();
+        IRI base = ci.getUri();
         ci.getLock().writeLock().lock();
         try {
             Iterator<Span> spans = at.getEnclosed(activeTypes);
-            UriRef sentence = null;
-            UriRef phrase = null;
-            UriRef word = null;
+            IRI sentence = null;
+            IRI phrase = null;
+            IRI word = null;
             boolean firstWordInSentence = true;
             while(spans.hasNext()){
                 Span span = spans.next();
                 //TODO: filter Spans based on additional requirements
                 //(1) write generic information about the span
-                UriRef current = writeSpan(metadata, base, at, language, span);
+                IRI current = writeSpan(metadata, base, at, language, span);
                 //(2) add the relations between the different spans
                 switch (span.getType()) {
                     case Sentence:
diff --git a/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java b/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java
index e1ab1e7..a63dc67 100644
--- a/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java
+++ b/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/CalaisEntityOccurrence.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.enhancer.engines.opencalais.impl;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 
 /**
  * Stores the values extracted from the Calais entity data.
@@ -25,8 +25,8 @@
  */
 public class CalaisEntityOccurrence {
 
-    public Resource id;
-    public Resource type;
+    public RDFTerm id;
+    public RDFTerm type;
     public String name;
     public Integer offset;
     public Integer length;
diff --git a/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java b/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java
index c7e8687..9a76433 100644
--- a/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java
+++ b/enhancement-engines/opencalais/src/main/java/org/apache/stanbol/enhancer/engines/opencalais/impl/OpenCalaisEngine.java
@@ -50,19 +50,19 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
@@ -178,7 +178,7 @@
     /**
      * a map for mapping Calais classes to other classes (e.g. from dbpedia)
      */
-    private Map<UriRef,UriRef> calaisTypeMap;
+    private Map<IRI,IRI> calaisTypeMap;
     
     /**
      * the default file containing type mappings. Key and value are separated by the regular expression ' ?= ?'.
@@ -213,11 +213,11 @@
         this.calaisUrl = calaisUrl;
     }
 
-    public Map<UriRef,UriRef> getCalaisTypeMap() {
+    public Map<IRI,IRI> getCalaisTypeMap() {
       return calaisTypeMap;
     }
 
-    public void setCalaisTypeMap(Map<UriRef,UriRef> calaisTypeMap) {
+    public void setCalaisTypeMap(Map<IRI,IRI> calaisTypeMap) {
       this.calaisTypeMap = calaisTypeMap;
     }
 
@@ -245,7 +245,7 @@
             continue;
           String[] entry = line.split("\\s*=\\s*");
           if (entry.length == 2) {
-            calaisTypeMap.put(new UriRef(entry[0]), new UriRef(entry[1]));
+            calaisTypeMap.put(new IRI(entry[0]), new IRI(entry[1]));
           }
         }
         reader.close();
@@ -271,7 +271,7 @@
     }
 
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if(contentPart == null){
             throw new IllegalStateException("No ContentPart with a supported Mimetype '"
                     + SUPPORTED_MIMETYPES+"' found for ContentItem "+ci.getUri()
@@ -286,7 +286,7 @@
             throw new InvalidContentException(this, ci, e);
         }
 
-        MGraph calaisModel = getCalaisAnalysis(text, contentPart.getValue().getMimeType());
+        Graph calaisModel = getCalaisAnalysis(text, contentPart.getValue().getMimeType());
         if (calaisModel != null) {
             //Acquire a write lock on the ContentItem when adding the enhancements
             ci.getLock().writeLock().lock();
@@ -328,11 +328,11 @@
             language = null;
         }
         //TODO create TextEnhancement (form, start, end, type?) and EntityAnnotation (id, name, type)
-        HashMap<Resource, UriRef> entityAnnotationMap = new HashMap<Resource, UriRef>();
+        HashMap<RDFTerm, IRI> entityAnnotationMap = new HashMap<RDFTerm, IRI>();
         for (CalaisEntityOccurrence occ : occs) {
-            UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(
+            IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(
                     ci, this);
-            MGraph model = ci.getMetadata();
+            Graph model = ci.getMetadata();
             model.add(new TripleImpl(textAnnotation, DC_TYPE, occ.type));
             // for autotagger use the name instead of the matched term (that might be a pronoun!)
             if (onlyNERMode) {
@@ -360,7 +360,7 @@
                 entityAnnotationMap.put(occ.id,textAnnotation);
                 }
                 else {
-//                UriRef entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
+//                IRI entityAnnotation = EnhancementEngineHelper.createEntityEnhancement(ci, this);
 //                entityAnnotationMap.put(occ.id, entityAnnotation);
 //                model.add(new TripleImpl(entityAnnotation, DC_RELATION, textAnnotation));
 //                model.add(new TripleImpl(entityAnnotation, ENHANCER_ENTITY_LABEL, occ.name));
@@ -372,15 +372,15 @@
     }
 
     /**
-     * Retrieves the annotations from OpenCalais as RDF/XML. From that an MGraph is created.
+     * Retrieves the annotations from OpenCalais as RDF/XML. From that a Graph is created.
      *
      * @param text the text to send to OpenCalais
      *
-     * @return an MGraph with all annotations
+     * @return a Graph with all annotations
      *
      * @throws EngineException
      */
-    public MGraph getCalaisAnalysis(String text, String mimeType) throws EngineException {
+    public Graph getCalaisAnalysis(String text, String mimeType) throws EngineException {
         if (mimeType.equals("text/plain")) {
             mimeType = "text/raw";
         }
@@ -395,7 +395,7 @@
                 ">" +

                 "</c:processingDirectives>" +

                 "</c:params>";

-        MGraph model = null;

+        Graph model = null;

         try {

             StringBuilder postParams = new StringBuilder();

             postParams

@@ -426,18 +426,18 @@
     }
 
     /**
-     * Parses an InputStream of RDF data and produces an MGraph from them
+     * Parses an InputStream of RDF data and produces a Graph from it
      *
      * @param in The InputStream of RDF data
      * @param format the format of the RDF data
      *
-     * @return the resulting MGraph or null if the RDF serialization format is not supported by the parser
+     * @return the resulting Graph or null if the RDF serialization format is not supported by the parser
      */
-    public MGraph readModel(InputStream in, String format) {
+    public Graph readModel(InputStream in, String format) {
         Parser parser = Parser.getInstance();
         if (parser.getSupportedFormats().contains(format)) {
-            Graph graph = parser.parse(in, format);
-            MGraph model = new SimpleMGraph(graph);
+            ImmutableGraph graph = parser.parse(in, format);
+            Graph model = new SimpleGraph(graph);
             return model;
         } else {
             log.warn("Unsupported RDF format: {}\nSupported RDF formats: {}",
@@ -450,13 +450,13 @@
      * Extracts the relevant entity information from the Calais RDF data.
      * The entities and the related information are extracted by a Sparql query.
      *
-     * @param model the MGraph representing the Calais data
+     * @param model the Graph representing the Calais data
      *
      * @return a Collection of entity information
      * @throws EngineException on a {@link ParseException} while processing the
      * Sparql query.
      */
-    public Collection<CalaisEntityOccurrence> queryModel(MGraph model) throws EngineException {
+    public Collection<CalaisEntityOccurrence> queryModel(Graph model) throws EngineException {
         //TODO extract also Geo info (latitude/longitude)?
         String query =
                 "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
@@ -493,7 +493,7 @@
             while (rs.hasNext()) {
                 SolutionMapping row = rs.next();
                 CalaisEntityOccurrence occ = new CalaisEntityOccurrence();
-                Resource disambiguated = row.get("did");
+                RDFTerm disambiguated = row.get("did");
                 occ.id = (disambiguated == null ? row.get("id") : disambiguated);
                 if (onlyNERMode) {
                     occ.type = row.get("type");
@@ -502,7 +502,7 @@
                     occ.type = (disambiguated == null ? row.get("type") : row.get("dtype"));
                 }
                 if (calaisTypeMap != null) {
-                    UriRef mappedType = calaisTypeMap.get(occ.type);
+                    IRI mappedType = calaisTypeMap.get(occ.type);
                     if (mappedType != null) {
                         occ.type = mappedType;
                     }
@@ -618,7 +618,7 @@
         String standAlone = (String)properties.get(CALAIS_NER_ONLY_MODE_KEY);
         setLicenseKey(license);
         setCalaisUrl(url);
-        calaisTypeMap = new HashMap<UriRef,UriRef>();
+        calaisTypeMap = new HashMap<IRI,IRI>();
         loadTypeMap(calaisTypeMapFile);
         onlyNERMode = Boolean.parseBoolean(standAlone);
         //      this.tcManager = TcManager.getInstance();
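
The type-map file loaded above holds one mapping per line, key and value separated by '=' with optional surrounding whitespace. A standalone sketch of that parsing (whether blank lines or comments are skipped by the engine is an assumption here):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.Reader;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.clerezza.commons.rdf.IRI;

    final class TypeMapParser {

        static Map<IRI, IRI> parse(Reader in) throws IOException {
            Map<IRI, IRI> map = new HashMap<IRI, IRI>();
            BufferedReader reader = new BufferedReader(in);
            String line;
            while ((line = reader.readLine()) != null) {
                line = line.trim();
                if (line.isEmpty() || line.startsWith("#")) {
                    continue; // assumption: skip blanks and comments
                }
                String[] entry = line.split("\\s*=\\s*"); // same regex as above
                if (entry.length == 2) {
                    map.put(new IRI(entry[0]), new IRI(entry[1]));
                }
            }
            return map;
        }
    }
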
diff --git a/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java b/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java
index 4bfe6f0..521c84c 100644
--- a/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java
+++ b/enhancement-engines/opencalais/src/test/java/org/apache/stanbol/enhancer/engines/opencalais/impl/TestOpenCalaisEngine.java
@@ -26,11 +26,11 @@
 import java.util.Map;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
@@ -79,7 +79,7 @@
     @BeforeClass
     public static void oneTimeSetup() throws ConfigurationException {
         calaisExtractor = new OpenCalaisEngine();
-        calaisExtractor.setCalaisTypeMap(new HashMap<UriRef,UriRef>());
+        calaisExtractor.setCalaisTypeMap(new HashMap<IRI,IRI>());
         calaisExtractor.tcManager = TcManager.getInstance();
         if (TEST_LICENSE_KEY != null && TEST_LICENSE_KEY.matches("\\w+")) {
             calaisExtractor.setLicenseKey(TEST_LICENSE_KEY);
@@ -96,7 +96,7 @@
         String format = "application/rdf+xml";

         InputStream in = this.getClass().getClassLoader().getResourceAsStream(testFile);

         Assert.assertNotNull("failed to load resource " + testFile, in);

-        MGraph model = calaisExtractor.readModel(in, format);

+        Graph model = calaisExtractor.readModel(in, format);

         Assert.assertNotNull("model reader failed with format: " + format, model);

         Collection<CalaisEntityOccurrence> entities;

         try {

@@ -111,7 +111,7 @@
         //test the generation of the Enhancements
         ContentItem ci = wrapAsContentItem(TEST_TEXT);
         calaisExtractor.createEnhancements(entities, ci);
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, 
             LiteralFactory.getInstance().createTypedLiteral(
@@ -130,7 +130,7 @@
         ci.getMetadata().add(
             new TripleImpl(ci.getUri(), Properties.DC_LANGUAGE, LiteralFactory.getInstance()
                     .createTypedLiteral("en")));
-        MGraph model;
+        Graph model;
         try {
             model = calaisExtractor.getCalaisAnalysis(TEST_TEXT, "text/plain");
         } catch (EngineException e) {
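
The readModel(...) round trip used by the test: serialized RDF is parsed into an ImmutableGraph and copied into a mutable Graph. A minimal usage sketch with an empty RDF/XML document:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.stanbol.enhancer.engines.opencalais.impl.OpenCalaisEngine;

    public class ReadModelDemo {
        public static void main(String[] args) {
            String rdf = "<?xml version=\"1.0\"?>"
                    + "<rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"/>";
            InputStream in = new ByteArrayInputStream(rdf.getBytes(StandardCharsets.UTF_8));
            Graph model = new OpenCalaisEngine().readModel(in, "application/rdf+xml");
            System.out.println(model == null ? "unsupported format" : "triples: " + model.size());
        }
    }
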
diff --git a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java
index 87a4f25..2449def 100644
--- a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java
+++ b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/CustomNERModelEnhancementEngine.java
@@ -32,7 +32,7 @@
 
 import opennlp.tools.namefind.TokenNameFinderModel;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Property;
@@ -175,7 +175,7 @@
                             dcTypeUri,o);
                         continue configs;
                     }
-                    this.config.setMappedType(namedEntityType,new UriRef(dcTypeUri));
+                    this.config.setMappedType(namedEntityType,new IRI(dcTypeUri));
                     log.info("  add mapping {} > {}",namedEntityType,dcTypeUri);
                 }
             }
diff --git a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java
index 20e203d..60adfde 100644
--- a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java
+++ b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineConfig.java
@@ -27,7 +27,7 @@
 import java.util.TreeMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.opennlp.OpenNLP;
 import org.apache.stanbol.enhancer.nlp.model.tag.TagSet;
 import org.apache.stanbol.enhancer.nlp.ner.NerTag;
@@ -39,10 +39,10 @@
      * Default mapping for Concept types to dc:type values added for
      * TextAnnotations.
      */
-    public static final Map<String,UriRef> DEFAULT_ENTITY_TYPE_MAPPINGS;
+    public static final Map<String,IRI> DEFAULT_ENTITY_TYPE_MAPPINGS;
     
     static { //the default mappings for the default NER types
-        Map<String,UriRef> mappings = new TreeMap<String,UriRef>();
+        Map<String,IRI> mappings = new TreeMap<String,IRI>();
         mappings.put("person", OntologicalClasses.DBPEDIA_PERSON);
         mappings.put("location", OntologicalClasses.DBPEDIA_PLACE);
         mappings.put("organization", OntologicalClasses.DBPEDIA_ORGANISATION);
@@ -51,7 +51,7 @@
     
     /**
      * Holds the configured {@link NerTag}s - the mappings from the
-     * named entity name to the {@link UriRef} type used for the
+     * named entity name to the {@link IRI} type used for the
      * <code>dc:type</code> value for <code>fise:TextAnnotation</code>s
      */
     private TagSet<NerTag> nerTagSet = new TagSet<NerTag>("NER TagSet");
@@ -70,7 +70,7 @@
     private String defaultLanguage;
     
     public NEREngineConfig(){
-        for(Entry<String,UriRef> mapping : DEFAULT_ENTITY_TYPE_MAPPINGS.entrySet()){
+        for(Entry<String,IRI> mapping : DEFAULT_ENTITY_TYPE_MAPPINGS.entrySet()){
             nerTagSet.addTag(new NerTag(mapping.getKey(), mapping.getValue()));
         }
     }
@@ -171,7 +171,7 @@
      * @throws IllegalArgumentException if the parsed NamedEntity
      * type is <code>null</code> or an empty String.
      */
-    public void setMappedType(String namedEntityType,UriRef dcType){
+    public void setMappedType(String namedEntityType,IRI dcType){
         if(namedEntityType != null && !namedEntityType.isEmpty()){
             nerTagSet.addTag(new NerTag(namedEntityType, dcType));
         } else {
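
The pattern above recurs across this commit: every place that mapped a named-entity label to a dc:type now takes an org.apache.clerezza.commons.rdf.IRI where it previously took a UriRef, with the string argument unchanged. A minimal sketch of the migrated call, assuming this patch's NEREngineConfig and a DBpedia type URI used purely as an example:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.engines.opennlp.impl.NEREngineConfig;

    public class NerMappingSketch {
        public static void main(String[] args) {
            NEREngineConfig config = new NEREngineConfig();
            // new UriRef(uri) becomes new IRI(uri); the mapping semantics are unchanged
            config.setMappedType("person", new IRI("http://dbpedia.org/ontology/Person"));
        }
    }
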
diff --git a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java
index a4f13dc..ebd7b82 100644
--- a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java
+++ b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NEREngineCore.java
@@ -46,12 +46,12 @@
 import opennlp.tools.util.InvalidFormatException;
 import opennlp.tools.util.Span;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.lang.StringUtils;
 import org.apache.stanbol.commons.opennlp.OpenNLP;
 import org.apache.stanbol.commons.stanboltools.datafileprovider.DataFileProvider;
@@ -151,7 +151,7 @@
             text = null;
         } else { //no AnalysedText with tokens ...
             //fallback to processing the plain text is still supported
-            Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+            Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
             if(contentPart == null){
                 throw new IllegalStateException("No ContentPart with Mimetype '"
                     + TEXT_PLAIN_MIMETYPE+"' found for ContentItem "+ci.getUri()
@@ -240,7 +240,7 @@
                                   StringUtils.abbreviate(at != null ? at.getSpan() : text, 100) });
         }
         LiteralFactory literalFactory = LiteralFactory.getInstance();
-        MGraph g = ci.getMetadata();
+        Graph g = ci.getMetadata();
         Map<String,List<NameOccurrence>> entityNames;
         if(at != null){
             entityNames = extractNameOccurrences(nameFinderModel, at, lang);
@@ -250,16 +250,16 @@
         //lock the ContentItem while writing the RDF data for found Named Entities
         ci.getLock().writeLock().lock();
         try {
-            Map<String,UriRef> previousAnnotations = new LinkedHashMap<String,UriRef>();
+            Map<String,IRI> previousAnnotations = new LinkedHashMap<String,IRI>();
             for (Map.Entry<String,List<NameOccurrence>> nameInContext : entityNames.entrySet()) {
     
                 String name = nameInContext.getKey();
                 List<NameOccurrence> occurrences = nameInContext.getValue();
     
-                UriRef firstOccurrenceAnnotation = null;
+                IRI firstOccurrenceAnnotation = null;
     
                 for (NameOccurrence occurrence : occurrences) {
-                    UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                    IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
                     g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTED_TEXT, 
                         new PlainLiteralImpl(name, language)));
                     g.add(new TripleImpl(textAnnotation, ENHANCER_SELECTION_CONTEXT, 
@@ -283,7 +283,7 @@
                     if (firstOccurrenceAnnotation == null) {
                         // check already extracted annotations to find a first most
                         // specific occurrence
-                        for (Map.Entry<String,UriRef> entry : previousAnnotations.entrySet()) {
+                        for (Map.Entry<String,IRI> entry : previousAnnotations.entrySet()) {
                             if (entry.getKey().contains(name)) {
                                 // we have found a most specific previous
                                 // occurrence, use it as subsumption target
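
NEREngineCore shows the central rename of the migration: the mutable MGraph becomes org.apache.clerezza.commons.rdf.Graph, and TripleImpl/PlainLiteralImpl move to the impl.utils package. A minimal sketch of the triple-writing pattern used above; the fise:selected-text IRI is spelled out inline here as a stand-in for the Properties constant the engine imports:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;

    public class TripleWriteSketch {
        // writes a language-tagged literal into the (formerly MGraph, now Graph) metadata
        static void addSelectedText(Graph metadata, IRI annotation, String text, Language lang) {
            metadata.add(new TripleImpl(annotation,
                    new IRI("http://fise.iks-project.eu/ontology/selected-text"),
                    new PlainLiteralImpl(text, lang)));
        }
    }
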
diff --git a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java
index 90fa676..3d548f2 100644
--- a/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java
+++ b/enhancement-engines/opennlp/opennlp-ner/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/impl/NameOccurrence.java
@@ -16,13 +16,13 @@
  */
 package org.apache.stanbol.enhancer.engines.opennlp.impl;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public class NameOccurrence {
 
     public final String name;
     
-    public final UriRef type;
+    public final IRI type;
     
     public final Integer start;
 
@@ -32,7 +32,7 @@
 
     public final Double confidence;
 
-    public NameOccurrence(String name, Integer start, Integer end, UriRef type,
+    public NameOccurrence(String name, Integer start, Integer end, IRI type,
             String context, Double confidence) {
         this.name = name;
         this.type = type;
diff --git a/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java b/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java
index bf66545..22bba62 100644
--- a/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java
+++ b/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/ClasspathDataFileProvider.java
@@ -74,7 +74,7 @@
         // load default OpenNLP models from classpath (embedded in the defaultdata bundle)
         final String resourcePath = RESOURCE_BASE_PATH + filename;
         final URL dataFile = getClass().getClassLoader().getResource(resourcePath);
-        //log.debug("Resource {} found: {}", (in == null ? "NOT" : ""), resourcePath);
+        //log.debug("RDFTerm {} found: {}", (in == null ? "NOT" : ""), resourcePath);
         return dataFile;
     }
 }
diff --git a/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java b/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java
index b3d3237..7176153 100644
--- a/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java
+++ b/enhancement-engines/opennlp/opennlp-ner/src/test/java/org/apache/stanbol/enhancer/engines/opennlp/impl/TestNamedEntityExtractionEnhancementEngine.java
@@ -27,11 +27,11 @@
 import java.util.Map;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.commons.opennlp.OpenNLP;
 import org.apache.stanbol.commons.stanboltools.datafileprovider.DataFileProvider;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
@@ -87,7 +87,7 @@
 
     public static ContentItem wrapAsContentItem(final String id,
             final String text, String language) throws IOException {
-    	ContentItem ci =  ciFactory.createContentItem(new UriRef(id),new StringSource(text));
+    	ContentItem ci =  ciFactory.createContentItem(new IRI(id),new StringSource(text));
     	if(language != null){
     	    ci.getMetadata().add(new TripleImpl(ci.getUri(), DC_LANGUAGE, new PlainLiteralImpl(language)));
     	}
@@ -151,12 +151,12 @@
             throws EngineException, IOException {
         ContentItem ci = wrapAsContentItem("urn:test:content-item:single:sentence", SINGLE_SENTENCE,"en");
         nerEngine.computeEnhancements(ci);
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(nerEngine.getClass().getName()));
         //adding null as expected for confidence makes it a required property
         expectedValues.put(Properties.ENHANCER_CONFIDENCE, null);
-        MGraph g = ci.getMetadata();
+        Graph g = ci.getMetadata();
         int textAnnotationCount = validateAllTextAnnotations(g,SINGLE_SENTENCE,expectedValues);
         assertEquals(3, textAnnotationCount);
     }
@@ -167,16 +167,16 @@
         nerEngine.config.getDefaultModelTypes().clear(); 
         //but instead a custom model provided by the test data
         nerEngine.config.addCustomNameFinderModel("en", "bionlp2004-DNA-en.bin");
-        nerEngine.config.setMappedType("DNA", new UriRef("http://www.bootstrep.eu/ontology/GRO#DNA"));
+        nerEngine.config.setMappedType("DNA", new IRI("http://www.bootstrep.eu/ontology/GRO#DNA"));
         nerEngine.computeEnhancements(ci);
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(nerEngine.getClass().getName()));
         //adding null as expected for confidence makes it a required property
         expectedValues.put(Properties.ENHANCER_CONFIDENCE, null);
         //and dc:type values MUST be the URI set as mapped type
-        expectedValues.put(Properties.DC_TYPE, new UriRef("http://www.bootstrep.eu/ontology/GRO#DNA"));
-        MGraph g = ci.getMetadata();
+        expectedValues.put(Properties.DC_TYPE, new IRI("http://www.bootstrep.eu/ontology/GRO#DNA"));
+        Graph g = ci.getMetadata();
         int textAnnotationCount = validateAllTextAnnotations(g,EHEALTH,expectedValues);
         assertEquals(7, textAnnotationCount);
     }
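
In the tests, Resource becomes RDFTerm as the supertype of all RDF nodes, so expectation maps turn into Map<IRI,RDFTerm>. A minimal sketch of that shape, with a dcterms property IRI as a stand-in for the Properties constants used above:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;

    public class ExpectedValuesSketch {
        public static void main(String[] args) {
            // keys are property IRIs; values may be any node type (IRI, BlankNode, Literal)
            Map<IRI, RDFTerm> expected = new HashMap<IRI, RDFTerm>();
            expected.put(new IRI("http://purl.org/dc/terms/language"),
                    new PlainLiteralImpl("en"));
            System.out.println(expected);
        }
    }
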
diff --git a/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java b/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java
index 0eb8b3e..1d3dd8b 100644
--- a/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java
+++ b/enhancement-engines/opennlp/opennlp-pos/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/pos/services/OpenNlpPosTaggingEngine.java
@@ -38,7 +38,7 @@
 import opennlp.tools.tokenize.Tokenizer;
 import opennlp.tools.util.Sequence;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -161,7 +161,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
diff --git a/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java b/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java
index a409060..5b04eae 100644
--- a/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java
+++ b/enhancement-engines/opennlp/opennlp-sentence/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/sentence/impl/OpenNlpSentenceDetectionEngine.java
@@ -28,7 +28,7 @@
 import opennlp.tools.sentdetect.SentenceDetectorME;
 import opennlp.tools.sentdetect.SentenceModel;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -131,7 +131,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
diff --git a/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java b/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java
index 9e26cda..a701bf5 100644
--- a/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java
+++ b/enhancement-engines/opennlp/opennlp-token/src/main/java/org/apache/stanbol/enhancer/engines/opennlp/token/impl/OpenNlpTokenizerEngine.java
@@ -30,7 +30,7 @@
 import opennlp.tools.tokenize.TokenizerME;
 import opennlp.tools.tokenize.TokenizerModel;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -137,7 +137,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
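
The three OpenNLP engines above share one canEnhance() guard; only the key type of the plain-text entry changes from UriRef to IRI. A sketch of that guard, assuming the entry obtained from NlpEngineHelper.getPlainText(engine, ci, false) is passed in directly; the two int constants are placeholders standing in for the EnhancementEngine values:

    import java.util.Map;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.servicesapi.Blob;

    public class CanEnhanceSketch {
        // placeholders for EnhancementEngine.CANNOT_ENHANCE / ENHANCE_ASYNC
        static final int CANNOT_ENHANCE = 0;
        static final int ENHANCE_ASYNC = 2;

        // the guard shared by the engines above
        static int canEnhance(Map.Entry<IRI, Blob> entry) {
            return (entry == null || entry.getValue() == null) ? CANNOT_ENHANCE : ENHANCE_ASYNC;
        }
    }
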
diff --git a/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java b/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
index c6e41ce..f9d91db 100644
--- a/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
+++ b/enhancement-engines/refactor/src/main/java/org/apache/stanbol/enhancer/engines/refactor/RefactorEnhancementEngine.java
@@ -30,11 +30,10 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -44,7 +44,7 @@
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.owl.transformation.OWLAPIToClerezzaConverter;
 import org.apache.stanbol.enhancer.engines.refactor.dereferencer.Dereferencer;
 import org.apache.stanbol.enhancer.engines.refactor.dereferencer.DereferencerImpl;
@@ -85,7 +85,6 @@
 import org.osgi.service.component.ComponentFactory;
 import org.osgi.service.component.ComponentInstance;
 import org.semanticweb.owlapi.apibinding.OWLManager;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyCreationException;
 import org.semanticweb.owlapi.model.OWLOntologyID;
@@ -123,7 +122,7 @@
      */
     private class GraphContentSourceWithPhysicalIRI extends GraphContentInputSource {
 
-        public GraphContentSourceWithPhysicalIRI(InputStream content, IRI physicalIri) {
+        public GraphContentSourceWithPhysicalIRI(InputStream content, org.semanticweb.owlapi.model.IRI physicalIri) {
             super(content);
             bindPhysicalOrigin(Origin.create(physicalIri));
         }
@@ -238,31 +237,31 @@
         log.debug("Refactor enhancement job will run in session '{}'.", session.getID());
 
         // Retrieve and filter the metadata graph for entities recognized by the engines.
-        final MGraph metadataGraph = ci.getMetadata(), signaturesGraph = new IndexedMGraph();
+        final Graph metadataGraph = ci.getMetadata(), signaturesGraph = new IndexedGraph();
         // FIXME the Stanbol Enhancer vocabulary should be retrieved from somewhere in the enhancer API.
-        final UriRef ENHANCER_ENTITY_REFERENCE = new UriRef(
+        final IRI ENHANCER_ENTITY_REFERENCE = new IRI(
                 "http://fise.iks-project.eu/ontology/entity-reference");
         Iterator<Triple> tripleIt = metadataGraph.filter(null, ENHANCER_ENTITY_REFERENCE, null);
         while (tripleIt.hasNext()) {
             // Get the entity URI
-            Resource obj = tripleIt.next().getObject();
-            if (!(obj instanceof UriRef)) {
-                log.warn("Invalid UriRef for entity reference {}. Skipping.", obj);
+            RDFTerm obj = tripleIt.next().getObject();
+            if (!(obj instanceof IRI)) {
+                log.warn("Invalid IRI for entity reference {}. Skipping.", obj);
                 continue;
             }
-            final String entityReference = ((UriRef) obj).getUnicodeString();
+            final String entityReference = ((IRI) obj).getUnicodeString();
             log.debug("Trying to resolve entity {}", entityReference);
 
             // Populate the entity signatures graph, by querying either the Entity Hub or the dereferencer.
             if (engineConfiguration.isEntityHubUsed()) {
-                MGraph result = populateWithEntity(entityReference, signaturesGraph);
+                Graph result = populateWithEntity(entityReference, signaturesGraph);
                 if (result != signaturesGraph && result != null) {
                     log.warn("Entity Hub query added triples to a new graph instead of populating the supplied one!"
                              + " New signatures will be discarded.");
                 }
             } else try {
-                OntologyInputSource<TripleCollection> source = new GraphContentSourceWithPhysicalIRI(
-                        dereferencer.resolve(entityReference), IRI.create(entityReference));
+                OntologyInputSource<Graph> source = new GraphContentSourceWithPhysicalIRI(
+                        dereferencer.resolve(entityReference), org.semanticweb.owlapi.model.IRI.create(entityReference));
                 signaturesGraph.addAll(source.getRootOntology());
             } catch (FileNotFoundException e) {
                 log.error("Failed to dereference entity " + entityReference + ". Skipping.", e);
@@ -301,17 +300,17 @@
              * 
              * To perform the refactoring of the ontology to a given vocabulary we use the Stanbol Refactor.
              */
-            Recipe recipe = ruleStore.getRecipe(new UriRef(engineConfiguration.getRecipeId()));
+            Recipe recipe = ruleStore.getRecipe(new IRI(engineConfiguration.getRecipeId()));
 
             log.debug("Recipe {} contains {} rules.", recipe, recipe.getRuleList().size());
             log.debug("The ontology to be refactor is {}", ontology);
 
-            TripleCollection tc = refactorer.graphRefactoring(
-                OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(ontology), recipe);
+            Graph tc = refactorer.graphRefactoring(
+                OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology), recipe);
 
             /*
              * ontology = refactorer .ontologyRefactoring(ontology,
-             * IRI.create(engineConfiguration.getRecipeId()));
+             * org.semanticweb.owlapi.model.IRI.create(engineConfiguration.getRecipeId()));
              */
             /*
             * The newly generated ontology is converted to Clerezza format and then added or substituted to
@@ -346,7 +345,7 @@
             for (OWLOntologyID id : session.listManagedOntologies()) {
                 try {
                     String key = ontologyProvider.getKey(id.getOntologyIRI());
-                    ontologyProvider.getStore().deleteTripleCollection(new UriRef(key));
+                    ontologyProvider.getStore().deleteGraph(new IRI(key));
                 } catch (Exception ex) {
                     log.error("Failed to delete triple collection " + id, ex);
                     continue;
@@ -374,7 +373,7 @@
 
         // Deactivation clears all the rules and releases OntoNet resources.
 
-        UriRef recipeId = new UriRef(engineConfiguration.getRecipeId());
+        IRI recipeId = new IRI(engineConfiguration.getRecipeId());
         try {
             // step 1: get all the rules
             log.debug("Recipe {} and its associated rules will be removed from the rule store.", recipeId);
@@ -420,9 +419,9 @@
      *            {@link String}
      * @return the {@link OWLOntology} of the entity
      */
-    private MGraph populateWithEntity(String entityURI, MGraph target) {
+    private Graph populateWithEntity(String entityURI, Graph target) {
         log.debug("Requesting signature of entity {}", entityURI);
-        MGraph graph = target != null ? target : new IndexedMGraph();
+        Graph graph = target != null ? target : new IndexedGraph();
         // Query the Entity Hub
         Entity signature = referencedSiteManager.getEntity(entityURI);
         if (signature != null) {
@@ -451,7 +450,7 @@
      */
     private void initEngine(RefactorEnhancementEngineConf engineConfiguration) {
 
-        // IRI dulcifierScopeIRI = IRI.create((String) context.getProperties().get(SCOPE));
+        // IRI dulcifierScopeIRI = org.semanticweb.owlapi.model.IRI.create((String) context.getProperties().get(SCOPE));
         String scopeId = engineConfiguration.getScope();
 
         // Create or get the scope with the configured ID
@@ -470,11 +469,11 @@
         try {
             log.info("Will now load requested ontology into the core space of scope '{}'.", scopeId);
             OWLOntologyManager sharedManager = OWLManager.createOWLOntologyManager();
-            IRI physicalIRI = null;
+            org.semanticweb.owlapi.model.IRI physicalIRI = null;
             for (int o = 0; o < coreScopeOntologySet.length; o++) {
                 String url = coreScopeOntologySet[o];
                 try {
-                    physicalIRI = IRI.create(url);
+                    physicalIRI = org.semanticweb.owlapi.model.IRI.create(url);
                 } catch (Exception e) {
                     failed.add(url);
                 }
@@ -506,7 +505,7 @@
         String recipeId = engineConfiguration.getRecipeId();
         Recipe recipe = null;
         try {
-            recipe = ruleStore.createRecipe(new UriRef(recipeId), null);
+            recipe = ruleStore.createRecipe(new IRI(recipeId), null);
         } catch (AlreadyExistingRecipeException e1) {
             log.error("A recipe with ID {} already exists in the store.", recipeId);
         }
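
The refactor engine is the one file where the migration causes a name clash: Clerezza's new IRI collides with org.semanticweb.owlapi.model.IRI, so the OWLAPI import is dropped and its IRI kept fully qualified. A minimal sketch of the disambiguation; both URIs are placeholders:

    import org.apache.clerezza.commons.rdf.IRI;

    public class IriClashSketch {
        public static void main(String[] args) {
            // Clerezza term: recipe and graph identifiers
            IRI recipeId = new IRI("urn:example:recipe");
            // OWLAPI term: stays fully qualified because the simple name is taken
            org.semanticweb.owlapi.model.IRI physicalIri =
                    org.semanticweb.owlapi.model.IRI.create("http://example.org/ontology.owl");
            System.out.println(recipeId.getUnicodeString() + " / " + physicalIri);
        }
    }
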
diff --git a/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java b/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java
index 2f4e630..ac84370 100644
--- a/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java
+++ b/enhancement-engines/restful-langident/src/main/java/org/apache/stanbol/enhancer/engines/restful/langident/impl/RestfulLangidentEngine.java
@@ -40,10 +40,10 @@
 import java.util.Map.Entry;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -176,7 +176,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
@@ -202,7 +202,7 @@
     @Override
     public void computeEnhancements(final ContentItem ci) throws EngineException {
         //get the plain text Blob
-        Map.Entry<UriRef,Blob> textBlob = getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> textBlob = getPlainText(this, ci, false);
         Blob blob = textBlob.getValue();
         //send the text to the server
         final HttpPost request = new HttpPost(serviceUrl);
@@ -230,7 +230,7 @@
                 throw RuntimeException.class.cast(e);
             }
         }
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         log.debug("Detected Languages for ContentItem {} and Blob {}");
         ci.getLock().writeLock().lock();
         try { //write TextAnnotations for the detected languages
@@ -238,7 +238,7 @@
                 // add a hypothesis
                 log.debug(" > {}@{}", suggestion.getLanguage(),
                     suggestion.hasProbability() ? suggestion.getProbability() : "-,--");
-                UriRef textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                IRI textEnhancement = EnhancementEngineHelper.createTextEnhancement(ci, this);
                 metadata.add(new TripleImpl(textEnhancement, DC_LANGUAGE, new PlainLiteralImpl(suggestion.getLanguage())));
                 metadata.add(new TripleImpl(textEnhancement, DC_TYPE, DCTERMS_LINGUISTIC_SYSTEM));
                 if(suggestion.hasProbability()){
@@ -407,8 +407,8 @@
      * @throws IllegalStateException if exception is <code>true</code> and the
      * language could not be retrieved from the parsed {@link ContentItem}.
      */
-    public static Entry<UriRef,Blob> getPlainText(EnhancementEngine engine, ContentItem ci, boolean exception) {
-        Entry<UriRef,Blob> textBlob = ContentItemHelper.getBlob(
+    public static Entry<IRI,Blob> getPlainText(EnhancementEngine engine, ContentItem ci, boolean exception) {
+        Entry<IRI,Blob> textBlob = ContentItemHelper.getBlob(
             ci, singleton("text/plain"));
         if(textBlob != null) {
             return textBlob;
diff --git a/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java b/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java
index bbfa968..280db69 100644
--- a/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java
+++ b/enhancement-engines/restful-nlp/src/main/java/org/apache/stanbol/enhancer/engines/restful/nlp/impl/RestfulNlpAnalysisEngine.java
@@ -44,12 +44,12 @@
 import java.util.Set;
 import java.util.StringTokenizer;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -179,23 +179,23 @@
     /**
      * The property used to write the sum of all positive classified words
      */
-    public static final UriRef POSITIVE_SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"positive-sentiment");
+    public static final IRI POSITIVE_SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"positive-sentiment");
     /**
      * The property used to write the sum of all negative classified words
      */
-    public static final UriRef NEGATIVE_SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"negative-sentiment");
+    public static final IRI NEGATIVE_SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"negative-sentiment");
     /**
      * The sentiment of the section (sum of positive and negative classifications)
      */
-    public static final UriRef SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"sentiment");
+    public static final IRI SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"sentiment");
     /**
      * The dc:type value used for fise:TextAnnotations indicating a Sentiment
      */
-    public static final UriRef SENTIMENT_TYPE = new UriRef(NamespaceEnum.fise+"Sentiment");
+    public static final IRI SENTIMENT_TYPE = new IRI(NamespaceEnum.fise+"Sentiment");
     /**
      * The dc:type value used for the sentiment annotation of the whole document
      */
-    public static final UriRef DOCUMENT_SENTIMENT_TYPE = new UriRef(NamespaceEnum.fise+"DocumentSentiment");
+    public static final IRI DOCUMENT_SENTIMENT_TYPE = new IRI(NamespaceEnum.fise+"DocumentSentiment");
 
     private static final Map<String,Object> SERVICE_PROPERTIES;
     static {
@@ -254,7 +254,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
@@ -340,7 +340,7 @@
 
             Iterator<Span> spans = at.getEnclosed(EnumSet.of(SpanTypeEnum.Sentence,SpanTypeEnum.Chunk));
             Sentence context = null;
-            MGraph metadata = ci.getMetadata();
+            Graph metadata = ci.getMetadata();
             Language lang = new Language(language);
             LiteralFactory lf = LiteralFactory.getInstance();
             ci.getLock().writeLock().lock();
@@ -354,7 +354,7 @@
                         default:
                             Value<NerTag> nerAnno = span.getAnnotation(NER_ANNOTATION);
                             if(nerAnno != null){
-                                UriRef ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                                IRI ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
                                 //add span related data
                                 metadata.add(new TripleImpl(ta, ENHANCER_SELECTED_TEXT, 
                                     new PlainLiteralImpl(span.getSpan(), lang)));
@@ -382,7 +382,7 @@
                                 Double sentiment = sentimentAnnotation.value();
 
 								//Create a fise:TextAnnotation for the sentiment
-                                UriRef ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                                IRI ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
                                 metadata.add(new TripleImpl(ta, ENHANCER_START,
                                         lf.createTypedLiteral(span.getStart())));
                                 metadata.add(new TripleImpl(ta, ENHANCER_END,
@@ -393,7 +393,7 @@
                                 //add the generic dc:type used for all Sentiment annotation
                                 metadata.add(new TripleImpl(ta, DC_TYPE, SENTIMENT_TYPE));
 								//determine the specific dc:type for the sentiment annotation
-                                UriRef ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(span.getType());
+                                IRI ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(span.getType());
                                 if(ssoType != null){
                                     metadata.add(new TripleImpl(ta, DC_TYPE, ssoType));
                                 }
@@ -416,7 +416,7 @@
 
                 //Add the annotation for the overall sentiment of the document 
                 if ( sentimentCount > 0 ) {
-                UriRef ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
+                IRI ta = EnhancementEngineHelper.createTextEnhancement(ci, this);
                     //calculate the average sentiment for a document
                     //TODO: Think on a better way to calculate a general sentiment value for a document.
                     metadata.add(new TripleImpl(ta, SENTIMENT_PROPERTY,
diff --git a/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java b/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java
index 38a1024..db5098e 100644
--- a/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java
+++ b/enhancement-engines/sentiment-summarization/src/main/java/org/apache/stanbol/enhancer/engines/sentiment/summarize/SentimentSummarizationEngine.java
@@ -35,12 +35,12 @@
 import java.util.NavigableMap;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -123,23 +123,23 @@
     /**
      * The property used to write the sum of all positive classified words
      */
-    public static final UriRef POSITIVE_SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"positive-sentiment");
+    public static final IRI POSITIVE_SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"positive-sentiment");
     /**
      * The property used to write the sum of all negative classified words
      */
-    public static final UriRef NEGATIVE_SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"negative-sentiment");
+    public static final IRI NEGATIVE_SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"negative-sentiment");
     /**
      * The sentiment of the section (sum of positive and negative classifications)
      */
-    public static final UriRef SENTIMENT_PROPERTY = new UriRef(NamespaceEnum.fise+"sentiment");
+    public static final IRI SENTIMENT_PROPERTY = new IRI(NamespaceEnum.fise+"sentiment");
     /**
      * The dc:type value used for fise:TextAnnotations indicating a Sentiment
      */
-    public static final UriRef SENTIMENT_TYPE = new UriRef(NamespaceEnum.fise+"Sentiment");
+    public static final IRI SENTIMENT_TYPE = new IRI(NamespaceEnum.fise+"Sentiment");
     /**
      * The dc:type value used for the sentiment annotation of the whole document
      */
-    public static final UriRef DOCUMENT_SENTIMENT_TYPE = new UriRef(NamespaceEnum.fise+"DocumentSentiment");
+    public static final IRI DOCUMENT_SENTIMENT_TYPE = new IRI(NamespaceEnum.fise+"DocumentSentiment");
 
 
     private static final int DEFAULT_NEGATION_CONTEXT = 2;
@@ -590,7 +590,7 @@
     
     private void writeSentimentEnhancements(ContentItem ci, List<SentimentPhrase> sentimentPhrases, AnalysedText at, Language lang) {
         // TODO Auto-generated method stub
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         Sentence currentSentence = null;
         final List<SentimentPhrase> sentencePhrases = new ArrayList<SentimentPhrase>();
         for(SentimentPhrase sentPhrase : sentimentPhrases){
@@ -606,7 +606,7 @@
                 }
             }
             if(writeSentimentPhrases){
-                UriRef enh = createTextEnhancement(ci, this);
+                IRI enh = createTextEnhancement(ci, this);
                 String phraseText = at.getSpan().substring(sentPhrase.getStartIndex(), sentPhrase.getEndIndex());
                 metadata.add(new TripleImpl(enh, ENHANCER_SELECTED_TEXT, 
                     new PlainLiteralImpl(phraseText, lang)));
@@ -634,7 +634,7 @@
                     lf.createTypedLiteral(sentPhrase.getSentiment())));               
                 //add the Sentiment type as well as the type of the SSO Ontology
                 metadata.add(new TripleImpl(enh, DC_TYPE, SENTIMENT_TYPE));
-                UriRef ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(SpanTypeEnum.Chunk);
+                IRI ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(SpanTypeEnum.Chunk);
                 if(ssoType != null){
                     metadata.add(new TripleImpl(enh, DC_TYPE, ssoType));
                 }
@@ -665,8 +665,8 @@
         if(section == null || sectionPhrases == null || sectionPhrases.isEmpty()){
             return; //nothing to do
         }
-        UriRef enh = createTextEnhancement(ci, this);
-        MGraph metadata = ci.getMetadata();
+        IRI enh = createTextEnhancement(ci, this);
+        Graph metadata = ci.getMetadata();
         if(section.getType() == SpanTypeEnum.Sentence){
             //TODO use the fise:TextAnnotation new model for 
             //add start/end positions
@@ -708,7 +708,7 @@
 
         //add the Sentiment type as well as the type of the SSO Ontology
         metadata.add(new TripleImpl(enh, DC_TYPE, SENTIMENT_TYPE));
-        UriRef ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(section.getType());
+        IRI ssoType = NIFHelper.SPAN_TYPE_TO_SSO_TYPE.get(section.getType());
         if(ssoType != null){
             metadata.add(new TripleImpl(enh, DC_TYPE, ssoType));
         }
diff --git a/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java b/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java
index afe8f76..a16cb96 100644
--- a/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java
+++ b/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnSentenceEngine.java
@@ -26,7 +26,7 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.input.CharSequenceReader;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -110,7 +110,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
diff --git a/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java b/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java
index 001d509..9086b72 100644
--- a/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java
+++ b/enhancement-engines/smartcn-token/src/main/java/org/apache/stanbol/enhancer/engines/smartcn/impl/SmartcnTokenizerEngine.java
@@ -26,7 +26,7 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.input.CharSequenceReader;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -110,7 +110,7 @@
     @Override
     public int canEnhance(ContentItem ci) throws EngineException {
         // check if content is present
-        Map.Entry<UriRef,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
+        Map.Entry<IRI,Blob> entry = NlpEngineHelper.getPlainText(this, ci, false);
         if(entry == null || entry.getValue() == null) {
             return CANNOT_ENHANCE;
         }
diff --git a/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java b/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java
index 6b858a1..726ea1a 100644
--- a/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java
+++ b/enhancement-engines/textannotationnewmodel/src/main/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationsNewModelEngine.java
@@ -35,15 +35,15 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 
-import org.apache.clerezza.rdf.core.Language;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Properties;
@@ -115,7 +117,7 @@
      */
     @Override
     public void computeEnhancements(ContentItem contentItem) throws EngineException {
-        Entry<UriRef,Blob> textBlob = getBlob(contentItem, supportedMimeTypes);
+        Entry<IRI,Blob> textBlob = getBlob(contentItem, supportedMimeTypes);
         if(textBlob == null){
             return;
         }
@@ -128,13 +130,13 @@
             throw new EngineException(this, contentItem, "Unable to read Plain Text Blob", e);
         }
         Set<Triple> addedTriples = new HashSet<Triple>();
-        MGraph metadata = contentItem.getMetadata();
+        Graph metadata = contentItem.getMetadata();
         //extract all the necessary information within a read lock
         contentItem.getLock().readLock().lock();
         try {
             Iterator<Triple> it = metadata.filter(null, RDF_TYPE, ENHANCER_TEXTANNOTATION);
             while(it.hasNext()){
-                NonLiteral ta = it.next().getSubject();
+                BlankNodeOrIRI ta = it.next().getSubject();
                 boolean hasPrefix = metadata.filter(ta, ENHANCER_SELECTION_PREFIX, null).hasNext();
                 boolean hasSuffix = metadata.filter(ta, ENHANCER_SELECTION_SUFFIX, null).hasNext();
                 boolean hasSelected = metadata.filter(ta, ENHANCER_SELECTED_TEXT, null).hasNext();
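
On the subject position, NonLiteral becomes BlankNodeOrIRI. A minimal sketch of the filter loop above, with the RDF_TYPE and ENHANCER_TEXTANNOTATION constants passed in as plain IRIs instead of imported:

    import java.util.Iterator;
    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;

    public class FilterSketch {
        // NonLiteral (old API) -> BlankNodeOrIRI (new API) for triple subjects
        static void forEachTextAnnotation(Graph metadata, IRI rdfType, IRI taType) {
            Iterator<Triple> it = metadata.filter(null, rdfType, taType);
            while (it.hasNext()) {
                BlankNodeOrIRI subject = it.next().getSubject();
                System.out.println(subject);
            }
        }
    }
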
diff --git a/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java b/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java
index 98167a8..fbfff80 100644
--- a/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java
+++ b/enhancement-engines/textannotationnewmodel/src/test/java/org/apache/stanbol/enhancer/engines/textannotationnewmodel/impl/TextAnnotationNewModelEngineTest.java
@@ -29,15 +29,13 @@
 import java.util.Iterator;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
@@ -57,7 +55,7 @@
 import org.osgi.service.cm.ConfigurationException;
 import org.osgi.service.component.ComponentContext;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 
 public class TextAnnotationNewModelEngineTest {
     
@@ -66,8 +64,8 @@
     private static final String TEST_ENHANCEMENTS = "enhancement-results.rdf";
     
     private static final JenaParserProvider rdfParser = new JenaParserProvider();
-    private static MGraph origEnhancements;
-    private static UriRef ciUri;
+    private static Graph origEnhancements;
+    private static IRI ciUri;
     
     private ContentItem contentItem;
     
@@ -80,15 +78,15 @@
     public static void init() throws IOException, ConfigurationException {
         InputStream in = TextAnnotationNewModelEngineTest.class.getClassLoader().getResourceAsStream(TEST_ENHANCEMENTS);
         Assert.assertNotNull("Unable to load reaource '"+TEST_ENHANCEMENTS+"' via Classpath",in);
-        origEnhancements = new IndexedMGraph();
+        origEnhancements = new IndexedGraph();
         rdfParser.parse(origEnhancements, in, SupportedFormat.RDF_XML, null);
         Assert.assertFalse(origEnhancements.isEmpty());
        //parse the ID of the ContentItem from the enhancements
         Iterator<Triple> it = origEnhancements.filter(null, Properties.ENHANCER_EXTRACTED_FROM, null);
         Assert.assertTrue(it.hasNext());
-        Resource id = it.next().getObject();
-        Assert.assertTrue(id instanceof UriRef);
-        ciUri = (UriRef)id;
+        RDFTerm id = it.next().getObject();
+        Assert.assertTrue(id instanceof IRI);
+        ciUri = (IRI)id;
         //validate that the enhancements in the file are valid
         //NOTE: the input data are no longer fully valid to test some features of this engine
         //      because of that this initial test is deactivated
@@ -108,7 +106,7 @@
     @Before
     public void initTest() throws IOException {
         contentItem = ciFactory.createContentItem(ciUri, 
-            new StringSource(SINGLE_SENTENCE), new IndexedMGraph(origEnhancements));
+            new StringSource(SINGLE_SENTENCE), new IndexedGraph(origEnhancements));
     }
     
     @Test
@@ -116,15 +114,15 @@
         Assert.assertEquals(EnhancementEngine.ENHANCE_ASYNC, engine.canEnhance(contentItem));
         engine.computeEnhancements(contentItem);
         //validate
-        MGraph g = contentItem.getMetadata();
+        Graph g = contentItem.getMetadata();
         Iterator<Triple> it = g.filter(null, RDF_TYPE, ENHANCER_TEXTANNOTATION);
         Assert.assertTrue(it.hasNext());
         while(it.hasNext()){
-            NonLiteral ta = it.next().getSubject();
-            Assert.assertTrue(ta instanceof UriRef);
-            Map<UriRef,Resource> expected = new HashMap<UriRef,Resource>();
+            BlankNodeOrIRI ta = it.next().getSubject();
+            Assert.assertTrue(ta instanceof IRI);
+            Map<IRI,RDFTerm> expected = new HashMap<IRI,RDFTerm>();
             expected.put(Properties.ENHANCER_EXTRACTED_FROM, contentItem.getUri());
-            EnhancementStructureHelper.validateTextAnnotation(g, (UriRef)ta, SINGLE_SENTENCE, expected,true);
+            EnhancementStructureHelper.validateTextAnnotation(g, (IRI)ta, SINGLE_SENTENCE, expected,true);
         }
         
     }
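
IndexedMGraph disappears along with MGraph; IndexedGraph is now the mutable indexed implementation, including the copy constructor this test relies on. A minimal sketch assuming only this commit's IndexedGraph and placeholder URNs:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    public class IndexedGraphSketch {
        public static void main(String[] args) {
            Graph g = new IndexedGraph();
            IRI s = new IRI("urn:example:subject");
            g.add(new TripleImpl(s, new IRI("urn:example:property"), s));
            Graph copy = new IndexedGraph(g); // copy constructor, as used in initTest()
            System.out.println(copy.size());
        }
    }
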
diff --git a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java
index 8143a53..1a2df22 100644
--- a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java
+++ b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/TikaEngine.java
@@ -43,10 +43,10 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
@@ -298,10 +298,10 @@
                 }
             }
             String random = randomUUID().toString();
-            UriRef textBlobUri = new UriRef("urn:tika:text:"+random);
+            IRI textBlobUri = new IRI("urn:tika:text:"+random);
             ci.addPart(textBlobUri, plainTextSink.getBlob());
             if(xhtmlHandler != null){
-                UriRef xhtmlBlobUri = new UriRef("urn:tika:xhtml:"+random);
+                IRI xhtmlBlobUri = new IRI("urn:tika:xhtml:"+random);
                 ci.addPart(xhtmlBlobUri,  xhtmlSink.getBlob());
             }
             //add the extracted metadata
@@ -312,15 +312,15 @@
             }
             ci.getLock().writeLock().lock();
             try {
-                MGraph graph = ci.getMetadata();
-                UriRef id = ci.getUri();
+                Graph graph = ci.getMetadata();
+                IRI id = ci.getUri();
                 Set<String> mapped = ontologyMappings.apply(graph, id, metadata);
                 if(includeUnmappedProperties){
                     Set<String> unmapped = new HashSet<String>(Arrays.asList(metadata.names()));
                     unmapped.removeAll(mapped);
                     for(String name : unmapped){
                         if(name.indexOf(':') >=0 || includeAllUnmappedProperties){ //only mapped
-                            UriRef prop = new UriRef(new StringBuilder(TIKA_URN_PREFIX).append(name).toString());
+                            IRI prop = new IRI(new StringBuilder(TIKA_URN_PREFIX).append(name).toString());
                             for(String value : metadata.getValues(name)){
                                 //TODO: without the Property for the name we have no datatype
                                 //      information ... so we add PlainLiterals for now
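
The Tika engine keys extracted content parts by IRI now. The urn:tika prefixes below are copied from the code above; the rest is a sketch of just the identifier construction:

    import static java.util.UUID.randomUUID;
    import org.apache.clerezza.commons.rdf.IRI;

    public class PartUriSketch {
        public static void main(String[] args) {
            String random = randomUUID().toString();
            // part keys are plain IRIs now: new UriRef(...) -> new IRI(...)
            IRI textBlobUri = new IRI("urn:tika:text:" + random);
            IRI xhtmlBlobUri = new IRI("urn:tika:xhtml:" + random);
            System.out.println(textBlobUri.getUnicodeString());
            System.out.println(xhtmlBlobUri.getUnicodeString());
        }
    }
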
diff --git a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java
index 098de45..70cc5f6 100644
--- a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java
+++ b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ConstantMapping.java
@@ -21,19 +21,19 @@
 import java.util.Collections;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.tika.metadata.Metadata;
 
 public class ConstantMapping extends Mapping{
 
     
-    private Collection<Resource> values;
+    private Collection<RDFTerm> values;
 
-    public ConstantMapping(UriRef ontProperty, Resource...values) {
+    public ConstantMapping(IRI ontProperty, RDFTerm...values) {
         super(ontProperty, null);
         if(values == null || values.length < 1){
             throw new IllegalArgumentException("The parsed values MUST NOT be NULL nor an empty array");
@@ -46,8 +46,8 @@
     }
 
     @Override
-    public boolean apply(MGraph graph, NonLiteral subject, Metadata metadata) {
-        for(Resource value : values){
+    public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
+        for(RDFTerm value : values){
             graph.add(new TripleImpl(subject, ontProperty, value));
             mappingLogger.log(subject, ontProperty, null, value);
         }
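
This hunk moves ConstantMapping onto the new type hierarchy (Graph for MGraph, BlankNodeOrIRI for NonLiteral, RDFTerm for Resource, IRI for UriRef). A minimal, hypothetical usage sketch against the migrated signatures; the property and type IRIs are illustrative:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.enhancer.engines.tika.metadata.ConstantMapping;
    import org.apache.tika.metadata.Metadata;

    public class ConstantMappingSketch {
        public static void main(String[] args) {
            // adds the same constant object value for every processed item
            ConstantMapping mapping = new ConstantMapping(
                    new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
                    new IRI("http://www.w3.org/ns/ma-ont#MediaResource"));
            Graph graph = new SimpleGraph();
            mapping.apply(graph, new IRI("urn:content:item1"), new Metadata());
            System.out.println(graph.size()); // 1
        }
    }
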
diff --git a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java
index fd705f3..f72a4a9 100644
--- a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java
+++ b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/Mapping.java
@@ -35,19 +35,17 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
+import org.apache.clerezza.commons.rdf.BlankNode;
 import org.apache.clerezza.rdf.core.InvalidLiteralTypeException;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.clerezza.rdf.core.impl.TypedLiteralImpl;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TypedLiteralImpl;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.clerezza.rdf.ontologies.XSD;
 import org.apache.tika.metadata.DublinCore;
@@ -58,7 +56,7 @@
 
 /**
  * Used as value for Apache Tika {@link Metadata} mappings. Holds the
- * ontology property as {@link UriRef} and optionally a Tika {@link Property}.
+ * ontology property as {@link IRI} and optionally a Tika {@link Property}.
 * The latter can be used to parse the correct datatype for values contained in the
  * {@link Metadata}
  * 
@@ -74,21 +72,21 @@
      * List with allowed DataTypes.<ul>
      * <li> <code>null</code> is used for {@link PlainLiteral}s
      * <li> {@link XSD} datatypes are used for {@link TypedLiteral}s
-     * <li> {@link RDFS#Resource} is used for {@link NonLiteral} values. Note
-     * that only {@link UriRef} is supported, because for Tika {@link BNode}s
+     * <li> {@link RDFS#Resource} is used for {@link BlankNodeOrIRI} values. Note
+     * that only {@link IRI} is supported, because for Tika {@link BlankNode}s
      * do not make sense.
      * </ul>
      */
-    public static final Set<UriRef> ONT_TYPES;
+    public static final Set<IRI> ONT_TYPES;
     /**
      * Map with the same keys as contained in {@link #ONT_TYPES}. The values
      * are the java types.
      */
-    protected static final Map<UriRef,Class<?>> ONT_TYPE_MAP;
+    protected static final Map<IRI,Class<?>> ONT_TYPE_MAP;
     
     static {
         //use a LinkedHashMap to have a nice ordering (mainly for logging)
-        Map<UriRef,Class<?>> map = new LinkedHashMap<UriRef,Class<?>>();
+        Map<IRI,Class<?>> map = new LinkedHashMap<IRI,Class<?>>();
         //Plain Literal values
         map.put(null,null);
         //Typed Literal values
@@ -107,7 +105,7 @@
         map.put(XSD.short_,Short.class);
         map.put(XSD.string,String.class);
         map.put(XSD.time,Date.class);
-        //Data Types for NonLiteral values
+        //Data Types for BlankNodeOrIRI values
         map.put(RDFS.Resource,URI.class);
         ONT_TYPE_MAP = Collections.unmodifiableMap(map);
         ONT_TYPES = ONT_TYPE_MAP.keySet();
@@ -119,14 +117,14 @@
         //XSD.token,XSD.unsignedByte,XSD.unsignedInt,XSD.unsignedLong,XSD.unsignedShort,
     }
     
-    protected final UriRef ontProperty;
+    protected final IRI ontProperty;
     
     protected final Converter converter;
     /**
      * Getter for the OntologyProperty for this mapping
      * @return the ontProperty
      */
-    public final UriRef getOntologyProperty() {
+    public final IRI getOntologyProperty() {
         return ontProperty;
     }
     /**
@@ -141,12 +139,12 @@
      */
     public abstract Set<String> getMappedTikaProperties();
     
-    protected final UriRef ontType;
+    protected final IRI ontType;
     
-    protected Mapping(UriRef ontProperty,UriRef ontType){
+    protected Mapping(IRI ontProperty,IRI ontType){
         this(ontProperty,ontType,null);
     }
-    protected Mapping(UriRef ontProperty,UriRef ontType,Converter converter){
+    protected Mapping(IRI ontProperty,IRI ontType,Converter converter){
         if(ontProperty == null){
             throw new IllegalArgumentException("The parsed ontology property MUST NOT be NULL!");
         }
@@ -161,34 +159,34 @@
     
     /**
      * Applies this mapping based on the parsed {@link Metadata} and stores the 
-     * results to {@link MGraph}
-     * @param graph the Graph to store the mapping results
+     * results to {@link Graph}
+     * @param graph the Graph to store the mapping results
      * @param subject the subject (context) to add the mappings
      * @param metadata the metadata used for applying the mapping
      * @return <code>true</code> if the mapping could be applied based on the
      * parsed data. Otherwise <code>false</code>. This is intended to be used
      * by components that need to check if required mappings could be applied.
      */
-    public abstract boolean apply(MGraph graph, NonLiteral subject, Metadata metadata);
+    public abstract boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata);
     /**
      * Converts the parsed value based on the mapping information to an RDF
-     * {@link Resource}. Optionally supports also validation if the parsed
+     * {@link RDFTerm}. Optionally supports also validation if the parsed
      * value is valid for the {@link Mapping#ontType ontology type} specified by
      * the parsed mapping.
      * @param value the value
      * @param mapping the mapping
      * @param validate 
-     * @return the {@link Resource} or <code>null</code> if the parsed value is
+     * @return the {@link RDFTerm} or <code>null</code> if the parsed value is
      * <code>null</code> or {@link String#isEmpty() empty}.
      * @throws IllegalArgumentException if the parsed {@link Mapping} is 
      * <code>null</code>
      */
-    protected Resource toResource(String value, boolean validate){
+    protected RDFTerm toResource(String value, boolean validate){
         Metadata dummy = null;//used for date validation
         if(value == null || value.isEmpty()){
             return null; //ignore null and empty values
         }
-        Resource object;
+        RDFTerm object;
         if(ontType == null){
             object = new PlainLiteralImpl(value);
         } else if(ontType == RDFS.Resource){
@@ -196,7 +194,7 @@
                 if(validate){
                     new URI(value);
                 }
-                object = new UriRef(value);
+                object = new IRI(value);
             } catch (URISyntaxException e) {
                 log.warn("Unable to create Reference for value {} (not a valid URI)" +
                         " -> create a literal instead",value);
@@ -232,7 +230,7 @@
             if(validate && clazz != null && 
                     !clazz.equals(Date.class)){ //we need not to validate dates
                 try {
-                    lf.createObject(clazz,(TypedLiteral)object);
+                    lf.createObject(clazz,(Literal)object);
                 } catch (NoConvertorException e) {
                     log.info("Unable to validate typed literals of type {} because" +
                             "there is no converter for Class {} registered with Clerezza",
@@ -261,8 +259,8 @@
      */
     protected static final class MappingLogger{
         
-        private List<NonLiteral> subjects = new ArrayList<NonLiteral>();
-        private UriRef predicate;
+        private List<BlankNodeOrIRI> subjects = new ArrayList<BlankNodeOrIRI>();
+        private IRI predicate;
         private final int intendSize = 2;
         private final char[] intnedArray;
         private static final int MAX_INTEND = 5;
@@ -276,7 +274,7 @@
                 Math.min(MAX_INTEND, intend)*intendSize);
         }
         
-        protected void log(NonLiteral subject,UriRef predicate, String prop, Resource object){
+        protected void log(BlankNodeOrIRI subject,IRI predicate, String prop, RDFTerm object){
             if(!log.isDebugEnabled()){
                 return;
             }
@@ -305,6 +303,6 @@
     }
     
     public static interface Converter {
-        Resource convert(Resource value);
+        RDFTerm convert(RDFTerm value);
     }
 }
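
The Converter contract at the end of this file now operates on RDFTerm instead of Resource. A hedged sketch of a custom converter under the new interface; the lower-casing behaviour is made up for illustration:

    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.stanbol.enhancer.engines.tika.metadata.Mapping;

    public class LowerCaseConverter implements Mapping.Converter {
        // literal values are normalised to lower case; non-literals pass through
        @Override
        public RDFTerm convert(RDFTerm value) {
            if (value instanceof Literal) {
                return new PlainLiteralImpl(((Literal) value).getLexicalForm().toLowerCase());
            }
            return value;
        }
    }
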
diff --git a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java
index cee8444..a5de0b3 100644
--- a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java
+++ b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/OntologyMappings.java
@@ -29,10 +29,10 @@
 import java.util.TreeSet;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.clerezza.rdf.ontologies.SKOS;
@@ -63,7 +63,7 @@
     
     private static OntologyMappings defaultMappings;
     
-    private final Map<UriRef,Collection<Mapping>> mappings = new HashMap<UriRef,Collection<Mapping>>();
+    private final Map<IRI,Collection<Mapping>> mappings = new HashMap<IRI,Collection<Mapping>>();
     /**
      * Used to protect the default mappings from modifications
      */
@@ -334,11 +334,11 @@
             new PropertyMapping(ma+"averageBitRate",XSD.double_,
                 new Mapping.Converter(){//we need to convert from MByte/min to kByte/sec
                     @Override
-                    public Resource convert(Resource value) {
-                        if(value instanceof TypedLiteral &&
-                                XSD.double_.equals(((TypedLiteral)value).getDataType())){
+                    public RDFTerm convert(RDFTerm value) {
+                        if(value instanceof Literal &&
+                                XSD.double_.equals(((Literal)value).getDataType())){
                             LiteralFactory lf = LiteralFactory.getInstance();
-                            double mm = lf.createObject(Double.class, (TypedLiteral)value);
+                            double mm = lf.createObject(Double.class, (Literal)value);
                             return lf.createTypedLiteral(Double.valueOf(
                                 mm*1024/60));
                         } else {
@@ -348,7 +348,7 @@
                 
             },XMPDM.FILE_DATA_RATE.getName()));
 
-        //GEO -> Media Resource Ontology
+        //GEO -> Media Resource Ontology
         mappings.addMapping(new ResourceMapping(ma+"hasLocation", 
             new Mapping[]{ //required
                 new PropertyMapping(ma+"locationLatitude", XSD.double_,Geographic.LATITUDE.getName()),
@@ -466,7 +466,7 @@
         }
         propMappings.add(mapping);
     }
-    public void removePropertyMappings(UriRef property){
+    public void removePropertyMappings(IRI property){
         if(readonly){
             throw new IllegalStateException("This "+getClass().getSimpleName()+" instance is read only!");
         }
@@ -475,13 +475,13 @@
     
     /**
      * Applies the registered Ontology Mappings to the parsed metadata and
-     * context. Mappings are added to the parsed Graph
+     * context. Mappings are added to the parsed Graph
      * @param graph
      * @param context
      * @param metadata
      * @return Set containing the names of mapped keys
      */
-    public Set<String> apply(MGraph graph, UriRef context, Metadata metadata){
+    public Set<String> apply(Graph graph, IRI context, Metadata metadata){
         Set<String> keys = new HashSet<String>(Arrays.asList(metadata.names()));
         Set<String> mappedKeys = new HashSet<String>();
         for(Mapping mapping : this){
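
OntologyMappings.apply(...) now takes the mutable Graph plus an IRI context. A small end-to-end sketch, assuming the class's implicit no-arg constructor; the property URI and Tika key are illustrative:

    import java.util.Set;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.enhancer.engines.tika.metadata.OntologyMappings;
    import org.apache.stanbol.enhancer.engines.tika.metadata.PropertyMapping;
    import org.apache.tika.metadata.Metadata;

    public class OntologyMappingsSketch {
        public static void main(String[] args) {
            OntologyMappings mappings = new OntologyMappings();
            mappings.addMapping(new PropertyMapping(
                    "http://purl.org/dc/terms/title", "dc:title"));
            Metadata metadata = new Metadata();
            metadata.add("dc:title", "Example document");
            Graph graph = new SimpleGraph();
            Set<String> mapped = mappings.apply(graph, new IRI("urn:content:item1"), metadata);
            System.out.println(mapped); // [dc:title]
        }
    }
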
diff --git a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java
index 74fcc98..873bd03 100644
--- a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java
+++ b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/PropertyMapping.java
@@ -23,11 +23,11 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.tika.metadata.Metadata;
 
 public final class PropertyMapping extends Mapping {
@@ -37,24 +37,24 @@
      */
     protected final Set<String> tikaProperties;
 
-    public PropertyMapping(String ontProperty, UriRef ontType,String...tikaProperties) {
-        this(ontProperty == null? null : new UriRef(ontProperty), ontType,tikaProperties);
+    public PropertyMapping(String ontProperty, IRI ontType,String...tikaProperties) {
+        this(ontProperty == null? null : new IRI(ontProperty), ontType,tikaProperties);
     }
-    public PropertyMapping(String ontProperty, UriRef ontType,Converter converter,String...tikaProperties) {
-        this(ontProperty == null? null : new UriRef(ontProperty), ontType,converter,tikaProperties);
+    public PropertyMapping(String ontProperty, IRI ontType,Converter converter,String...tikaProperties) {
+        this(ontProperty == null? null : new IRI(ontProperty), ontType,converter,tikaProperties);
     }
 
     public PropertyMapping(String ontProperty,String...tikaProperties) {
-        this(ontProperty == null? null : new UriRef(ontProperty),null,tikaProperties);
+        this(ontProperty == null? null : new IRI(ontProperty),null,tikaProperties);
     }
 
-    public PropertyMapping(UriRef ontProperty,String...tikaProperties) {
+    public PropertyMapping(IRI ontProperty,String...tikaProperties) {
         this(ontProperty,null,tikaProperties);
     }
-    public PropertyMapping(UriRef ontProperty, UriRef ontType,String...tikaProperties) {
+    public PropertyMapping(IRI ontProperty, IRI ontType,String...tikaProperties) {
         this(ontProperty,ontType,null,tikaProperties);
     }
-    public PropertyMapping(UriRef ontProperty, UriRef ontType,Converter converter,String...tikaProperties) {
+    public PropertyMapping(IRI ontProperty, IRI ontType,Converter converter,String...tikaProperties) {
         super(ontProperty, ontType,converter);
         if(tikaProperties == null || tikaProperties.length < 1){
             throw new IllegalArgumentException("The list of parsed Tika properties MUST NOT be NULL nor empty!");
@@ -68,13 +68,13 @@
     }
 
     @Override
-    public boolean apply(MGraph graph, NonLiteral subject, Metadata metadata) {
-        Set<Resource> values = new HashSet<Resource>();
+    public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
+        Set<RDFTerm> values = new HashSet<RDFTerm>();
         for(String tikaProperty : tikaProperties){
             String[] tikaPropValues = metadata.getValues(tikaProperty);
             if(tikaPropValues != null && tikaPropValues.length > 0){
                 for(String tikaPropValue : tikaPropValues){
-                    Resource resource = toResource(tikaPropValue, true);
+                    RDFTerm resource = toResource(tikaPropValue, true);
                     if(resource != null){
                         values.add(resource);
                         mappingLogger.log(subject, ontProperty, tikaProperty, resource);
@@ -87,7 +87,7 @@
         if(values.isEmpty()){
             return false;
         } else {
-            for(Resource resource : values){
+            for(RDFTerm resource : values){
                 graph.add(new TripleImpl(subject, ontProperty, resource));
             }
             return true;
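
PropertyMapping keeps its construction patterns, with IRI replacing UriRef for the ontology property and datatype. A sketch of a typed mapping; the property URI and Tika key are illustrative:

    import org.apache.clerezza.rdf.ontologies.XSD;
    import org.apache.stanbol.enhancer.engines.tika.metadata.PropertyMapping;

    public class PropertyMappingSketch {
        public static void main(String[] args) {
            // values of the Tika "tiff:ImageWidth" key become xsd:int literals
            PropertyMapping width = new PropertyMapping(
                    "http://www.w3.org/ns/ma-ont#frameWidth", XSD.int_, "tiff:ImageWidth");
            System.out.println(width.getOntologyProperty());
        }
    }
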
diff --git a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java
index 75b1d11..21fc705 100644
--- a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java
+++ b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/ResourceMapping.java
@@ -23,12 +23,12 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.tika.metadata.Metadata;
 
 public final class ResourceMapping extends Mapping{
@@ -41,16 +41,16 @@
     Set<String> mappedTikaProperties;
     
     public ResourceMapping(String ontProperty, Mapping...required) {
-        this(new UriRef(ontProperty), required);
+        this(new IRI(ontProperty), required);
     }
     public ResourceMapping(String ontProperty, Mapping[] required, Mapping[] optional,Mapping[] additional) {
-        this(new UriRef(ontProperty), required,optional,additional);
+        this(new IRI(ontProperty), required,optional,additional);
     }
 
-    public ResourceMapping(UriRef ontProperty, Mapping...requried) {
+    public ResourceMapping(IRI ontProperty, Mapping...required) {
         this(ontProperty,required,null,null);
     }
-    public ResourceMapping(UriRef ontProperty, Mapping[] required, Mapping[] optional,Mapping[] additional) {
+    public ResourceMapping(IRI ontProperty, Mapping[] required, Mapping[] optional,Mapping[] additional) {
         super(ontProperty,null);
         required = required == null ? EMPTY : required;
         optional = optional == null ? EMPTY : optional;
@@ -91,12 +91,12 @@
     }
 
     @Override
-    public boolean apply(MGraph graph, NonLiteral subject, Metadata metadata) {
+    public boolean apply(Graph graph, BlankNodeOrIRI subject, Metadata metadata) {
         boolean added = false;
-        NonLiteral s = new BNode();
+        BlankNodeOrIRI s = new BlankNode();
         mappingLogger.log(subject, ontProperty, null, s);
         if(!required.isEmpty()) {
-            MGraph g = new SimpleMGraph();
+            Graph g = new SimpleGraph();
             for(Mapping m : required){
                 if(!m.apply(g, s, metadata)){
                     return false;
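
ResourceMapping now creates its intermediate node with new BlankNode() and buffers required sub-mappings in a SimpleGraph before committing them. A construction sketch, modelled on the hasLocation mapping shown earlier in this commit; names are illustrative:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.ontologies.XSD;
    import org.apache.stanbol.enhancer.engines.tika.metadata.PropertyMapping;
    import org.apache.stanbol.enhancer.engines.tika.metadata.ResourceMapping;

    public class ResourceMappingSketch {
        public static void main(String[] args) {
            // the location node is only added when all required mappings succeed
            ResourceMapping location = new ResourceMapping(
                    new IRI("http://www.w3.org/ns/ma-ont#hasLocation"),
                    new PropertyMapping("http://www.w3.org/ns/ma-ont#locationLatitude",
                            XSD.double_, "geo:lat"),
                    new PropertyMapping("http://www.w3.org/ns/ma-ont#locationLongitude",
                            XSD.double_, "geo:long"));
            System.out.println(location.getMappedTikaProperties());
        }
    }
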
diff --git a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java
index 0fcf5b9..ceb6d8f 100644
--- a/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java
+++ b/enhancement-engines/tika/src/main/java/org/apache/stanbol/enhancer/engines/tika/metadata/TypeMapping.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.engines.tika.metadata;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.ontologies.RDF;
 
 /**
@@ -27,9 +27,9 @@
 public class TypeMapping extends ConstantMapping {
 
     public TypeMapping(String type) {
-        this(new UriRef(type));
+        this(new IRI(type));
     }
-    public TypeMapping(UriRef...types) {
+    public TypeMapping(IRI...types) {
         super(RDF.type, types);
     }
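
TypeMapping is a thin ConstantMapping over rdf:type; its String constructor now wraps the value in an IRI. A minimal sketch with an illustrative type URI:

    import org.apache.stanbol.enhancer.engines.tika.metadata.TypeMapping;

    public class TypeMappingSketch {
        public static void main(String[] args) {
            TypeMapping type = new TypeMapping("http://www.w3.org/ns/ma-ont#MediaResource");
            System.out.println(type.getOntologyProperty()); // rdf:type
        }
    }
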
 
diff --git a/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java b/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java
index b79b932..e6edfa5 100644
--- a/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java
+++ b/enhancement-engines/tika/src/test/java/org/apache/stanbol/enhancer/engines/tika/TikaEngineTest.java
@@ -45,15 +45,12 @@
 import java.util.Set;
 import java.util.regex.Pattern;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.clerezza.rdf.ontologies.XSD;
 import org.apache.commons.io.IOUtils;
@@ -112,7 +109,7 @@
         ContentItem ci = createContentItem("test.html", "text/html; charset=UTF-8");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -142,7 +139,7 @@
         ContentItem ci = createContentItem("test.pdf", "application/pdf");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -199,7 +196,7 @@
         ContentItem ci = createContentItem("test.doc", "application/msword");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -227,7 +224,7 @@
         ContentItem ci = createContentItem("test.rtf", "application/rtf");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -256,7 +253,7 @@
         ContentItem ci = createContentItem("test.odt", "application/vnd.oasis.opendocument.text");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -285,7 +282,7 @@
         ContentItem ci = createContentItem("test.email.txt", "message/rfc822");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -314,27 +311,27 @@
         //now check the extracted metadata!
         //DC
         //STANBOL-757: dc:date no longer added by Tika 1.2 (dc:created is still present)
-        //verifyValue(ci, new UriRef(NamespaceEnum.dc+"date"), XSD.dateTime,"2010-09-06T09:25:34Z");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"format"), null,"message/rfc822");
+        //verifyValue(ci, new IRI(NamespaceEnum.dc+"date"), XSD.dateTime,"2010-09-06T09:25:34Z");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"format"), null,"message/rfc822");
         //STANBOL-757: dc:subject no longer added by Tika1.2 (dc:title is used instead)
-        //verifyValue(ci, new UriRef(NamespaceEnum.dc+"subject"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"title"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"creator"), null,"Julien Nioche (JIRA) <jira@apache.org>");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"created"), XSD.dateTime,"2010-09-06T09:25:34Z");
+        //verifyValue(ci, new IRI(NamespaceEnum.dc+"subject"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"title"), null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"creator"), null,"Julien Nioche (JIRA) <jira@apache.org>");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"created"), XSD.dateTime,"2010-09-06T09:25:34Z");
         
         //Media Ontology
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2010-09-06T09:25:34Z");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasFormat"),null,"message/rfc822");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasCreator"),null,"Julien Nioche (JIRA) <jira@apache.org>");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasContributor"),null,"Julien Nioche (JIRA) <jira@apache.org>");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2010-09-06T09:25:34Z");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasFormat"),null,"message/rfc822");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasCreator"),null,"Julien Nioche (JIRA) <jira@apache.org>");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasContributor"),null,"Julien Nioche (JIRA) <jira@apache.org>");
         //STANBOL-757: This was present with Tika 1.1 because its mapping from dc:subject 
-//        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasKeyword"),null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
+//        verifyValue(ci, new IRI(NamespaceEnum.media+"hasKeyword"),null,"[jira] Commented: (TIKA-461) RFC822 messages not parsed");
 
         
         //Nepomuk Message
         String message = "http://www.semanticdesktop.org/ontologies/2007/03/22/nmo#";
-        verifyValue(ci, new UriRef(message+"from"),null,"Julien Nioche (JIRA) <jira@apache.org>");
-        verifyValue(ci, new UriRef(message+"to"),null,"dev@tika.apache.org");
+        verifyValue(ci, new IRI(message+"from"),null,"Julien Nioche (JIRA) <jira@apache.org>");
+        verifyValue(ci, new IRI(message+"to"),null,"dev@tika.apache.org");
         
     }
     @Test
@@ -343,7 +340,7 @@
         ContentItem ci = createContentItem("testMP3id3v24.mp3", "audio/mpeg");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -359,16 +356,16 @@
         Blob xhtmlBlob = contentPart.getValue();
         assertNotNull(xhtmlBlob);
         //Test AudioTrack metadata
-        NonLiteral audioTrack = verifyNonLiteral(ci, new UriRef(NamespaceEnum.media+"hasTrack"));
+        BlankNodeOrIRI audioTrack = verifyBlankNodeOrIRI(ci, new IRI(NamespaceEnum.media+"hasTrack"));
         //types
         verifyValues(ci, audioTrack, RDF.type, 
-            new UriRef(NamespaceEnum.media+"MediaFragment"),
-            new UriRef(NamespaceEnum.media+"Track"),
-            new UriRef(NamespaceEnum.media+"AudioTrack"));
+            new IRI(NamespaceEnum.media+"MediaFragment"),
+            new IRI(NamespaceEnum.media+"Track"),
+            new IRI(NamespaceEnum.media+"AudioTrack"));
         //properties
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasFormat"), XSD.string, "Mono");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasCompression"), XSD.string, "MP3");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasFormat"), XSD.string, "Mono");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasCompression"), XSD.string, "MP3");
     }
     /**
      * Tests mappings for the Mp4 metadata extraction capabilities added to
@@ -383,7 +380,7 @@
         ContentItem ci = createContentItem("testMP4.m4a", "audio/mp4");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -399,42 +396,42 @@
         Blob xhtmlBlob = contentPart.getValue();
         assertNotNull(xhtmlBlob);
         //Test AudioTrack metadata
-        NonLiteral audioTrack = verifyNonLiteral(ci, new UriRef(NamespaceEnum.media+"hasTrack"));
+        BlankNodeOrIRI audioTrack = verifyBlankNodeOrIRI(ci, new IRI(NamespaceEnum.media+"hasTrack"));
         //types
         verifyValues(ci, audioTrack, RDF.type, 
-            new UriRef(NamespaceEnum.media+"MediaFragment"),
-            new UriRef(NamespaceEnum.media+"Track"),
-            new UriRef(NamespaceEnum.media+"AudioTrack"));
+            new IRI(NamespaceEnum.media+"MediaFragment"),
+            new IRI(NamespaceEnum.media+"Track"),
+            new IRI(NamespaceEnum.media+"AudioTrack"));
         //properties
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasFormat"), XSD.string, "Stereo");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
-        verifyValue(ci, audioTrack, new UriRef(NamespaceEnum.media+"hasCompression"), XSD.string, "M4A");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasFormat"), XSD.string, "Stereo");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"samplingRate"), XSD.int_, "44100");
+        verifyValue(ci, audioTrack, new IRI(NamespaceEnum.media+"hasCompression"), XSD.string, "M4A");
     }
     @Test
     public void testGEOMetadata() throws EngineException, IOException, ParseException{
         log.info(">>> testGEOMetadata <<<");
-        //first validate Media Resource Ontology
-        UriRef hasLocation = new UriRef(NamespaceEnum.media+"hasLocation");
-        UriRef locationLatitude = new UriRef(NamespaceEnum.media+"locationLatitude");
-        UriRef locationLongitude = new UriRef(NamespaceEnum.media+"locationLongitude");
-        //UriRef locationAltitude = new UriRef(NamespaceEnum.media+"locationAltitude");
+        //first validate Media Resource Ontology
+        IRI hasLocation = new IRI(NamespaceEnum.media+"hasLocation");
+        IRI locationLatitude = new IRI(NamespaceEnum.media+"locationLatitude");
+        IRI locationLongitude = new IRI(NamespaceEnum.media+"locationLongitude");
+        //IRI locationAltitude = new IRI(NamespaceEnum.media+"locationAltitude");
         ContentItem ci = createContentItem("testJPEG_GEO.jpg", OCTET_STREAM.toString());//"video/x-ms-asf");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
         Iterator<Triple> it = ci.getMetadata().filter(ci.getUri(),hasLocation, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        assertTrue(r instanceof NonLiteral);
-        NonLiteral location = verifyNonLiteral(ci, hasLocation);
+        assertTrue(r instanceof BlankNodeOrIRI);
+        BlankNodeOrIRI location = verifyBlankNodeOrIRI(ci, hasLocation);
         //lat
         verifyValue(ci, location, locationLatitude, XSD.double_, "12.54321");
         //long
         verifyValue(ci, location, locationLongitude, XSD.double_, "-54.1234");
         
         //second the GEO ont
-        UriRef lat = new UriRef(NamespaceEnum.geo+"lat");
-        UriRef lon = new UriRef(NamespaceEnum.geo+"long");
+        IRI lat = new IRI(NamespaceEnum.geo+"lat");
+        IRI lon = new IRI(NamespaceEnum.geo+"long");
         //lat
         verifyValue(ci, lat, XSD.double_, "12.54321");
         //long
@@ -448,15 +445,15 @@
         ContentItem ci = createContentItem("testMP3id3v24.mp3", "audio/mpeg");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        verifyValue(ci,new UriRef(NamespaceEnum.dc+"creator"),null,"Test Artist");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"title"),null,"Test Album");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"format"),null,"audio/mpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasFormat"),null,"audio/mpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"mainOriginalTitle"),null,"Test Album");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasContributor"),null,"Test Artist");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"releaseDate"),XSD.string,"2008");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasGenre"),null,"Rock");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasCreator"),null,"Test Artist");
+        verifyValue(ci,new IRI(NamespaceEnum.dc+"creator"),null,"Test Artist");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"title"),null,"Test Album");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"format"),null,"audio/mpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasFormat"),null,"audio/mpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"mainOriginalTitle"),null,"Test Album");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasContributor"),null,"Test Artist");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"releaseDate"),XSD.string,"2008");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasGenre"),null,"Rock");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasCreator"),null,"Test Artist");
     }
     @Test
     public void testExifMetadata() throws EngineException, ParseException, IOException {
@@ -465,32 +462,32 @@
         ContentItem ci = createContentItem("testJPEG_EXIF.jpg", "image/jpeg");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        verifyValue(ci, new UriRef(exif+"make"),null,"Canon");
-        verifyValue(ci, new UriRef(exif+"software"),null,"Adobe Photoshop CS3 Macintosh");
-        verifyValue(ci, new UriRef(exif+"dateTimeOriginal"),XSD.dateTime,"2009-08-11T09:09:45");
-        verifyValue(ci, new UriRef(exif+"relatedImageWidth"),XSD.int_,"100");
-        verifyValue(ci, new UriRef(exif+"fNumber"),XSD.double_,"5.6");
-        verifyValue(ci, new UriRef(exif+"model"),null,"Canon EOS 40D");
-        verifyValue(ci, new UriRef(exif+"isoSpeedRatings"),XSD.int_,"400");
-        verifyValue(ci, new UriRef(exif+"xResolution"),XSD.double_,"240.0");
-        verifyValue(ci, new UriRef(exif+"flash"),XSD.boolean_,"false");
-        verifyValue(ci, new UriRef(exif+"exposureTime"),XSD.double_,"6.25E-4");
-        verifyValue(ci, new UriRef(exif+"yResolution"),XSD.double_,"240.0");
-        verifyValue(ci, new UriRef(exif+"resolutionUnit"),XSD.string,"Inch");
-        verifyValue(ci, new UriRef(exif+"focalLength"),XSD.double_,"194.0");
-        verifyValue(ci, new UriRef(exif+"relatedImageLength"),XSD.int_,"68");
-        verifyValue(ci, new UriRef(exif+"bitsPerSample"),XSD.int_,"8");
+        verifyValue(ci, new IRI(exif+"make"),null,"Canon");
+        verifyValue(ci, new IRI(exif+"software"),null,"Adobe Photoshop CS3 Macintosh");
+        verifyValue(ci, new IRI(exif+"dateTimeOriginal"),XSD.dateTime,"2009-08-11T09:09:45");
+        verifyValue(ci, new IRI(exif+"relatedImageWidth"),XSD.int_,"100");
+        verifyValue(ci, new IRI(exif+"fNumber"),XSD.double_,"5.6");
+        verifyValue(ci, new IRI(exif+"model"),null,"Canon EOS 40D");
+        verifyValue(ci, new IRI(exif+"isoSpeedRatings"),XSD.int_,"400");
+        verifyValue(ci, new IRI(exif+"xResolution"),XSD.double_,"240.0");
+        verifyValue(ci, new IRI(exif+"flash"),XSD.boolean_,"false");
+        verifyValue(ci, new IRI(exif+"exposureTime"),XSD.double_,"6.25E-4");
+        verifyValue(ci, new IRI(exif+"yResolution"),XSD.double_,"240.0");
+        verifyValue(ci, new IRI(exif+"resolutionUnit"),XSD.string,"Inch");
+        verifyValue(ci, new IRI(exif+"focalLength"),XSD.double_,"194.0");
+        verifyValue(ci, new IRI(exif+"relatedImageLength"),XSD.int_,"68");
+        verifyValue(ci, new IRI(exif+"bitsPerSample"),XSD.int_,"8");
         //also Media Ontology mappings for Exif
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"frameHeight"),XSD.int_,"68");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"frameWidth"),XSD.int_,"100");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"hasFormat"),null,"image/jpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2009-08-11T09:09:45");
-        verifyValues(ci, new UriRef(NamespaceEnum.media+"hasKeyword"),null,"serbor","moscow-birds","canon-55-250");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"frameHeight"),XSD.int_,"68");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"frameWidth"),XSD.int_,"100");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"hasFormat"),null,"image/jpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.media+"creationDate"),XSD.dateTime,"2009-08-11T09:09:45");
+        verifyValues(ci, new IRI(NamespaceEnum.media+"hasKeyword"),null,"serbor","moscow-birds","canon-55-250");
         //and finally the mapped DC properties
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"format"),null,"image/jpeg");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"created"),XSD.dateTime,"2009-08-11T09:09:45");
-        verifyValue(ci, new UriRef(NamespaceEnum.dc+"modified"),XSD.dateTime,"2009-10-02T23:02:49");
-        verifyValues(ci, new UriRef(NamespaceEnum.dc+"subject"), null, "serbor","moscow-birds","canon-55-250");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"format"),null,"image/jpeg");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"created"),XSD.dateTime,"2009-08-11T09:09:45");
+        verifyValue(ci, new IRI(NamespaceEnum.dc+"modified"),XSD.dateTime,"2009-10-02T23:02:49");
+        verifyValues(ci, new IRI(NamespaceEnum.dc+"subject"), null, "serbor","moscow-birds","canon-55-250");
     }
     
     /**
@@ -508,7 +505,7 @@
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
         //test that the "xmpDM:logComment" is present
-        verifyValue(ci, new UriRef("urn:tika.apache.org:tika:xmpDM:logComment"), null,"Test Comments");
+        verifyValue(ci, new IRI("urn:tika.apache.org:tika:xmpDM:logComment"), null,"Test Comments");
     }
     
     @Test
@@ -517,7 +514,7 @@
         ContentItem ci = createContentItem("test.pdf", OCTET_STREAM.toString());
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -557,7 +554,7 @@
         ContentItem ci = createContentItem("test.pages", "application/x-iwork-pages-sffpages");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         //it MUST NOT give an error but also not add a content part
         assertNull(contentPart);
@@ -570,7 +567,7 @@
         ContentItem ci = createContentItem("test.xhtml", XHTML.toString()+"; charset=UTF-8");
         assertFalse(engine.canEnhance(ci) == CANNOT_ENHANCE);
         engine.computeEnhancements(ci);
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, 
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, 
             singleton("text/plain"));
         assertNotNull(contentPart);
         Blob plainTextBlob = contentPart.getValue();
@@ -631,84 +628,81 @@
     /*
      * Internal helper methods 
      */
-    private NonLiteral verifyNonLiteral(ContentItem ci, UriRef property){
-        return verifyNonLiteral(ci, ci.getUri(), property);
+    private BlankNodeOrIRI verifyBlankNodeOrIRI(ContentItem ci, IRI property){
+        return verifyBlankNodeOrIRI(ci, ci.getUri(), property);
     }
-    private static NonLiteral verifyNonLiteral(ContentItem ci, UriRef subject, UriRef property){
+    private static BlankNodeOrIRI verifyBlankNodeOrIRI(ContentItem ci, IRI subject, IRI property){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        assertTrue(r instanceof NonLiteral);
-        return (NonLiteral)r;
+        assertTrue(r instanceof BlankNodeOrIRI);
+        return (BlankNodeOrIRI)r;
     }
-    private static UriRef verifyValue(ContentItem ci, UriRef property, UriRef value){
+    private static IRI verifyValue(ContentItem ci, IRI property, IRI value){
         return verifyValue(ci, ci.getUri(), property, value);
     }
-    private static UriRef verifyValue(ContentItem ci, NonLiteral subject, UriRef property, UriRef value){
+    private static IRI verifyValue(ContentItem ci, BlankNodeOrIRI subject, IRI property, IRI value){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        assertTrue(r instanceof UriRef);
+        assertTrue(r instanceof IRI);
         assertEquals(value,r);
-        return (UriRef)r;
+        return (IRI)r;
    }
-    private static Literal verifyValue(ContentItem ci, UriRef property, UriRef dataType, String lexValue) throws ParseException{
+    private static Literal verifyValue(ContentItem ci, IRI property, IRI dataType, String lexValue) throws ParseException{
         return verifyValue(ci, ci.getUri(), property, dataType, lexValue);
     }
-    private static Literal verifyValue(ContentItem ci, NonLiteral subject, UriRef property, UriRef dataType, String lexValue) throws ParseException{
+    private static Literal verifyValue(ContentItem ci, BlankNodeOrIRI subject, IRI property, IRI dataType, String lexValue) throws ParseException{
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Resource r = it.next().getObject();
+        RDFTerm r = it.next().getObject();
         assertFalse(it.hasNext());
-        if(dataType == null){
-            assertTrue(r instanceof PlainLiteral);
-        } else {
-            assertTrue(r instanceof TypedLiteral);
-            assertEquals(dataType, ((TypedLiteral)r).getDataType());
+        if(dataType != null){
+            assertEquals(dataType, ((Literal)r).getDataType());
         }
         //if we check dates and the lexical value is not UTC then we need to
         //consider the time zone of the host running this test
         if(XSD.dateTime.equals(dataType) && lexValue.charAt(lexValue.length()-1) != 'Z'){
             Date expectedDate = dateDefaultTimezone.parse(lexValue);
-            assertEquals(expectedDate, lf.createObject(Date.class, ((TypedLiteral)r)));
+            assertEquals(expectedDate, lf.createObject(Date.class, ((Literal)r)));
         } else {
             assertEquals(lexValue,((Literal)r).getLexicalForm());
         }
         return (Literal)r;
     }
-    private static Set<Literal> verifyValues(ContentItem ci, UriRef property, UriRef dataType, String...lexValues){
+    private static Set<Literal> verifyValues(ContentItem ci, IRI property, IRI dataType, String...lexValues){
         return verifyValues(ci, ci.getUri(), property, dataType, lexValues);
     }
-    private static Set<Literal> verifyValues(ContentItem ci, NonLiteral subject, UriRef property, UriRef dataType, String...lexValues){
+    private static Set<Literal> verifyValues(ContentItem ci, BlankNodeOrIRI subject, IRI property, IRI dataType, String...lexValues){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
         Set<String> expected = new HashSet<String>(Arrays.asList(lexValues));
         Set<Literal> found = new HashSet<Literal>(expected.size());
         while(it.hasNext()){
-            Resource r = it.next().getObject();
+            RDFTerm r = it.next().getObject();
             if(dataType == null){
-                assertTrue(r instanceof PlainLiteral);
+                assertTrue(r instanceof Literal);
             } else {
-                assertTrue(r instanceof TypedLiteral);
-                assertEquals(dataType, ((TypedLiteral)r).getDataType());
+                assertTrue(r instanceof Literal);
+                assertEquals(dataType, ((Literal)r).getDataType());
             }
             assertTrue(expected.remove(((Literal)r).getLexicalForm()));
             found.add((Literal)r);
         }
         return found;
     }
-    private static Set<NonLiteral> verifyValues(ContentItem ci, NonLiteral subject, UriRef property, NonLiteral...references){
+    private static Set<BlankNodeOrIRI> verifyValues(ContentItem ci, BlankNodeOrIRI subject, IRI property, BlankNodeOrIRI...references){
         Iterator<Triple> it = ci.getMetadata().filter(subject,property, null);
         assertTrue(it.hasNext());
-        Set<NonLiteral> expected = new HashSet<NonLiteral>(Arrays.asList(references));
-        Set<NonLiteral> found = new HashSet<NonLiteral>(expected.size());
+        Set<BlankNodeOrIRI> expected = new HashSet<BlankNodeOrIRI>(Arrays.asList(references));
+        Set<BlankNodeOrIRI> found = new HashSet<BlankNodeOrIRI>(expected.size());
         while(it.hasNext()){
-            Resource r = it.next().getObject();
-            assertTrue(r instanceof NonLiteral);
+            RDFTerm r = it.next().getObject();
+            assertTrue(r instanceof BlankNodeOrIRI);
             assertTrue(expected.remove(r));
-            found.add((NonLiteral)r);
+            found.add((BlankNodeOrIRI)r);
         }
         return found;
     }
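
The helpers above replace the PlainLiteral/TypedLiteral split with the unified Literal interface. A self-contained sketch of the resulting inspection pattern; the graph content is illustrative:

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class LiteralInspectionSketch {
        public static void main(String[] args) {
            Graph graph = new SimpleGraph();
            IRI s = new IRI("urn:demo:subject");
            IRI p = new IRI("urn:demo:property");
            graph.add(new TripleImpl(s, p, new PlainLiteralImpl("value")));
            Iterator<Triple> it = graph.filter(s, p, null);
            while (it.hasNext()) {
                RDFTerm o = it.next().getObject();
                if (o instanceof Literal) {
                    // every Literal now exposes a datatype; there is no separate
                    // PlainLiteral/TypedLiteral instanceof check any more
                    Literal l = (Literal) o;
                    System.out.println(l.getLexicalForm() + " ^^ " + l.getDataType());
                }
            }
        }
    }
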
diff --git a/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java b/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java
index b8d11d8..6295db8 100644
--- a/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java
+++ b/enhancement-engines/topic/api/src/main/java/org/apache/stanbol/enhancer/topic/api/TopicClassifier.java
@@ -16,8 +16,8 @@
  */
 package org.apache.stanbol.enhancer.topic.api;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.topic.api.training.TrainingSet;
 import org.apache.stanbol.enhancer.topic.api.training.TrainingSetException;
 import org.osgi.framework.InvalidSyntaxException;
@@ -180,5 +180,5 @@
      * 
      * @return the number of concepts successfully imported (including roots).
      */
-    int importConceptsFromGraph(Graph graph, UriRef conceptClass, UriRef broaderProperty) throws ClassifierException;
+    int importConceptsFromGraph(ImmutableGraph graph, IRI conceptClass, IRI broaderProperty) throws ClassifierException;
 }
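
importConceptsFromGraph now expects the read-only ImmutableGraph. A sketch of producing one from a mutable Graph, assuming Graph#getImmutableGraph() from the Clerezza 1.0 commons-rdf API; the triples are illustrative:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class ImmutableGraphSketch {
        public static void main(String[] args) {
            Graph scheme = new SimpleGraph();
            scheme.add(new TripleImpl(
                    new IRI("urn:demo:concept"),
                    new IRI("http://www.w3.org/1999/02/22-rdf-syntax-ns#type"),
                    new IRI("http://www.w3.org/2004/02/skos/core#Concept")));
            // snapshot the mutable Graph for APIs that require an ImmutableGraph
            ImmutableGraph snapshot = scheme.getImmutableGraph();
            System.out.println(snapshot.size());
            // classifier.importConceptsFromGraph(snapshot, conceptClass, broaderProperty);
        }
    }
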
diff --git a/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java b/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java
index bc53430..a240d76 100644
--- a/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java
+++ b/enhancement-engines/topic/engine/src/main/java/org/apache/stanbol/enhancer/engine/topic/TopicClassificationEngine.java
@@ -35,14 +35,14 @@
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.utils.GraphNode;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
@@ -479,7 +479,7 @@
 
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if (contentPart == null) {
             throw new IllegalStateException(
                     "No ContentPart with a supported Mime Type" + "found for ContentItem " + ci.getUri()
@@ -507,7 +507,7 @@
                 contentPart.getKey(), ci.getUri());
             return;
         }
-        MGraph metadata = ci.getMetadata();
+        Graph metadata = ci.getMetadata();
         List<TopicSuggestion> topics;
         try {
             topics = suggestTopics(text);
@@ -517,20 +517,20 @@
         } catch (ClassifierException e) {
             throw new EngineException(e);
         }
-        UriRef precision = new UriRef(NamespaceEnum.fise + "classifier/precision");
-        UriRef recall = new UriRef(NamespaceEnum.fise + "classifier/recall");
-        UriRef f1 = new UriRef(NamespaceEnum.fise + "classifier/f1");
+        IRI precision = new IRI(NamespaceEnum.fise + "classifier/precision");
+        IRI recall = new IRI(NamespaceEnum.fise + "classifier/recall");
+        IRI f1 = new IRI(NamespaceEnum.fise + "classifier/f1");
 
         LiteralFactory lf = LiteralFactory.getInstance();
         ci.getLock().writeLock().lock();
         try {
             // Global text annotation to attach all the topic annotations to.
-            UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
+            IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(ci, this);
             metadata.add(new TripleImpl(textAnnotation,
                     org.apache.stanbol.enhancer.servicesapi.rdf.Properties.DC_TYPE,
                     OntologicalClasses.SKOS_CONCEPT));
             for (TopicSuggestion topic : topics) {
-                UriRef enhancement = EnhancementEngineHelper.createEntityEnhancement(ci, this);
+                IRI enhancement = EnhancementEngineHelper.createEntityEnhancement(ci, this);
                 metadata.add(new TripleImpl(enhancement,
                         org.apache.stanbol.enhancer.servicesapi.rdf.Properties.RDF_TYPE,
                         TechnicalClasses.ENHANCER_TOPICANNOTATION));
@@ -540,7 +540,7 @@
                 // add link to entity
                 metadata.add(new TripleImpl(enhancement,
                         org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_REFERENCE,
-                        new UriRef(topic.conceptUri)));
+                        new IRI(topic.conceptUri)));
                 metadata.add(new TripleImpl(enhancement,
                         org.apache.stanbol.enhancer.servicesapi.rdf.Properties.ENHANCER_ENTITY_TYPE,
                         OntologicalClasses.SKOS_CONCEPT));
@@ -1509,25 +1509,25 @@
     }
 
     @Override
-    public int importConceptsFromGraph(Graph graph, UriRef conceptClass, UriRef broaderProperty) throws ClassifierException {
+    public int importConceptsFromGraph(ImmutableGraph graph, IRI conceptClass, IRI broaderProperty) throws ClassifierException {
         int importedCount = 0;
         Iterator<Triple> conceptIterator = graph.filter(null,
             org.apache.stanbol.enhancer.servicesapi.rdf.Properties.RDF_TYPE, conceptClass);
         while (conceptIterator.hasNext()) {
             Triple conceptTriple = conceptIterator.next();
-            if (!(conceptTriple.getSubject() instanceof UriRef)) {
+            if (!(conceptTriple.getSubject() instanceof IRI)) {
                 continue;
             }
-            UriRef conceptUri = (UriRef) conceptTriple.getSubject();
+            IRI conceptUri = (IRI) conceptTriple.getSubject();
             GraphNode node = new GraphNode(conceptUri, graph);
             List<String> broaderConcepts = new ArrayList<String>();
             // TODO: use OWL property inference on sub-properties here instead of explicit
             // property filter
             Iterator<GraphNode> broaderIterator = node.getObjectNodes(broaderProperty);
             while (broaderIterator.hasNext()) {
-                Resource node2 = broaderIterator.next().getNode();
-                if (node2 instanceof UriRef) {
-                    broaderConcepts.add(((UriRef) node2).getUnicodeString());
+                RDFTerm node2 = broaderIterator.next().getNode();
+                if (node2 instanceof IRI) {
+                    broaderConcepts.add(((IRI) node2).getUnicodeString());
                 }
             }
             addConcept(conceptUri.getUnicodeString(), broaderConcepts);
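
The import loop above shows the recurring migration pattern for graph queries: filter(...) still returns an Iterator<Triple>, but subjects are typed as BlankNodeOrIRI and must be narrowed to IRI before their lexical form can be used. A minimal, self-contained sketch of that idiom against the Clerezza 1.0 commons API (the helper name and counting behaviour are illustrative, not part of the engine):

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;

    public final class ConceptScan {
        // Hypothetical helper: count subjects typed as <conceptClass> that are
        // IRIs; blank-node subjects carry no stable identifier to import.
        static int countImportableConcepts(Graph graph, IRI rdfType, IRI conceptClass) {
            int count = 0;
            Iterator<Triple> it = graph.filter(null, rdfType, conceptClass);
            while (it.hasNext()) {
                if (it.next().getSubject() instanceof IRI) {
                    count++;
                }
            }
            return count;
        }
    }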
diff --git a/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java b/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java
index 347f10f..8047334 100644
--- a/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java
+++ b/enhancement-engines/topic/engine/src/test/java/org/apache/stanbol/enhancer/engine/topic/TopicEngineTest.java
@@ -34,7 +34,7 @@
 import java.util.Random;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
@@ -206,7 +206,7 @@
         log.info(" --- testImportModelFromSKOS --- ");
         Parser parser = Parser.getInstance();
         parser.bindParsingProvider(new JenaParserProvider());
-        Graph graph = parser.parse(getClass().getResourceAsStream("/sample-scheme.skos.rdf.xml"),
+        ImmutableGraph graph = parser.parse(getClass().getResourceAsStream("/sample-scheme.skos.rdf.xml"),
             SupportedFormat.RDF_XML);
         int imported = classifier.importConceptsFromGraph(graph, OntologicalClasses.SKOS_CONCEPT,
             Properties.SKOS_BROADER);
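
With the 1.0 API the parser hands back an ImmutableGraph directly, so the test needs no further conversion. A self-contained sketch of that parse step; the class name and resource path are placeholders:

    import java.io.InputStream;

    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.rdf.core.serializedform.Parser;
    import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
    import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;

    public final class ParseSketch {
        public static void main(String[] args) {
            Parser parser = Parser.getInstance();
            parser.bindParsingProvider(new JenaParserProvider());
            // placeholder resource name
            InputStream in = ParseSketch.class.getResourceAsStream("/sample.skos.rdf.xml");
            ImmutableGraph graph = parser.parse(in, SupportedFormat.RDF_XML);
            System.out.println("parsed triples: " + graph.size());
        }
    }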
diff --git a/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java b/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java
index e024164..45acb14 100644
--- a/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java
+++ b/enhancement-engines/topic/web/src/main/java/org/apache/stanbol/enhancer/web/topic/resource/TopicModelResource.java
@@ -38,8 +38,8 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.UriInfo;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
@@ -241,15 +241,15 @@
         @Consumes(MediaType.WILDCARD)
         public Response importConceptsFromRDF(@QueryParam(value = "concept_class") String conceptClassUri,
                 @QueryParam(value = "broader_property") String broaderPropertyUri,
-                Graph graph,
+                ImmutableGraph graph,
                 @Context HttpHeaders headers) throws ClassifierException {
-            UriRef conceptClass = OntologicalClasses.SKOS_CONCEPT;
-            UriRef broaderProperty = Properties.SKOS_BROADER;
+            IRI conceptClass = OntologicalClasses.SKOS_CONCEPT;
+            IRI broaderProperty = Properties.SKOS_BROADER;
             if (conceptClassUri != null && !conceptClassUri.isEmpty()) {
-                conceptClass = new UriRef(conceptClassUri);
+                conceptClass = new IRI(conceptClassUri);
             }
             if (broaderPropertyUri != null && !broaderPropertyUri.isEmpty()) {
-                broaderProperty = new UriRef(broaderPropertyUri);
+                broaderProperty = new IRI(broaderPropertyUri);
             }
             int imported = classifier.importConceptsFromGraph(graph, conceptClass, broaderProperty);
             ResponseBuilder rb;
diff --git a/enhancement-engines/uima/uimalocal-template/pom.xml b/enhancement-engines/uima/uimalocal-template/pom.xml
index a43f5cb..8c06b56 100644
--- a/enhancement-engines/uima/uimalocal-template/pom.xml
+++ b/enhancement-engines/uima/uimalocal-template/pom.xml
@@ -84,8 +84,6 @@
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.core</artifactId>
-      <version>0.14</version>
-      <type>jar</type>
     </dependency>
     <dependency>
       <groupId>org.apache.felix</groupId>
diff --git a/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java b/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java
index 9d1cf37..fa84495 100644
--- a/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java
+++ b/enhancement-engines/uima/uimalocal-template/src/main/java/org/apache/stanbol/enhancer/engines/uimalocal/UIMALocal.java
@@ -29,7 +29,7 @@
 import java.util.Set;
 import java.util.UUID;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -149,7 +149,7 @@
 
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if (contentPart == null) {
             throw new IllegalStateException("No ContentPart with an supported Mimetype '"
                     + SUPPORTED_MIMETYPES + "' found for ContentItem " + ci.getUri()
@@ -182,16 +182,16 @@
 
         for (String typeName : uimaTypeNames) {
             List<FeatureStructure> featureSetList = concertToCasLight(jcas, typeName);
-            UriRef uimaUriRef = new UriRef(uimaUri);
+            IRI uimaIRI = new IRI(uimaUri);
 
             FeatureStructureListHolder holder;
             ci.getLock().writeLock().lock();
             try {
-                holder = ci.getPart(uimaUriRef, FeatureStructureListHolder.class);
+                holder = ci.getPart(uimaIRI, FeatureStructureListHolder.class);
             } catch (NoSuchPartException e) {
                 holder = new FeatureStructureListHolder();
                 logger.info("Adding FeatureSet List Holder content part with uri:" + uimaUri);
-                ci.addPart(uimaUriRef, holder);
+                ci.addPart(uimaIRI, holder);
                 logger.info(uimaUri + " content part added.");
             } finally {
                 ci.getLock().writeLock().unlock();
diff --git a/enhancement-engines/uima/uimaremote/pom.xml b/enhancement-engines/uima/uimaremote/pom.xml
index 15e4c4d..bf9d8f9 100644
--- a/enhancement-engines/uima/uimaremote/pom.xml
+++ b/enhancement-engines/uima/uimaremote/pom.xml
@@ -106,8 +106,6 @@
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.core</artifactId>
-      <version>0.14</version>
-      <type>bundle</type>
     </dependency>
     <dependency>
       <groupId>org.apache.felix</groupId>
diff --git a/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java b/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java
index fe57e84..8c5a220 100644
--- a/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java
+++ b/enhancement-engines/uima/uimaremote/src/main/java/org/apache/stanbol/enhancer/engines/uimaremote/UIMARemoteClient.java
@@ -26,7 +26,7 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -138,7 +138,7 @@
 
     @Override
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI, Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if (contentPart == null) {
             throw new IllegalStateException("No ContentPart with an supported Mimetype '"
                     + SUPPORTED_MIMETYPES + "' found for ContentItem " + ci.getUri()
@@ -156,16 +156,16 @@
         for (UIMASimpleServletClient ussc : usscList) {
             logger.info("Accessing uima source:" + ussc.getSourceName() + " endpoint:" + ussc.getUri());
             List<FeatureStructure> featureSetList = ussc.process(text);
-            UriRef uimaUriRef = new UriRef(uimaUri);
+            IRI uimaIRI = new IRI(uimaUri);
 
             FeatureStructureListHolder holder;
             ci.getLock().writeLock().lock();
             try {
-                holder = ci.getPart(uimaUriRef, FeatureStructureListHolder.class);
+                holder = ci.getPart(uimaIRI, FeatureStructureListHolder.class);
             } catch (NoSuchPartException e) {
                 holder = new FeatureStructureListHolder();
                 logger.info("Adding FeatureSet List Holder content part with uri:" + uimaUri);
-                ci.addPart(uimaUriRef, holder);
+                ci.addPart(uimaIRI, holder);
                 logger.info(uimaUri + " content part added.");
             } finally {
                 ci.getLock().writeLock().unlock();
diff --git a/enhancement-engines/uima/uimatotriples/pom.xml b/enhancement-engines/uima/uimatotriples/pom.xml
index 6da5db7..5b36644 100644
--- a/enhancement-engines/uima/uimatotriples/pom.xml
+++ b/enhancement-engines/uima/uimatotriples/pom.xml
@@ -112,8 +112,6 @@
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.core</artifactId>
-      <version>0.14</version>
-      <type>jar</type>
     </dependency>
     <dependency>
       <groupId>org.apache.stanbol</groupId>
diff --git a/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java b/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java
index e5b139d..bef4068 100644
--- a/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java
+++ b/enhancement-engines/uima/uimatotriples/src/main/java/org/apache/stanbol/enhancer/engines/uimatotriples/UIMAToTriples.java
@@ -26,10 +26,10 @@
 import java.util.Set;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -158,9 +158,9 @@
 
 
         try {
-            UriRef uimaUriRef = new UriRef(uimaUri);
+            IRI uimaIRI = new IRI(uimaUri);
             logger.info(new StringBuilder("Trying to load holder for ref:").append(uimaUri).toString());
-            holder = ci.getPart(uimaUriRef, FeatureStructureListHolder.class);
+            holder = ci.getPart(uimaIRI, FeatureStructureListHolder.class);
             for (String source : sourceNames) {
                 logger.info(new StringBuilder("Processing UIMA source:").append(source).toString());
                 List<FeatureStructure> sourceList = holder.getFeatureStructureList(source);
@@ -176,14 +176,14 @@
                     logger.debug(new StringBuilder("Checking ").append(typeName).toString());
                     if (tnfs.checkFeatureStructureAllowed(typeName, fs.getFeatures())) {
                         logger.debug(new StringBuilder("Adding ").append(typeName).toString());
-                        UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(
+                        IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(
                                 ci, this);
-                        MGraph metadata = ci.getMetadata();
+                        Graph metadata = ci.getMetadata();
                         String uriRefStr = uimaUri + ":" + typeName;
                         if (mappings.containsKey(typeName)) {
                             uriRefStr = mappings.get(typeName);
                         }
-                        metadata.add(new TripleImpl(textAnnotation, DC_TYPE, new UriRef(uriRefStr)));
+                        metadata.add(new TripleImpl(textAnnotation, DC_TYPE, new IRI(uriRefStr)));
 
                         if (fs.getFeature("begin") != null) {
                             metadata.add(new TripleImpl(textAnnotation, ENHANCER_START,
@@ -205,7 +205,7 @@
                                     predRefStr = mappings.get(f.getName());
                                 }
 
-                                UriRef predicate = new UriRef(predRefStr);
+                                IRI predicate = new IRI(predRefStr);
 
                                 metadata.add(new TripleImpl(textAnnotation, predicate, new PlainLiteralImpl(f.getValueAsString())));
                             }
diff --git a/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java b/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java
index 7ed5705..eca09a5 100644
--- a/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java
+++ b/enhancement-engines/xmpextractor/src/main/java/org/apache/stanbol/enhancer/engines/xmpextractor/XmpExtractorEngine.java
@@ -23,17 +23,17 @@
 import java.util.Collections;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.utils.GraphNode;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.EngineException;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -90,10 +90,10 @@
 		}
     	byte[] bytes = baos.toByteArray();
     	if (bytes.length > 0) {
-	        MGraph model = new IndexedMGraph();
+	        Graph model = new IndexedGraph();
 			parser.parse(model, new ByteArrayInputStream(bytes), "application/rdf+xml");
 	        GraphNode gn = new GraphNode(
-					new UriRef("http://relative-uri.fake/"), model);
+					new IRI("http://relative-uri.fake/"), model);
 			gn.replaceWith(ci.getUri());
 	        ci.getLock().writeLock().lock();
 	        try { 
diff --git a/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/ZemantaOntologyEnum.java b/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/ZemantaOntologyEnum.java
index c5c1b3f..c9c8060 100644
--- a/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/ZemantaOntologyEnum.java
+++ b/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/ZemantaOntologyEnum.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.enhancer.engines.zemanta;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Holds concepts, properties and instances found in the Zemanta ontology.
@@ -51,7 +51,7 @@
     Keyword,
     name,
     schema,;
-    UriRef uri;
+    IRI uri;
 
     /**
      * Creates n new entity of this Enum by using the parsed namespace and
@@ -61,7 +61,7 @@
      * @param local The local name or <code>null</code> to use the default
      */
     ZemantaOntologyEnum(String ns, String local) {
-        uri = new UriRef((ns == null ? "http://s.zemanta.com/ns#" : ns) + (local == null ? name() : local));
+        uri = new IRI((ns == null ? "http://s.zemanta.com/ns#" : ns) + (local == null ? name() : local));
     }
 
     /**
@@ -96,9 +96,9 @@
     /**
      * The URI of the element of this Enum.
      *
-     * @return the URI of the element as Clerezza UriRef
+     * @return the URI of the element as Clerezza IRI
      */
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 }
diff --git a/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaAPIWrapper.java b/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaAPIWrapper.java
index 01a503e..26a748d 100644
--- a/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaAPIWrapper.java
+++ b/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaAPIWrapper.java
@@ -27,9 +27,9 @@
 import java.util.EnumMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
 import org.slf4j.Logger;
@@ -38,7 +38,7 @@
 /**
  * This class wraps the Zemanta API into one method.
  * Zemanta is able to return RDF-XML so parsing the response into
- * a Graph object is simple.
+ * an ImmutableGraph object is simple.
  *
  * @author michaelmarth
  * @author westei (Rupert Westenthaler)
@@ -55,9 +55,9 @@
         apiKey = key;
     }
 
-    public Graph enhance(String textToAnalyze) throws IOException {
+    public ImmutableGraph enhance(String textToAnalyze) throws IOException {
         InputStream is = sendRequest(textToAnalyze);
-        Graph zemantaResponseGraph = parseResponse(is);
+        ImmutableGraph zemantaResponseGraph = parseResponse(is);
         return zemantaResponseGraph;
     }
 
@@ -130,13 +130,13 @@
         return data;
     }
 
-    private Graph parseResponse(InputStream is) {
+    private ImmutableGraph parseResponse(InputStream is) {
         JenaParserProvider jenaParserProvider = new JenaParserProvider();
         //NOTE(rw): the new third parameter is the base URI used to resolve relative paths
-        MGraph g = new SimpleMGraph();
+        Graph g = new SimpleGraph();
         jenaParserProvider.parse(g,is, SupportedFormat.RDF_XML,null);
         log.debug("graph: " + g.toString());
-        return g.getGraph();
+        return g.getImmutableGraph();
     }
 
 }
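
parseResponse condenses the central type mapping of this migration: MGraph becomes the mutable Graph, the old immutable Graph becomes ImmutableGraph, and MGraph.getGraph() becomes Graph.getImmutableGraph(). A minimal round-trip sketch with placeholder URIs:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public final class SnapshotSketch {
        public static void main(String[] args) {
            Graph g = new SimpleGraph(); // mutable; replaces SimpleMGraph
            g.add(new TripleImpl(new IRI("urn:ex:s"), new IRI("urn:ex:p"), new IRI("urn:ex:o")));
            ImmutableGraph snapshot = g.getImmutableGraph(); // replaces MGraph.getGraph()
            System.out.println(snapshot.size()); // 1
        }
    }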
diff --git a/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngine.java b/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngine.java
index d72203f..6692631 100644
--- a/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngine.java
+++ b/enhancement-engines/zemanta/src/main/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngine.java
@@ -46,17 +46,15 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -189,7 +189,7 @@
 
 
     public void computeEnhancements(ContentItem ci) throws EngineException {
-        Entry<UriRef,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
+        Entry<IRI,Blob> contentPart = ContentItemHelper.getBlob(ci, SUPPORTED_MIMETYPES);
         if(contentPart == null){
             throw new IllegalStateException("No ContentPart with a supported Mime Type"
                 + "found for ContentItem "+ci.getUri()+"(supported: '"
@@ -207,10 +207,10 @@
                 contentPart.getKey(),ci.getUri());
             return;
         }
-        MGraph graph = ci.getMetadata();
-        UriRef ciId = ci.getUri();
+        Graph graph = ci.getMetadata();
+        IRI ciId = ci.getUri();
         //we need to store the results of Zemanta in an temp graph
-        MGraph results = new SimpleMGraph();
+        Graph results = new SimpleGraph();
         ZemantaAPIWrapper zemanta = new ZemantaAPIWrapper(key);
         try {
             results.addAll(zemanta.enhance(text));
@@ -234,20 +234,20 @@
                 (Object) defaultOrder));
     }
     
-    protected void processCategories(MGraph results, MGraph enhancements, UriRef ciId) {
+    protected void processCategories(Graph results, Graph enhancements, IRI ciId) {
         Iterator<Triple> categories = results.filter(null, RDF_TYPE, ZemantaOntologyEnum.Category.getUri());
         //add the root Text annotation as soon as the first TopicAnnotation is added.
-        UriRef textAnnotation = null;
+        IRI textAnnotation = null;
         while (categories.hasNext()) {
-            NonLiteral category = categories.next().getSubject();
+            BlankNodeOrIRI category = categories.next().getSubject();
             log.debug("process category " + category);
             Double confidence = parseConfidence(results, category);
             log.debug(" > confidence :" + confidence);
             //now we need to follow the Target link
-            UriRef target = EnhancementEngineHelper.getReference(results, category, ZemantaOntologyEnum.target.getUri());
+            IRI target = EnhancementEngineHelper.getReference(results, category, ZemantaOntologyEnum.target.getUri());
             if (target != null) {
                 //first check the used categorisation
-                UriRef categorisationScheme = EnhancementEngineHelper.getReference(results, target, ZemantaOntologyEnum.categorization.getUri());
+                IRI categorisationScheme = EnhancementEngineHelper.getReference(results, target, ZemantaOntologyEnum.categorization.getUri());
                 if (categorisationScheme != null && categorisationScheme.equals(ZemantaOntologyEnum.categorization_DMOZ.getUri())) {
                     String categoryTitle = EnhancementEngineHelper.getString(results, target, ZemantaOntologyEnum.title.getUri());
                     if (categoryTitle != null) {
@@ -258,7 +258,7 @@
                             enhancements.add(new TripleImpl(textAnnotation,DC_TYPE,SKOS_CONCEPT));
                         }
                         //now write the TopicAnnotation
-                        UriRef categoryEnhancement = createTopicEnhancement(enhancements, this, ciId);
+                        IRI categoryEnhancement = createTopicEnhancement(enhancements, this, ciId);
                         //make related to the EntityAnnotation
                         enhancements.add(new TripleImpl(categoryEnhancement, DC_RELATION, textAnnotation));
                         //write the title
@@ -266,7 +266,7 @@
                         //write the reference
                         if (categoryTitle.startsWith(ZEMANTA_DMOZ_PREFIX)) {
                             enhancements.add(
-                                    new TripleImpl(categoryEnhancement, ENHANCER_ENTITY_REFERENCE, new UriRef(DMOZ_BASE_URL + categoryTitle.substring(ZEMANTA_DMOZ_PREFIX.length()))));
+                                    new TripleImpl(categoryEnhancement, ENHANCER_ENTITY_REFERENCE, new IRI(DMOZ_BASE_URL + categoryTitle.substring(ZEMANTA_DMOZ_PREFIX.length()))));
                         }
                         //write the confidence
                         if (confidence != null) {
@@ -302,40 +302,40 @@
      *                     enhancements
      * @param text         the content of the content item as string
      */
-    protected void processRecognition(MGraph results, MGraph enhancements, String text, UriRef ciId) {
+    protected void processRecognition(Graph results, Graph enhancements, String text, IRI ciId) {
         Iterator<Triple> recognitions = results.filter(null, RDF_TYPE, ZemantaOntologyEnum.Recognition.getUri());
         while (recognitions.hasNext()) {
-            NonLiteral recognition = recognitions.next().getSubject();
+            BlankNodeOrIRI recognition = recognitions.next().getSubject();
             log.debug("process recognition " + recognition);
             //first get everything we need for the textAnnotations
             Double confidence = parseConfidence(results, recognition);
             log.debug(" > confidence :" + confidence);
             String anchor = EnhancementEngineHelper.getString(results, recognition, ZemantaOntologyEnum.anchor.getUri());
             log.debug(" > anchor :" + anchor);
-            Collection<NonLiteral> textAnnotations = processTextAnnotation(enhancements, text, ciId, anchor, confidence);
+            Collection<BlankNodeOrIRI> textAnnotations = processTextAnnotation(enhancements, text, ciId, anchor, confidence);
             log.debug(" > number of textAnnotations :" + textAnnotations.size());
 
             //second we need to create the EntityAnnotation that represent the
             //recognition
-            NonLiteral object = EnhancementEngineHelper.getReference(results, recognition, ZemantaOntologyEnum.object.getUri());
+            BlankNodeOrIRI object = EnhancementEngineHelper.getReference(results, recognition, ZemantaOntologyEnum.object.getUri());
             log.debug(" > object :" + object);
             //The targets represent the linked entities
             //  ... and yes there can be more of them!
             //TODO: can we create an EntityAnnotation with several referred entities?
             //      Should we use the owl:sameAs to decide that!
-            Set<UriRef> sameAsSet = new HashSet<UriRef>();
-            for (Iterator<UriRef> sameAs = getReferences(results, object, ZemantaOntologyEnum.owlSameAs.getUri()); sameAs.hasNext(); sameAsSet.add(sameAs.next()))
+            Set<IRI> sameAsSet = new HashSet<IRI>();
+            for (Iterator<IRI> sameAs = getReferences(results, object, ZemantaOntologyEnum.owlSameAs.getUri()); sameAs.hasNext(); sameAsSet.add(sameAs.next()))
                 ;
             log.debug(" > sameAs :" + sameAsSet);
             //now parse the targets and look if there are others than the one
             //merged by using sameAs
-            Iterator<UriRef> targets = EnhancementEngineHelper.getReferences(results, object, ZemantaOntologyEnum.target.getUri());
+            Iterator<IRI> targets = EnhancementEngineHelper.getReferences(results, object, ZemantaOntologyEnum.target.getUri());
             String title = null;
             while (targets.hasNext()) {
                 //the entityRef is the URL of the target
-                UriRef entity = targets.next();
+                IRI entity = targets.next();
                 log.debug("    -  target :" + entity);
-                UriRef targetType = EnhancementEngineHelper.getReference(results, entity, ZemantaOntologyEnum.targetType.getUri());
+                IRI targetType = EnhancementEngineHelper.getReference(results, entity, ZemantaOntologyEnum.targetType.getUri());
                 log.debug("       o type :" + targetType);
                 if (ZemantaOntologyEnum.targetType_RDF.getUri().equals(targetType)) {
                     String targetTitle = EnhancementEngineHelper.getString(results, entity, ZemantaOntologyEnum.title.getUri());
@@ -357,16 +357,16 @@
                 //      any entity types!
             }
             //create the entityEnhancement
-            UriRef entityEnhancement = EnhancementEngineHelper.createEntityEnhancement(enhancements, this, ciId);
+            IRI entityEnhancement = EnhancementEngineHelper.createEntityEnhancement(enhancements, this, ciId);
             if (confidence != null) {
                 enhancements.add(
                         new TripleImpl(entityEnhancement, ENHANCER_CONFIDENCE, literalFactory.createTypedLiteral(confidence)));
             }
-            for (NonLiteral relatedTextAnnotation : textAnnotations) {
+            for (BlankNodeOrIRI relatedTextAnnotation : textAnnotations) {
                 enhancements.add(
                         new TripleImpl(entityEnhancement, DC_RELATION, relatedTextAnnotation));
             }
-            for (UriRef entity : sameAsSet) {
+            for (IRI entity : sameAsSet) {
                 enhancements.add(
                         new TripleImpl(entityEnhancement, ENHANCER_ENTITY_REFERENCE, entity));
             }
@@ -388,7 +388,7 @@
      *         double value.
      * @see ZemantaOntologyEnum#confidence
      */
-    private static Double parseConfidence(TripleCollection tc, NonLiteral resource) {
+    private static Double parseConfidence(Graph tc, BlankNodeOrIRI resource) {
         String confidenceString = EnhancementEngineHelper.getString(tc, resource, ZemantaOntologyEnum.confidence.getUri());
         Double confidence;
         if (confidenceString != null) {
@@ -421,21 +421,21 @@
      *
      * @return a collection of all existing/created text annotations for the parsed anchor
      */
-    private Collection<NonLiteral> processTextAnnotation(MGraph enhancements, String text, UriRef ciId, String anchor, Double confidence) {
-        Collection<NonLiteral> textAnnotations = new ArrayList<NonLiteral>();
+    private Collection<BlankNodeOrIRI> processTextAnnotation(Graph enhancements, String text, IRI ciId, String anchor, Double confidence) {
+        Collection<BlankNodeOrIRI> textAnnotations = new ArrayList<BlankNodeOrIRI>();
         int anchorLength = anchor.length();
         Literal anchorLiteral = new PlainLiteralImpl(anchor);
         //first search for existing TextAnnotations for the anchor
-        Map<Integer, Collection<NonLiteral>> existingTextAnnotationsMap = searchExistingTextAnnotations(enhancements, anchorLiteral);
+        Map<Integer, Collection<BlankNodeOrIRI>> existingTextAnnotationsMap = searchExistingTextAnnotations(enhancements, anchorLiteral);
 
         for (int current = text.indexOf(anchor); current >= 0; current = text.indexOf(anchor, current + 1)) {
-            Collection<NonLiteral> existingTextAnnotations = existingTextAnnotationsMap.get(current);
+            Collection<BlankNodeOrIRI> existingTextAnnotations = existingTextAnnotationsMap.get(current);
             if (existingTextAnnotations != null) {
                 //use the existing once
                 textAnnotations.addAll(existingTextAnnotations);
             } else {
                 //we need to create an new one!
-                UriRef textAnnotation = EnhancementEngineHelper.createTextEnhancement(enhancements, this, ciId);
+                IRI textAnnotation = EnhancementEngineHelper.createTextEnhancement(enhancements, this, ciId);
                 textAnnotations.add(textAnnotation);
                 //write the selection
                 enhancements.add(
@@ -490,18 +490,18 @@
      * @return Map that uses the start position as an key and a list of
      *         text annotations as an value.
      */
-    private Map<Integer, Collection<NonLiteral>> searchExistingTextAnnotations(MGraph enhancements, Literal anchorLiteral) {
+    private Map<Integer, Collection<BlankNodeOrIRI>> searchExistingTextAnnotations(Graph enhancements, Literal anchorLiteral) {
         Iterator<Triple> textAnnotationsIterator = enhancements.filter(null, ENHANCER_SELECTED_TEXT, anchorLiteral);
-        Map<Integer, Collection<NonLiteral>> existingTextAnnotationsMap = new HashMap<Integer, Collection<NonLiteral>>();
+        Map<Integer, Collection<BlankNodeOrIRI>> existingTextAnnotationsMap = new HashMap<Integer, Collection<BlankNodeOrIRI>>();
         while (textAnnotationsIterator.hasNext()) {
-            NonLiteral subject = textAnnotationsIterator.next().getSubject();
+            BlankNodeOrIRI subject = textAnnotationsIterator.next().getSubject();
             //test rdfType
             if (enhancements.contains(new TripleImpl(subject, RDF_TYPE, ENHANCER_TEXTANNOTATION))) {
                 Integer start = EnhancementEngineHelper.get(enhancements, subject, ENHANCER_START, Integer.class, literalFactory);
                 if (start != null) {
-                    Collection<NonLiteral> textAnnotationList = existingTextAnnotationsMap.get(start);
+                    Collection<BlankNodeOrIRI> textAnnotationList = existingTextAnnotationsMap.get(start);
                     if (textAnnotationList == null) {
-                        textAnnotationList = new ArrayList<NonLiteral>();
+                        textAnnotationList = new ArrayList<BlankNodeOrIRI>();
                         existingTextAnnotationsMap.put(start, textAnnotationList);
                     }
                     textAnnotationList.add(subject);
diff --git a/enhancement-engines/zemanta/src/test/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngineTest.java b/enhancement-engines/zemanta/src/test/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngineTest.java
index a965607..59b62b8 100644
--- a/enhancement-engines/zemanta/src/test/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngineTest.java
+++ b/enhancement-engines/zemanta/src/test/java/org/apache/stanbol/enhancer/engines/zemanta/impl/ZemantaEnhancementEngineTest.java
@@ -28,8 +28,8 @@
 import java.util.Map;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.jena.serializer.JenaSerializerProvider;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -95,7 +95,7 @@
     public static ContentItem wrapAsContentItem(final String text) throws IOException {
     	String id = "urn:org.apache.stanbol.enhancer:test:engines.zemanta:content-item-"
             + EnhancementEngineHelper.randomUUID().toString();
-    	return ciFactory.createContentItem(new UriRef(id), new StringSource(text));
+    	return ciFactory.createContentItem(new IRI(id), new StringSource(text));
     }
 
     @Test
@@ -109,7 +109,7 @@
         }
         JenaSerializerProvider serializer = new JenaSerializerProvider();
         serializer.serialize(System.out, ci.getMetadata(), TURTLE);
-        Map<UriRef,Resource> expectedValues = new HashMap<UriRef,Resource>();
+        Map<IRI,RDFTerm> expectedValues = new HashMap<IRI,RDFTerm>();
         expectedValues.put(Properties.ENHANCER_EXTRACTED_FROM, ci.getUri());
         expectedValues.put(Properties.DC_CREATOR, LiteralFactory.getInstance().createTypedLiteral(
             zemantaEngine.getClass().getName()));
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/Benchmark.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/Benchmark.java
index f4b76b7..783f573 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/Benchmark.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/Benchmark.java
@@ -18,7 +18,7 @@
 
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementException;
@@ -40,8 +40,8 @@
     List<BenchmarkResult> execute(EnhancementJobManager jobManager, 
                                   ContentItemFactory ciFactory) throws EnhancementException;
     
-    /** Return the enhanced Graph of our input text */
-    Graph getGraph(EnhancementJobManager jobManager, 
+    /** Return the enhanced ImmutableGraph of our input text */
+    ImmutableGraph getGraph(EnhancementJobManager jobManager, 
                    ContentItemFactory ciFactory) throws EnhancementException;
 
     /**
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/BenchmarkResult.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/BenchmarkResult.java
index 158924a..cadebad 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/BenchmarkResult.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/BenchmarkResult.java
@@ -18,8 +18,8 @@
 
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /** Benchmark result for a single TripleMatcherGroup */
 public interface BenchmarkResult {
@@ -33,5 +33,5 @@
     String getInfo();
     
     /** Set of subjects that match our TripleMatcherGroup */
-    Set<UriRef> getMatchingSubjects();
+    Set<IRI> getMatchingSubjects();
 }
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcher.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcher.java
index f09af4a..53e446b 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcher.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcher.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.enhancer.benchmark;
 
-import org.apache.clerezza.rdf.core.Triple;
+import org.apache.clerezza.commons.rdf.Triple;
 
 /** TripleMatcher is used to count how many Triples
  *  match a given statement in the benchmark tool.
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcherGroup.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcherGroup.java
index 319928a..72c5a56 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcherGroup.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/TripleMatcherGroup.java
@@ -19,8 +19,8 @@
 import java.util.Collection;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /** A group of TripleMatcher, used to check that 
  *  enhancements match all the TripleMatcher in 
@@ -36,10 +36,10 @@
      */
     boolean isExpectGroup();
     
-    /** Return the set of UriRef that match all
-     *  TripleMatcher in this group for supplied Graph
+    /** Return the set of IRIs that match all
+     *  TripleMatchers in this group for the supplied ImmutableGraph
      */
-    Set<UriRef> getMatchingSubjects(Graph g);
+    Set<IRI> getMatchingSubjects(ImmutableGraph g);
     
     /** @return our TripleMatcher */
     Collection<TripleMatcher> getMatchers();
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkImpl.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkImpl.java
index b2f9c9c..b143df5 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkImpl.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkImpl.java
@@ -20,7 +20,7 @@
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.stanbol.enhancer.benchmark.Benchmark;
 import org.apache.stanbol.enhancer.benchmark.BenchmarkResult;
 import org.apache.stanbol.enhancer.benchmark.TripleMatcherGroup;
@@ -36,7 +36,7 @@
     
     private String name;
     private String inputText;
-    private Graph graph;
+    private ImmutableGraph graph;
     private ContentItemFactory ciFactory;
     private Chain chain;
     
@@ -94,7 +94,7 @@
     }
     
     /** @inheritDoc */
-    public Graph getGraph(EnhancementJobManager jobManager, 
+    public ImmutableGraph getGraph(EnhancementJobManager jobManager, 
                           ContentItemFactory ciFactory) throws EnhancementException {
         if(graph == null) {
             ContentItem ci;
@@ -109,7 +109,7 @@
             } else { //parsing null as chain does not work!
                 jobManager.enhanceContent(ci,chain);
             }
-            graph = ci.getMetadata().getGraph();
+            graph = ci.getMetadata().getImmutableGraph();
         }
         return graph;
     }
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkResultImpl.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkResultImpl.java
index 6c7ebe5..5480963 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkResultImpl.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkResultImpl.java
@@ -18,8 +18,8 @@
 
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.benchmark.BenchmarkResult;
 import org.apache.stanbol.enhancer.benchmark.TripleMatcherGroup;
 
@@ -28,9 +28,9 @@
     private final TripleMatcherGroup tmg;
     private final boolean successful;
     private String info;
-    private final Set<UriRef> matchingSubjects;
+    private final Set<IRI> matchingSubjects;
     
-    BenchmarkResultImpl(TripleMatcherGroup tmg, Graph graph) {
+    BenchmarkResultImpl(TripleMatcherGroup tmg, ImmutableGraph graph) {
         this.tmg = tmg;
         matchingSubjects = tmg.getMatchingSubjects(graph);
         
@@ -82,7 +82,7 @@
     }
     
     @Override
-    public Set<UriRef> getMatchingSubjects() {
+    public Set<IRI> getMatchingSubjects() {
         return matchingSubjects;
     }
 }
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkServlet.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkServlet.java
index 3d965fc..01fe40f 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkServlet.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/BenchmarkServlet.java
@@ -35,7 +35,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.LineIterator;
@@ -108,7 +108,7 @@
             serializer = s;
         }
         
-        public String format(Graph g, String mimeType) throws UnsupportedEncodingException {
+        public String format(ImmutableGraph g, String mimeType) throws UnsupportedEncodingException {
             final ByteArrayOutputStream bos = new ByteArrayOutputStream();
             serializer.serialize(bos, g, mimeType);
             return bos.toString("UTF-8");
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImpl.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImpl.java
index 616b09c..5219db1 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImpl.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImpl.java
@@ -23,10 +23,10 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.benchmark.TripleMatcher;
 import org.apache.stanbol.enhancer.benchmark.TripleMatcherGroup;
 
@@ -61,25 +61,25 @@
     }
     
     @Override
-    public Set<UriRef> getMatchingSubjects(Graph g) {
+    public Set<IRI> getMatchingSubjects(ImmutableGraph g) {
         if(matchers.isEmpty()) {
-            return new HashSet<UriRef>();
+            return new HashSet<IRI>();
         }
 
         // For all matchers, find the set of subjects that match
         // and compute the intersection of those sets
-        Set<UriRef> intersection = null;
+        Set<IRI> intersection = null;
         for(TripleMatcher m : matchers) {
-            final Set<UriRef> s = new HashSet<UriRef>();
+            final Set<IRI> s = new HashSet<IRI>();
             final Iterator<Triple> it = g.iterator();
             while(it.hasNext()) {
                 final Triple t = it.next();
                 if(m.matches(t)) {
-                    final NonLiteral n = t.getSubject();
-                    if(n instanceof UriRef) {
-                        s.add((UriRef)n);
+                    final BlankNodeOrIRI n = t.getSubject();
+                    if(n instanceof IRI) {
+                        s.add((IRI)n);
                     } else {
-                        // TODO do we need to handle non-UriRef subjects?
+                        // TODO do we need to handle non-IRI subjects?
                     }
                 }
             }
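
The loop above collects one Set<IRI> of matching subjects per matcher; the group result is the intersection of those sets (the intersection step itself falls outside this hunk). A sketch of that intersection idiom; the method and parameter names are made up for illustration:

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    import org.apache.clerezza.commons.rdf.IRI;

    public final class IntersectSketch {
        // Illustrative helper: intersect the per-matcher subject sets.
        static Set<IRI> intersect(List<Set<IRI>> perMatcherSets) {
            Set<IRI> intersection = null;
            for (Set<IRI> s : perMatcherSets) {
                if (intersection == null) {
                    intersection = new HashSet<IRI>(s);
                } else {
                    intersection.retainAll(s);
                }
            }
            return intersection == null ? new HashSet<IRI>() : intersection;
        }
    }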
diff --git a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherImpl.java b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherImpl.java
index d824cd9..aff450e 100644
--- a/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherImpl.java
+++ b/enhancer/benchmark/src/main/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherImpl.java
@@ -18,15 +18,15 @@
 
 import java.io.IOException;
 
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.benchmark.TripleMatcher;
 
 class TripleMatcherImpl implements TripleMatcher {
 
     private final String operator;
-    private final UriRef predicateUri;
-    private final UriRef objectUri;
+    private final IRI predicateUri;
+    private final IRI objectUri;
     
     /** Build from a line supplied by the parser.
      *  Format is PREDICATE_URI OPERATOR ARGUMENTS, 
@@ -37,11 +37,11 @@
             throw new IOException("Invalid TripleMatcher format in line [" + line + "]");
         }
         
-        predicateUri = new UriRef(parts[0]);
+        predicateUri = new IRI(parts[0]);
         
         operator = parts[1];
         if("URI".equals(operator)) {
-            objectUri = new UriRef(parts[2]);
+            objectUri = new IRI(parts[2]);
         } else {
             // TODO support other operators
             objectUri = null;
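
A matcher line of the form PREDICATE_URI URI OBJECT_URI therefore reduces to equals() checks on the triple's predicate and object, which works because IRI instances compare by their lexical form. A sketch under that assumption, with made-up URIs:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;

    public final class MatchSketch {
        public static void main(String[] args) {
            IRI predicate = new IRI("http://example.org/P1");
            IRI object = new IRI("http://example.org/O1");
            Triple t = new TripleImpl(new IRI("http://example.org/S1"), predicate, object);
            // what a "http://example.org/P1 URI http://example.org/O1" line tests
            boolean matches = predicate.equals(t.getPredicate())
                    && object.equals(t.getObject());
            System.out.println(matches); // true
        }
    }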
diff --git a/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImplTest.java b/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImplTest.java
index c95547b..2809829 100644
--- a/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImplTest.java
+++ b/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleMatcherGroupImplTest.java
@@ -20,20 +20,20 @@
 
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.junit.Before;
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
 public class TripleMatcherGroupImplTest {
-    private MGraph graph;
+    private Graph graph;
     
     @Before
     public void createGraph() {
-        graph = new SimpleMGraph();
+        graph = new SimpleGraph();
         graph.add(TripleUtil.uriTriple("S1", "P1", "01"));
         graph.add(TripleUtil.uriTriple("S1", "P1", "02"));
         graph.add(TripleUtil.uriTriple("S2", "P1", "01"));
@@ -49,14 +49,14 @@
         assertEquals(
                 "Empty matcher group should find nothing",
                 0,
-                group.getMatchingSubjects(graph.getGraph()).size());
+                group.getMatchingSubjects(graph.getImmutableGraph()).size());
         
         // Add two matchers, only S1 and S2 match all of them
         group.addMatcher(new TripleMatcherImpl("P1 URI 01"));
         group.addMatcher(new TripleMatcherImpl("P1 URI 02"));
         
-        final Set<UriRef> actual = group.getMatchingSubjects(graph.getGraph());
-        final Set<UriRef> expected = TripleUtil.uriRefSet("S1", "S2");
+        final Set<IRI> actual = group.getMatchingSubjects(graph.getImmutableGraph());
+        final Set<IRI> expected = TripleUtil.uriRefSet("S1", "S2");
         
         assertEquals("Size of results " + actual + " matches " + expected, expected.size(), actual.size());
         assertTrue("Content of results " + actual + " matches " + expected, expected.containsAll(actual));
diff --git a/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleUtil.java b/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleUtil.java
index 31b4348..cfa7c5d 100644
--- a/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleUtil.java
+++ b/enhancer/benchmark/src/test/java/org/apache/stanbol/enhancer/benchmark/impl/TripleUtil.java
@@ -19,19 +19,19 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 
 public class TripleUtil {
     static Triple uriTriple(String subject, String predicate, String object) {
-        return new TripleImpl(new UriRef(subject), new UriRef(predicate), new UriRef(object));
+        return new TripleImpl(new IRI(subject), new IRI(predicate), new IRI(object));
     }
     
-    static Set<UriRef> uriRefSet(String...uri) {
-        final Set<UriRef> result = new HashSet<UriRef>();
+    static Set<IRI> uriRefSet(String...uri) {
+        final Set<IRI> result = new HashSet<IRI>();
         for(String str : uri) {
-            result.add(new UriRef(str));
+            result.add(new IRI(str));
         }
         return result;
     }
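
Intended use of these helpers, sketched with the same shorthand names as the test data; the sketch assumes it lives in the same package as TripleUtil, since the methods are package-private:

    import java.util.Set;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public final class TripleUtilUsageSketch {
        public static void main(String[] args) {
            Graph graph = new SimpleGraph();
            graph.add(TripleUtil.uriTriple("S1", "P1", "01")); // relative names, as in the test
            Set<IRI> expected = TripleUtil.uriRefSet("S1", "S2");
            System.out.println(graph.size() + " triple(s); expecting subjects " + expected);
        }
    }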
diff --git a/enhancer/chain/allactive/src/main/java/org/apache/stanbol/enhancer/chain/allactive/impl/AllActiveEnginesChain.java b/enhancer/chain/allactive/src/main/java/org/apache/stanbol/enhancer/chain/allactive/impl/AllActiveEnginesChain.java
index 82e3277..bea65db 100644
--- a/enhancer/chain/allactive/src/main/java/org/apache/stanbol/enhancer/chain/allactive/impl/AllActiveEnginesChain.java
+++ b/enhancer/chain/allactive/src/main/java/org/apache/stanbol/enhancer/chain/allactive/impl/AllActiveEnginesChain.java
@@ -25,7 +25,7 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ChainException;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -62,7 +62,7 @@
     private String name;
     private final Object lock = new Object();
     private BundleContext context;
-    private Graph executionPlan;
+    private ImmutableGraph executionPlan;
     private Set<String> engineNames;
     private EnginesTracker tracker;
     
@@ -107,7 +107,7 @@
         return name;
     }
     @Override
-    public Graph getExecutionPlan() throws ChainException {
+    public ImmutableGraph getExecutionPlan() throws ChainException {
         synchronized (lock) {
             if(executionPlan == null){
                 update();
diff --git a/enhancer/chain/graph/src/main/java/org/apache/stanbol/enhancer/chain/graph/impl/GraphChain.java b/enhancer/chain/graph/src/main/java/org/apache/stanbol/enhancer/chain/graph/impl/GraphChain.java
index bd3e3e8..245017f 100644
--- a/enhancer/chain/graph/src/main/java/org/apache/stanbol/enhancer/chain/graph/impl/GraphChain.java
+++ b/enhancer/chain/graph/src/main/java/org/apache/stanbol/enhancer/chain/graph/impl/GraphChain.java
@@ -40,11 +40,11 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Activate;
@@ -105,7 +105,7 @@
     protected final Logger log = LoggerFactory.getLogger(GraphChain.class); 
     
     /**
-     * Property used to configure the Graph by using the line based 
+     * Property used to configure the ImmutableGraph by using the line based 
      * representation with the following Syntax:
      * <code><pre>
      *   &lt;engineName&gt;;&lt;parm1&gt;=&lt;value1&gt;,&lt;value2&gt;;&lt;parm2&gt;=&lt;value1&gt;...
@@ -217,10 +217,10 @@
                 }
             } else {
                 throw new ConfigurationException(PROPERTY_CHAIN_LIST, 
-                    "The list based configuration of a Graph Chain MUST BE " +
+                    "The list based configuration of a ImmutableGraph Chain MUST BE " +
                     "configured as a Array or Collection of Strings (parsed: "+
                     (list != null?list.getClass():"null")+"). NOTE you can also " +
-                    "configure the Graph by pointing to a resource with the graph as" +
+                    "configure the ImmutableGraph by pointing to a resource with the graph as" +
                     "value of the property '"+PROPERTY_GRAPH_RESOURCE+"'.");
             }
             Map<String,Map<String,List<String>>> config;
@@ -258,7 +258,7 @@
         super.deactivate(ctx);
     }
     @Override
-    public Graph getExecutionPlan() throws ChainException {
+    public ImmutableGraph getExecutionPlan() throws ChainException {
         return internalChain.getExecutionPlan();
     }
 
@@ -294,7 +294,7 @@
          * The executionPlan is parsed and validated within
          * {@link #updateExecutionPlan()}
          */
-        private Graph executionPlan;
+        private ImmutableGraph executionPlan;
         /**
          * The referenced engine names. Use the {@link #resourceName} to sync 
          * access.<p>
@@ -302,7 +302,7 @@
          */
         private Set<String> engineNames;
         /**
-         * Parser used to parse the RDF {@link Graph} from the {@link InputStream}
+         * Parser used to parse the RDF {@link ImmutableGraph} from the {@link InputStream}
          * provided to the {@link #available(String, InputStream)} method by the
          * {@link DataFileTracker}.
          */
@@ -365,7 +365,7 @@
             return false; //keep tracking
         }
         @Override
-        public Graph getExecutionPlan() throws ChainException {
+        public ImmutableGraph getExecutionPlan() throws ChainException {
             synchronized (resourceName) {
                 if(executionPlan == null){
                     updateExecutionPlan();
@@ -426,7 +426,7 @@
      */
     private final class ListConfigExecutionPlan implements Chain {
 
-        private final Graph executionPlan;
+        private final ImmutableGraph executionPlan;
         private final Set<String> engines;
         
         /**
@@ -445,9 +445,9 @@
                 		"GraphChain '{}'",getName());
             }
             engines = Collections.unmodifiableSet(new HashSet<String>(config.keySet()));
-            MGraph graph = new SimpleMGraph();
-            NonLiteral epNode = createExecutionPlan(graph, getName(), chainProperties);
-            //caches the String name -> {NonLiteral node, List<Stirng> dependsOn} mappings
+            Graph graph = new SimpleGraph();
+            BlankNodeOrIRI epNode = createExecutionPlan(graph, getName(), chainProperties);
+            //caches the String name -> {BlankNodeOrIRI node, List<String> dependsOn} mappings
             Map<String,Object[]> name2nodes = new HashMap<String,Object[]>();
             //1. write the nodes (without dependencies)
             for(Entry<String,Map<String,List<String>>> node : config.entrySet()){
@@ -470,9 +470,9 @@
                         Object[] targetInfo = name2nodes.get(target);
                         if(targetInfo != null){
                             graph.add(new TripleImpl(
-                                (NonLiteral)info.getValue()[0], 
+                                (BlankNodeOrIRI)info.getValue()[0], 
                                 ExecutionPlan.DEPENDS_ON, 
-                                (NonLiteral)targetInfo[0]));
+                                (BlankNodeOrIRI)targetInfo[0]));
                             
                         } else { //reference to a undefined engine :(
                             throw new IllegalArgumentException("The Engine '"+
@@ -483,11 +483,11 @@
                     }
                 } //this node has no dependencies
             }
-            this.executionPlan = graph.getGraph();
+            this.executionPlan = graph.getImmutableGraph();
         }
         
         @Override
-        public Graph getExecutionPlan() throws ChainException {
+        public ImmutableGraph getExecutionPlan() throws ChainException {
             return executionPlan;
         }
 
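The GraphChain changes also show the second half of the renaming: the mutable type is now Graph (formerly MGraph, with SimpleGraph replacing SimpleMGraph), and the read-only result of getExecutionPlan() is ImmutableGraph (formerly Graph), frozen via getImmutableGraph() instead of getGraph(). A hedged sketch of that build-then-freeze pattern, using a stand-in property IRI instead of the real ExecutionPlan vocabulary:

    // build the execution plan in a mutable Graph, then freeze it
    Graph graph = new SimpleGraph();                 // was: MGraph graph = new SimpleMGraph();
    BlankNodeOrIRI engineNode = new IRI("urn:example:engine");
    graph.add(new TripleImpl(engineNode,
            new IRI("urn:example:dependsOn"),        // stand-in for ExecutionPlan.DEPENDS_ON
            new IRI("urn:example:otherEngine")));
    ImmutableGraph plan = graph.getImmutableGraph(); // was: graph.getGraph()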
diff --git a/enhancer/chain/list/src/main/java/org/apache/stanbol/enhancer/chain/list/impl/ListChain.java b/enhancer/chain/list/src/main/java/org/apache/stanbol/enhancer/chain/list/impl/ListChain.java
index 5d04d3b..ed536a4 100644
--- a/enhancer/chain/list/src/main/java/org/apache/stanbol/enhancer/chain/list/impl/ListChain.java
+++ b/enhancer/chain/list/src/main/java/org/apache/stanbol/enhancer/chain/list/impl/ListChain.java
@@ -30,10 +30,10 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Properties;
@@ -101,7 +101,7 @@
 
     private Set<String> engineNames;
     
-    private Graph executionPlan;
+    private ImmutableGraph executionPlan;
         
     
     @Override
@@ -124,9 +124,9 @@
                         (value != null?value.getClass():"null")+")");
         }
         Set<String> engineNames = new HashSet<String>(configuredChain.size());
-        NonLiteral last = null;
-        MGraph ep = new SimpleMGraph();
-        NonLiteral epNode = createExecutionPlan(ep, getName(), getChainProperties());
+        BlankNodeOrIRI last = null;
+        Graph ep = new SimpleGraph();
+        BlankNodeOrIRI epNode = createExecutionPlan(ep, getName(), getChainProperties());
         log.debug("Parse ListChain config:");
         for(String line : configuredChain){
             try {
@@ -151,7 +151,7 @@
                 "The configured chain MUST at least contain a single valid entry!");
         }
         this.engineNames = Collections.unmodifiableSet(engineNames);
-        this.executionPlan = ep.getGraph();
+        this.executionPlan = ep.getImmutableGraph();
     }
 
     @Override
@@ -161,7 +161,7 @@
         super.deactivate(ctx);
     }
     @Override
-    public Graph getExecutionPlan() throws ChainException {
+    public ImmutableGraph getExecutionPlan() throws ChainException {
         return executionPlan;
     }
 
diff --git a/enhancer/chain/weighted/src/main/java/org/apache/stanbol/enhancer/chain/weighted/impl/WeightedChain.java b/enhancer/chain/weighted/src/main/java/org/apache/stanbol/enhancer/chain/weighted/impl/WeightedChain.java
index 7fe493c..bc6a164 100644
--- a/enhancer/chain/weighted/src/main/java/org/apache/stanbol/enhancer/chain/weighted/impl/WeightedChain.java
+++ b/enhancer/chain/weighted/src/main/java/org/apache/stanbol/enhancer/chain/weighted/impl/WeightedChain.java
@@ -31,7 +31,7 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Properties;
@@ -120,7 +120,7 @@
      */
     private Object epLock = new Object();
     
-    private Graph executionPlan = null;
+    private ImmutableGraph executionPlan = null;
     
     @Override
     protected void activate(ComponentContext ctx) throws ConfigurationException {
@@ -177,7 +177,7 @@
         super.deactivate(ctx);
     }
     @Override
-    public Graph getExecutionPlan() throws ChainException {
+    public ImmutableGraph getExecutionPlan() throws ChainException {
         synchronized (epLock) {
             if(executionPlan == null){
                 executionPlan = createExecutionPlan();
@@ -199,7 +199,7 @@
      * @throws ChainException if a required {@link EnhancementEngine} of the
      * configured {@link #chain} is not active.
      */
-    private Graph createExecutionPlan() throws ChainException {
+    private ImmutableGraph createExecutionPlan() throws ChainException {
         List<EnhancementEngine> availableEngines = new ArrayList<EnhancementEngine>(chain.size());
         Set<String> optionalEngines = new HashSet<String>();
         Set<String> missingEngines = new HashSet<String>();
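WeightedChain uses the same lazy-initialization idiom as the other chains: the ImmutableGraph execution plan is built on first access under a lock and then reused. Condensed from the hunks above (createExecutionPlan() stands for the chain-specific builder):

    private final Object epLock = new Object();
    private ImmutableGraph executionPlan = null;

    public ImmutableGraph getExecutionPlan() throws ChainException {
        synchronized (epLock) {
            if (executionPlan == null) {
                executionPlan = createExecutionPlan(); // built once, reused afterwards
            }
            return executionPlan;
        }
    }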
diff --git a/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/file/FileContentItemFactory.java b/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/file/FileContentItemFactory.java
index 7d5d2cb..13550fa 100644
--- a/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/file/FileContentItemFactory.java
+++ b/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/file/FileContentItemFactory.java
@@ -30,8 +30,8 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -39,7 +39,7 @@
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
@@ -159,12 +159,12 @@
     }
         
     @Override
-    protected ContentItem createContentItem(UriRef id, Blob blob, MGraph metadata) {
+    protected ContentItem createContentItem(IRI id, Blob blob, Graph metadata) {
         return new FileContentItem(id, blob, metadata);
     }
 
     @Override
-    protected ContentItem createContentItem(String prefix, Blob blob, MGraph metadata) {
+    protected ContentItem createContentItem(String prefix, Blob blob, Graph metadata) {
         return new FileContentItem(prefix, blob, metadata);
     }
 
@@ -391,7 +391,7 @@
      * prefix is <code>null</code>
      * @throws IllegalStateException if the parsed blob is not an {@link FileBlob}
      */
-    protected UriRef getDefaultUri(Blob blob, String prefix) {
+    protected IRI getDefaultUri(Blob blob, String prefix) {
         if(blob == null){
             throw new IllegalArgumentException("The parsed Blob MUST NOT be NULL!");
         }
@@ -399,7 +399,7 @@
             throw new IllegalArgumentException("The parsed prefix MUST NOT be NULL!");
         }
         if(blob instanceof FileBlob) {
-            return new UriRef(prefix+SHA1.toLowerCase()+ '-' + ((FileBlob)blob).getSha1());
+            return new IRI(prefix+SHA1.toLowerCase()+ '-' + ((FileBlob)blob).getSha1());
         } else {
             throw new IllegalStateException("FileContentItem expects FileBlobs to be used" +
                     "as Blob implementation (found: "+blob.getClass()+")!");
@@ -408,13 +408,13 @@
 
     protected class FileContentItem extends ContentItemImpl implements ContentItem {
         
-        public FileContentItem(UriRef id, Blob blob,MGraph metadata) {
+        public FileContentItem(IRI id, Blob blob,Graph metadata) {
             super(id == null ? getDefaultUri(blob, DEFAULT_CONTENT_ITEM_PREFIX) : id, blob,
-                    metadata == null ? new IndexedMGraph() : metadata);
+                    metadata == null ? new IndexedGraph() : metadata);
         }
-        public FileContentItem(String prefix, Blob blob,MGraph metadata) {
+        public FileContentItem(String prefix, Blob blob,Graph metadata) {
             super(getDefaultUri(blob, prefix), blob,
-                metadata == null ? new IndexedMGraph() : metadata);
+                metadata == null ? new IndexedGraph() : metadata);
         }
 
         
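The getDefaultUri(..) rule above yields content-addressed ContentItem IDs: for a FileBlob the URI is the prefix plus "sha1-" plus the blob's SHA-1 digest. Illustrative sketch; the prefix and digest values are placeholders:

    String prefix = "urn:content-item-";                          // placeholder prefix
    String sha1Hex = "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12";  // placeholder digest
    IRI id = new IRI(prefix + "sha1-" + sha1Hex);
    // -> urn:content-item-sha1-2fd4e1c67a2d28fced849ee1bb76e7391b93eb12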
diff --git a/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItem.java b/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItem.java
index 9da1d7f..ca13d54 100644
--- a/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItem.java
+++ b/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItem.java
@@ -17,10 +17,10 @@
 package org.apache.stanbol.enhancer.contentitem.inmemory;
 
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
 import org.apache.stanbol.enhancer.servicesapi.ContentSource;
@@ -33,7 +33,7 @@
 /**
  * ContentItem implementation that holds a complete copy of the data in
  * memory. Internally it uses {@link InMemoryBlob} to store the content and
- * an {@link SimpleMGraph} for the metadata.
+ * a {@link SimpleGraph} for the metadata.
  * <p>
  * This implementation can be used independently of any store implementation and
  * is suitable for stateless processing.
@@ -53,14 +53,14 @@
      * with a {@link ByteArraySource}
      */
     public InMemoryContentItem(byte[] content, String mimeType) {
-        this((UriRef)null,new InMemoryBlob(content, mimeType),null);
+        this((IRI)null,new InMemoryBlob(content, mimeType),null);
     }
     /**
      * 
      * @param id
      * @param content
      * @param mimeType
-     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(UriRef, ContentSource)}
+     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(IRI, ContentSource)}
      * with a {@link StringSource} instead.
      */
     public InMemoryContentItem(String id, String content, String mimeType) {
@@ -71,7 +71,7 @@
      * @param id
      * @param content
      * @param mimetype
-     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(UriRef, ContentSource)}
+     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(IRI, ContentSource)}
      * with a {@link ByteArraySource} instead.
      */
     public InMemoryContentItem(String id, byte[] content, String mimetype) {
@@ -83,12 +83,12 @@
      * @param id
      * @param content
      * @param mimetype
-     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(UriRef, ContentSource,MGraph)}
+     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(IRI, ContentSource,Graph)}
      * with a {@link ByteArraySource} instead.
      */
     public InMemoryContentItem(String uriString, byte[] content, String mimeType,
-            MGraph metadata) {
-    	this(uriString != null? new UriRef(uriString) : null ,
+            Graph metadata) {
+    	this(uriString != null? new IRI(uriString) : null ,
     	        new InMemoryBlob(content, mimeType),
     	        metadata);
     }
@@ -97,10 +97,10 @@
      * @param id
      * @param content
      * @param mimetype
-     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(UriRef, ContentSource,MGraph)}
+     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(IRI, ContentSource,Graph)}
      * with a {@link StringSource} instead.
      */
-    public InMemoryContentItem(UriRef uriRef, String content, String mimeType) {
+    public InMemoryContentItem(IRI uriRef, String content, String mimeType) {
 		this(uriRef, new InMemoryBlob(content, mimeType), null);
 	}
     /**
@@ -108,18 +108,18 @@
      * @param id
      * @param content
      * @param mimetype
-     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(UriRef, ContentSource,MGraph)}
+     * @deprecated use {@link InMemoryContentItemFactory#createContentItem(IRI, ContentSource,Graph)}
      * with a {@link ByteArraySource} instead.
      */
-    public InMemoryContentItem(UriRef uri, byte[] content, String mimeType, MGraph metadata) {
+    public InMemoryContentItem(IRI uri, byte[] content, String mimeType, Graph metadata) {
         this(uri, new InMemoryBlob(content, mimeType),metadata);
     }
-    protected InMemoryContentItem(String uriString, Blob blob, MGraph metadata) {
-        this(uriString != null ? new UriRef(uriString) : null, blob, metadata);
+    protected InMemoryContentItem(String uriString, Blob blob, Graph metadata) {
+        this(uriString != null ? new IRI(uriString) : null, blob, metadata);
     }
-    protected InMemoryContentItem(UriRef uri, Blob blob, MGraph metadata) {
+    protected InMemoryContentItem(IRI uri, Blob blob, Graph metadata) {
         super(uri == null ? ContentItemHelper.makeDefaultUrn(blob): uri,blob,
-                metadata == null ? new IndexedMGraph() : metadata);
+                metadata == null ? new IndexedGraph() : metadata);
     }
 
     /**
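All InMemoryContentItem constructors funnel into the last one above, which falls back to an IndexedGraph when no metadata graph is parsed (and to a default URN when no id is parsed). A usage sketch against the deprecated byte-array constructor shown above:

    // null metadata -> the constructor creates a new IndexedGraph internally
    InMemoryContentItem ci = new InMemoryContentItem(
            new IRI("urn:example:content1"),
            "Hello Stanbol".getBytes(java.nio.charset.StandardCharsets.UTF_8),
            "text/plain",
            null);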
diff --git a/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItemFactory.java b/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItemFactory.java
index 9da32dd..f07d6f5 100644
--- a/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItemFactory.java
+++ b/enhancer/generic/core/src/main/java/org/apache/stanbol/enhancer/contentitem/inmemory/InMemoryContentItemFactory.java
@@ -20,8 +20,8 @@
 import java.io.IOException;
 import java.io.OutputStream;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Properties;
 import org.apache.felix.scr.annotations.Property;
@@ -62,12 +62,12 @@
     }
 
     @Override
-    protected ContentItem createContentItem(UriRef id, Blob blob, MGraph metadata) {
+    protected ContentItem createContentItem(IRI id, Blob blob, Graph metadata) {
         return new InMemoryContentItem(id, blob, metadata);
     }
     
     @Override
-    protected ContentItem createContentItem(String prefix, Blob blob, MGraph metadata) {
+    protected ContentItem createContentItem(String prefix, Blob blob, Graph metadata) {
         return new InMemoryContentItem(ContentItemHelper.makeDefaultUri(prefix, blob), blob, metadata);
     }
 
diff --git a/enhancer/generic/nlp-json/src/main/java/org/apache/stanbol/enhancer/nlp/json/valuetype/impl/NerTagSupport.java b/enhancer/generic/nlp-json/src/main/java/org/apache/stanbol/enhancer/nlp/json/valuetype/impl/NerTagSupport.java
index 1c5abc3..d21a3b8 100644
--- a/enhancer/generic/nlp-json/src/main/java/org/apache/stanbol/enhancer/nlp/json/valuetype/impl/NerTagSupport.java
+++ b/enhancer/generic/nlp-json/src/main/java/org/apache/stanbol/enhancer/nlp/json/valuetype/impl/NerTagSupport.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.json.valuetype.impl;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Property;
@@ -49,7 +49,7 @@
         }
         JsonNode uri = jValue.path("uri");
         if(uri.isTextual()){
-            return new NerTag(tag.getTextValue(), new UriRef(uri.getTextValue()));
+            return new NerTag(tag.getTextValue(), new IRI(uri.getTextValue()));
         } else {
             return new NerTag(tag.getTextValue());
         }
diff --git a/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/AnalyzedTextSerializerAndParserTest.java b/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/AnalyzedTextSerializerAndParserTest.java
index 5873fc3..2e486fa 100644
--- a/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/AnalyzedTextSerializerAndParserTest.java
+++ b/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/AnalyzedTextSerializerAndParserTest.java
@@ -30,7 +30,7 @@
 import java.util.Set;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.nlp.NlpAnnotations;
 import org.apache.stanbol.enhancer.nlp.model.AnalysedText;
@@ -108,7 +108,7 @@
     
     private static ContentItem ci;
 
-    private static Entry<UriRef,Blob> textBlob;
+    private static Entry<IRI,Blob> textBlob;
     
     @BeforeClass
     public static final void setup() throws IOException {
diff --git a/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/valuetype/ValueTypeSupportTest.java b/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/valuetype/ValueTypeSupportTest.java
index 86abc0b..63fc1e0 100644
--- a/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/valuetype/ValueTypeSupportTest.java
+++ b/enhancer/generic/nlp-json/src/test/java/org/apache/stanbol/enhancer/nlp/json/valuetype/ValueTypeSupportTest.java
@@ -11,7 +11,7 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.nlp.json.AnalyzedTextParser;
 import org.apache.stanbol.enhancer.nlp.json.AnalyzedTextSerializer;
@@ -42,7 +42,7 @@
     
     private static ContentItem ci;
 
-    private static Entry<UriRef,Blob> textBlob;
+    private static Entry<IRI,Blob> textBlob;
 	
 	protected static void setupAnalysedText(String text) throws IOException {
 		ci = ciFactory.createContentItem(new StringSource(text));
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedText.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedText.java
index 6414274..8bbdd1b 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedText.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedText.java
@@ -19,26 +19,26 @@
 import java.util.ConcurrentModificationException;
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 
 /**
  * Provides access to NLP processing results of the <code>text/plain</code>
  * {@link Blob} of an ContentItem. Intended to be
- * {@link ContentItem#addPart(org.apache.clerezza.rdf.core.UriRef, Object) added
+ * {@link ContentItem#addPart(org.apache.clerezza.commons.rdf.IRI, Object) added
  * as ContentPart} by using {@link #ANALYSED_TEXT_URI}.
- * @see ContentItem#addPart(UriRef, Object)
+ * @see ContentItem#addPart(IRI, Object)
  */
 public interface AnalysedText extends Section{
 
     
     /**
-     * The {@link UriRef} used to register the {@link AnalysedText} instance
-     * as {@link ContentItem#addPart(org.apache.clerezza.rdf.core.UriRef, Object) 
+     * The {@link IRI} used to register the {@link AnalysedText} instance
+     * as {@link ContentItem#addPart(org.apache.clerezza.commons.rdf.IRI, Object) 
      * ContentPart} to the {@link ContentItem}
      */
-    public static final UriRef ANALYSED_TEXT_URI = new UriRef("urn:stanbol.enhancer:nlp.analysedText");
+    public static final IRI ANALYSED_TEXT_URI = new IRI("urn:stanbol.enhancer:nlp.analysedText");
 
     /**
      * Returns {@link SpanTypeEnum#Text}
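
ANALYSED_TEXT_URI is the well-known IRI under which NLP results are attached to a ContentItem. A hedged usage sketch (ci and at are assumed to exist; getPart is the usual typed ContentItem accessor):

    // register the AnalysedText as ContentPart ...
    ci.addPart(AnalysedText.ANALYSED_TEXT_URI, at);
    // ... and read it back, typed
    AnalysedText at2 = ci.getPart(AnalysedText.ANALYSED_TEXT_URI, AnalysedText.class);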
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextFactory.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextFactory.java
index d46a207..accca07 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextFactory.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextFactory.java
@@ -18,7 +18,7 @@
 
 import java.io.IOException;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.model.impl.AnalysedTextFactoryImpl;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -40,10 +40,10 @@
     /**
      * Creates an {@link AnalysedText} instance for the parsed {@link Blob}
      * and registers itself as 
-     * {@link ContentItem#addPart(org.apache.clerezza.rdf.core.UriRef, Object) 
-     * ContentPart} with the {@link UriRef} {@link AnalysedText#ANALYSED_TEXT_URI}
+     * {@link ContentItem#addPart(org.apache.clerezza.commons.rdf.IRI, Object) 
+     * ContentPart} with the {@link IRI} {@link AnalysedText#ANALYSED_TEXT_URI}
      * to the parsed {@link ContentItem}.<p>
-     * If already a ContentPart with the given UriRef is registered this 
+     * If a ContentPart with the given IRI is already registered, this 
      * Method will throw an {@link IllegalStateException}.
      * @param ci the ContentItem to register the created {@link AnalysedText} instance
      * @param blob the analysed {@link Blob}
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextUtils.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextUtils.java
index 3a49d2a..b92e113 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextUtils.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextUtils.java
@@ -29,7 +29,7 @@
 import java.util.TreeSet;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.model.impl.SectionImpl;
 import org.apache.stanbol.enhancer.nlp.model.impl.SpanImpl;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Case.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Case.java
index 19faf93..ef5e510 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Case.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Case.java
@@ -20,7 +20,7 @@
 import java.util.EnumSet;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Defines verb tenses as defined by the <a href="">OLIA</a> Ontology.
@@ -481,17 +481,17 @@
 
     ;
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
-    UriRef uri;
+    IRI uri;
 
     Case() {
         this(null);
     }
 
     Case(String name) {
-        uri = new UriRef(OLIA_NAMESPACE + (name == null ? name() : (name + "Case")));
+        uri = new IRI(OLIA_NAMESPACE + (name == null ? name() : (name + "Case")));
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Definitness.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Definitness.java
index 7f97aa1..a645833 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Definitness.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Definitness.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.morpho;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public enum Definitness {
     /**
@@ -38,13 +38,13 @@
      */
     Indefinite;
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
-    UriRef uri;
+    IRI uri;
 
     Definitness() {
-        uri = new UriRef(OLIA_NAMESPACE + name());
+        uri = new IRI(OLIA_NAMESPACE + name());
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
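Definitness is the simplest instance of a pattern that repeats across the morphology and ontology enums below (Case, Gender, NumberFeature, Person, Tense, VerbMood, the NIF vocabularies, ...): every constant eagerly resolves its OLIA IRI once. Condensed sketch of the migrated pattern:

    import org.apache.clerezza.commons.rdf.IRI;

    public enum ExampleFeature {
        Definite,
        Indefinite;

        static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
        private final IRI uri = new IRI(OLIA_NAMESPACE + name()); // was: new UriRef(..)

        public IRI getUri() {
            return uri;
        }
    }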
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Gender.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Gender.java
index 2d1965b..f407edf 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Gender.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Gender.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.morpho;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Enumeration representing the different genders of words based on the <a
@@ -60,17 +60,17 @@
      */
     Neuter;
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
-    UriRef uri;
+    IRI uri;
 
     Gender() {
         this(null);
     }
 
     Gender(String name) {
-        uri = new UriRef(OLIA_NAMESPACE + (name == null ? name() : name));
+        uri = new IRI(OLIA_NAMESPACE + (name == null ? name() : name));
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/NumberFeature.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/NumberFeature.java
index 7d1d8d8..69d9796 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/NumberFeature.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/NumberFeature.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.morpho;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public enum NumberFeature {
     /**
@@ -57,13 +57,13 @@
      */
     Trial;
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
-    UriRef uri;
+    IRI uri;
 
     NumberFeature() {
-        uri = new UriRef(OLIA_NAMESPACE + name());
+        uri = new IRI(OLIA_NAMESPACE + name());
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Person.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Person.java
index b255d6e..793b140 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Person.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Person.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.morpho;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
 * Enumeration representing the different persons of words based on the <a
@@ -44,17 +44,17 @@
     Third("ThirdPerson");
 
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
-    UriRef uri;
+    IRI uri;
 
     Person() {
         this(null);
     }
 
     Person(String name) {
-        uri = new UriRef(OLIA_NAMESPACE + (name == null ? name() : name));
+        uri = new IRI(OLIA_NAMESPACE + (name == null ? name() : name));
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Tense.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Tense.java
index 8c3b96c..3032449 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Tense.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/Tense.java
@@ -22,7 +22,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 /**
  * Defines verb tenses as defined by the
  * <a href="">OLIA</a> Ontology.<p>
@@ -67,7 +67,7 @@
     RelativePresent(Relative),
     ;
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
-    UriRef uri;
+    IRI uri;
     Tense parent;
     
     Tense() {
@@ -81,7 +81,7 @@
         this(name,null);
     }
     Tense(String name,Tense parent) {
-        uri = new UriRef(OLIA_NAMESPACE + (name == null ? name() : name));
+        uri = new IRI(OLIA_NAMESPACE + (name == null ? name() : name));
         this.parent = parent;
     }
     /**
@@ -113,7 +113,7 @@
         return transitiveClosureMap.get(this);
     }
     
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/VerbMood.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/VerbMood.java
index f1dcfd6..0bc10e2 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/VerbMood.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/morpho/VerbMood.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.morpho;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 /**
 * Enumeration representing the different verbal moods based on the <a href="http://purl.org/olia/olia.owl">OLIA</a> Ontology
  * 
@@ -71,16 +71,16 @@
 	IndicativeVerb,
     ;
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
-    UriRef uri;
+    IRI uri;
     VerbMood() {
         this(null);
     }
 
     VerbMood(String name) {
-        uri = new UriRef(OLIA_NAMESPACE + (name == null ? name() : (name + "Verb Form")));
+        uri = new IRI(OLIA_NAMESPACE + (name == null ? name() : (name + "Verb Form")));
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ner/NerTag.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ner/NerTag.java
index e10470d..21f7c1f 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ner/NerTag.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ner/NerTag.java
@@ -16,17 +16,17 @@
 */
 package org.apache.stanbol.enhancer.nlp.ner;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.model.tag.Tag;
 
 public class NerTag extends Tag<NerTag> {
 
-    private UriRef type;
+    private IRI type;
     
     public NerTag(String tag) {
         super(tag);
     }
-    public NerTag(String tag,UriRef type) {
+    public NerTag(String tag,IRI type) {
         super(tag);
         this.type = type;
     }
@@ -36,7 +36,7 @@
      * @return the <code>dc:type</code> of the Named Entity
      * as also used by the <code>fise:TextAnnotation</code>
      */
-    public UriRef getType() {
+    public IRI getType() {
         return type;
     }
     
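NerTag carries the dc:type of the detected entity as an IRI, matching what the engines also write to the fise:TextAnnotation. A small sketch; the type IRI is illustrative (dbpedia-style types are commonly used here):

    NerTag person = new NerTag("PER", new IRI("http://dbpedia.org/ontology/Person"));
    IRI dcType = person.getType(); // null for plain tags created with new NerTag("PER")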
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/Nif20.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/Nif20.java
index 4e87dc1..cce425b 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/Nif20.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/Nif20.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.nif;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.pos.LexicalCategory;
 import org.apache.stanbol.enhancer.nlp.pos.Pos;
 
@@ -523,17 +523,17 @@
 	;
     public final static String NAMESPACE = "http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#";
 
-    UriRef uri;
+    IRI uri;
     
     private Nif20() {
-        uri = new UriRef(NAMESPACE+name());
+        uri = new IRI(NAMESPACE+name());
     }
     
     public String getLocalName(){
         return name();
     }
     
-    public UriRef getUri(){
+    public IRI getUri(){
         return uri;
     }
     
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/SsoOntology.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/SsoOntology.java
index 21f3cc1..c5633f2 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/SsoOntology.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/SsoOntology.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.nif;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public enum SsoOntology {
     /**
@@ -73,17 +73,17 @@
     ;
     public final static String NAMESPACE = "http://nlp2rdf.lod2.eu/schema/sso/";
 
-    UriRef uri;
+    IRI uri;
     
     private SsoOntology() {
-        uri = new UriRef(NAMESPACE+name());
+        uri = new IRI(NAMESPACE+name());
     }
     
     public String getLocalName(){
         return name();
     }
     
-    public UriRef getUri(){
+    public IRI getUri(){
         return uri;
     }
     
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/StringOntology.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/StringOntology.java
index f342ff1..a92d87e 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/StringOntology.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/nif/StringOntology.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.nif;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.utils.NIFHelper;
 
 public enum StringOntology {
@@ -24,7 +24,7 @@
      * The URI of this String was created with the URI Recipe Context-Hash, see
      * http://aksw.org/Projects/NIF#context-hash-nif-uri-recipe.
      * 
-     * @see NIFHelper#getNifHashURI(UriRef, int, int, String)
+     * @see NIFHelper#getNifHashURI(IRI, int, int, String)
      */
     ContextHashBasedString,
     /**
@@ -33,7 +33,7 @@
      * be a string, a HTML document, a PDF document, text file or any other arbitrary string. The uri denoting
      * the actual document should be able to reproduce that document, i.e. either the string is directly
      * included via the property sourceString or an url can be given that contains the string via the property
-     * sourceUrl. Depending on the feedback, this might also become the Graph URI or a subclass of
+     * sourceUrl. Depending on the feedback, this might also become the ImmutableGraph URI or a subclass of
      * owl:Ontology
      */
     Document,
@@ -90,17 +90,17 @@
     sourceString;
     public final static String NAMESPACE = "http://nlp2rdf.lod2.eu/schema/string/";
 
-    UriRef uri;
+    IRI uri;
 
     private StringOntology() {
-        uri = new UriRef(NAMESPACE + name());
+        uri = new IRI(NAMESPACE + name());
     }
 
     public String getLocalName() {
         return name();
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/SsoOntology.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/SsoOntology.java
index 537a5ed..fe4a55b 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/SsoOntology.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/SsoOntology.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.ontology;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public enum SsoOntology {
     /**
@@ -73,17 +73,17 @@
     ;
     public final static String NAMESPACE = "http://nlp2rdf.lod2.eu/schema/sso/";
 
-    UriRef uri;
+    IRI uri;
     
     private SsoOntology() {
-        uri = new UriRef(NAMESPACE+name());
+        uri = new IRI(NAMESPACE+name());
     }
     
     public String getLocalName(){
         return name();
     }
     
-    public UriRef getUri(){
+    public IRI getUri(){
         return uri;
     }
     
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/StringOntology.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/StringOntology.java
index 79abb12..67ce740 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/StringOntology.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/ontology/StringOntology.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.ontology;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.utils.NIFHelper;
 
 public enum StringOntology {
@@ -24,7 +24,7 @@
      * The URI of this String was created with the URI Recipe Context-Hash, see
      * http://aksw.org/Projects/NIF#context-hash-nif-uri-recipe.
      * 
-     * @see NIFHelper#getNifHashURI(UriRef, int, int, String)
+     * @see NIFHelper#getNifHashURI(IRI, int, int, String)
      */
     ContextHashBasedString,
     /**
@@ -33,7 +33,7 @@
      * be a string, a HTML document, a PDF document, text file or any other arbitrary string. The uri denoting
      * the actual document should be able to reproduce that document, i.e. either the string is directly
      * included via the property sourceString or an url can be given that contains the string via the property
-     * sourceUrl. Depending on the feedback, this might also become the Graph URI or a subclass of
+     * sourceUrl. Depending on the feedback, this might also become the ImmutableGraph URI or a subclass of
      * owl:Ontology
      */
     Document,
@@ -90,17 +90,17 @@
     sourceString;
     public final static String NAMESPACE = "http://nlp2rdf.lod2.eu/schema/string/";
 
-    UriRef uri;
+    IRI uri;
 
     private StringOntology() {
-        uri = new UriRef(NAMESPACE + name());
+        uri = new IRI(NAMESPACE + name());
     }
 
     public String getLocalName() {
         return name();
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/LexicalCategory.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/LexicalCategory.java
index 7d5f12c..bc647fe 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/LexicalCategory.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/LexicalCategory.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.pos;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Lexical categories used by the Stanbol Enhancer NLP module. Defined based on the top level
@@ -112,13 +112,13 @@
     ;
     static final String OLIA_NAMESPACE = "http://purl.org/olia/olia.owl#";
 
-    UriRef uri;
+    IRI uri;
 
     LexicalCategory() {
-        this.uri = new UriRef(OLIA_NAMESPACE + name());
+        this.uri = new IRI(OLIA_NAMESPACE + name());
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/Pos.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/Pos.java
index e60d430..5f39ba0 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/Pos.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/Pos.java
@@ -27,7 +27,7 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.morpho.Tense;
 
 import com.ibm.icu.impl.Punycode;
@@ -1995,7 +1995,7 @@
 
     private final Set<LexicalCategory> categories;
     private final Collection<Pos> parents;
-    private final UriRef uri;
+    private final IRI uri;
 
     Pos(LexicalCategory category) {
         this(null, category, (LexicalCategory) null);
@@ -2006,7 +2006,7 @@
     }
 
     Pos(String name, LexicalCategory category, LexicalCategory additional) {
-        this.uri = new UriRef(OLIA_NAMESPACE + (name == null ? name() : name));
+        this.uri = new IRI(OLIA_NAMESPACE + (name == null ? name() : name));
         categories = EnumSet.of(category);
         if (additional != null) {
             categories.add(additional);
@@ -2027,7 +2027,7 @@
     }
 
     Pos(String name, LexicalCategory category, Pos... parent) {
-        this.uri = new UriRef(OLIA_NAMESPACE + (name == null ? name() : name));
+        this.uri = new IRI(OLIA_NAMESPACE + (name == null ? name() : name));
         this.parents = parent == null || parent.length < 1 ? Collections.EMPTY_SET : Arrays.asList(parent);
         categories = category == null ? EnumSet.noneOf(LexicalCategory.class) : EnumSet.of(category);
         Set<Pos> toProcess = new HashSet<Pos>(parents);
@@ -2052,7 +2052,7 @@
         return parents;
     }
 
-    public UriRef getUri() {
+    public IRI getUri() {
         return uri;
     }
 
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/English.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/English.java
index 8cb6aad..981036c 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/English.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/English.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.pos.olia;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.model.tag.TagSet;
 import org.apache.stanbol.enhancer.nlp.pos.LexicalCategory;
 import org.apache.stanbol.enhancer.nlp.pos.Pos;
@@ -40,9 +40,9 @@
     static {
         //TODO: define constants for annotation model and linking model
         PENN_TREEBANK.getProperties().put("olia.annotationModel", 
-            new UriRef("http://purl.org/olia/penn.owl"));
+            new IRI("http://purl.org/olia/penn.owl"));
         PENN_TREEBANK.getProperties().put("olia.linkingModel", 
-            new UriRef("http://purl.org/olia/penn-link.rdf"));
+            new IRI("http://purl.org/olia/penn-link.rdf"));
 
         PENN_TREEBANK.addTag(new PosTag("CC", Pos.CoordinatingConjunction));
         PENN_TREEBANK.addTag(new PosTag("CD",Pos.CardinalNumber));
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/German.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/German.java
index 864f376..ec2faeb 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/German.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/German.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.pos.olia;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.model.tag.TagSet;
 import org.apache.stanbol.enhancer.nlp.pos.LexicalCategory;
 import org.apache.stanbol.enhancer.nlp.pos.Pos;
@@ -40,9 +40,9 @@
     static {
         //TODO: define constants for annotation model and linking model
         STTS.getProperties().put("olia.annotationModel", 
-            new UriRef("http://purl.org/olia/stts.owl"));
+            new IRI("http://purl.org/olia/stts.owl"));
         STTS.getProperties().put("olia.linkingModel", 
-            new UriRef("http://purl.org/olia/stts-link.rdf"));
+            new IRI("http://purl.org/olia/stts-link.rdf"));
         STTS.addTag(new PosTag("ADJA", Pos.AttributiveAdjective));
         STTS.addTag(new PosTag("ADJD", Pos.PredicativeAdjective));
         STTS.addTag(new PosTag("ADV", LexicalCategory.Adverb));
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/Spanish.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/Spanish.java
index c01354b..368a131 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/Spanish.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/pos/olia/Spanish.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.nlp.pos.olia;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.model.tag.TagSet;
 import org.apache.stanbol.enhancer.nlp.pos.LexicalCategory;
 import org.apache.stanbol.enhancer.nlp.pos.Pos;
@@ -37,10 +37,10 @@
     static {
         //TODO: define constants for annotation model and linking model
         PAROLE.getProperties().put("olia.annotationModel", 
-            new UriRef("http://purl.org/olia/parole_es_cat.owl"));
+            new IRI("http://purl.org/olia/parole_es_cat.owl"));
 // NO linking model
 //        PAROLE.getProperties().put("olia.linkingModel", 
-//            new UriRef("http://purl.org/olia/???"));
+//            new IRI("http://purl.org/olia/???"));
         PAROLE.addTag(new PosTag("AO", LexicalCategory.Adjective));
         PAROLE.addTag(new PosTag("AQ", Pos.QualifierAdjective));
         PAROLE.addTag(new PosTag("CC", Pos.CoordinatingConjunction));
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelper.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelper.java
index 9ae4147..18cfb5a 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelper.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelper.java
@@ -29,12 +29,12 @@
 import java.util.EnumMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.nlp.NlpAnnotations;
 import org.apache.stanbol.enhancer.nlp.model.AnalysedText;
@@ -60,9 +60,9 @@
 
     private NIFHelper(){}
     
-    public static final Map<SpanTypeEnum,UriRef> SPAN_TYPE_TO_SSO_TYPE;
+    public static final Map<SpanTypeEnum,IRI> SPAN_TYPE_TO_SSO_TYPE;
     static {
-        Map<SpanTypeEnum,UriRef> mapping = new EnumMap<SpanTypeEnum,UriRef>(SpanTypeEnum.class);
+        Map<SpanTypeEnum,IRI> mapping = new EnumMap<SpanTypeEnum,IRI>(SpanTypeEnum.class);
         //mapping.put(SpanTypeEnum.Text, null);
         //mapping.put(SpanTypeEnum.TextSection, null);
         mapping.put(SpanTypeEnum.Sentence, SsoOntology.Sentence.getUri());
@@ -76,15 +76,15 @@
      * Concept representing the Phrase (e.g. {@link LexicalCategory#Noun} maps
      * to "<code>http://purl.org/olia/olia.owl#NounPhrase</code>").
      */
-    public static final Map<LexicalCategory,UriRef> LEXICAL_TYPE_TO_PHRASE_TYPE;
+    public static final Map<LexicalCategory,IRI> LEXICAL_TYPE_TO_PHRASE_TYPE;
     static {
         String olia = "http://purl.org/olia/olia.owl#";
-        Map<LexicalCategory,UriRef> mapping = new EnumMap<LexicalCategory,UriRef>(LexicalCategory.class);
-        mapping.put(LexicalCategory.Noun, new UriRef(olia+"NounPhrase"));
-        mapping.put(LexicalCategory.Verb, new UriRef(olia+"VerbPhrase"));
-        mapping.put(LexicalCategory.Adjective, new UriRef(olia+"AdjectivePhrase"));
-        mapping.put(LexicalCategory.Adverb, new UriRef(olia+"AdverbPhrase"));
-        mapping.put(LexicalCategory.Conjuction, new UriRef(olia+"ConjuctionPhrase"));
+        Map<LexicalCategory,IRI> mapping = new EnumMap<LexicalCategory,IRI>(LexicalCategory.class);
+        mapping.put(LexicalCategory.Noun, new IRI(olia+"NounPhrase"));
+        mapping.put(LexicalCategory.Verb, new IRI(olia+"VerbPhrase"));
+        mapping.put(LexicalCategory.Adjective, new IRI(olia+"AdjectivePhrase"));
+        mapping.put(LexicalCategory.Adverb, new IRI(olia+"AdverbPhrase"));
+        mapping.put(LexicalCategory.Conjuction, new IRI(olia+"ConjuctionPhrase"));
         LEXICAL_TYPE_TO_PHRASE_TYPE = Collections.unmodifiableMap(mapping);
     }    
     /**
@@ -95,10 +95,10 @@
      * @param end the end position or values &lt; 1 when open ended.
      * @return the NIF 2.0 Fragment URI
      * @throws IllegalArgumentException if <code>null</code> is parsed as base
-     * {@link UriRef} or the end position is &gt;=0 but &lt= the parsed start
+     * {@link IRI} or the end position is &gt;=0 but &lt;= the parsed start
      * position.
      */
-    public static final UriRef getNifFragmentURI(UriRef base, int start,int end){
+    public static final IRI getNifFragmentURI(IRI base, int start,int end){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -111,10 +111,10 @@
             }
             sb.append(end);
         } //else open ended ...
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
  
-    public static final UriRef getNifOffsetURI(UriRef base, int start, int end){
+    public static final IRI getNifOffsetURI(IRI base, int start, int end){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -127,7 +127,7 @@
             }
             sb.append(end);
         } //else open ended ...
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
     
     public static final int NIF_HASH_CONTEXT_LENGTH = 10;
@@ -135,7 +135,7 @@
     
     public static final Charset UTF8 = Charset.forName("UTF8");
     
-    public static final UriRef getNifHashURI(UriRef base, int start, int end, String text){
+    public static final IRI getNifHashURI(IRI base, int start, int end, String text){
         if(base == null){
             throw new IllegalArgumentException("Base URI MUST NOT be NULL!");
         }
@@ -160,7 +160,7 @@
         sb.append('_');
         sb.append(text.substring(start, 
             Math.min(end,start+NIF_HASH_MAX_STRING_LENGTH)));
-        return new UriRef(sb.toString());
+        return new IRI(sb.toString());
     }
 
     /**
@@ -215,11 +215,11 @@
      * @param text the {@link AnalysedText}
      * @param language the {@link Language} or <code>null</code> if not known
      * @param span the {@link Span} to write.
-     * @return the {@link UriRef} representing the parsed {@link Span} in the
+     * @return the {@link IRI} representing the parsed {@link Span} in the
      * graph
      */
-    public static UriRef writeSpan(MGraph graph, UriRef base, AnalysedText text, Language language, Span span){
-        UriRef segment = getNifOffsetURI(base, span.getStart(), span.getEnd());
+    public static IRI writeSpan(Graph graph, IRI base, AnalysedText text, Language language, Span span){
+        IRI segment = getNifOffsetURI(base, span.getStart(), span.getEnd());
         graph.add(new TripleImpl(segment, RDF_TYPE, StringOntology.OffsetBasedString.getUri()));
         graph.add(new TripleImpl(segment, StringOntology.anchorOf.getUri(), 
             new PlainLiteralImpl(span.getSpan(),language)));
@@ -252,7 +252,7 @@
      * @param segmentUri the URI of the resource representing the parsed 
      * annotated element in the graph
      */
-    public static void writePos(MGraph graph, Annotated annotated, UriRef segmentUri) {
+    public static void writePos(Graph graph, Annotated annotated, IRI segmentUri) {
         Value<PosTag> posTag = annotated.getAnnotation(NlpAnnotations.POS_ANNOTATION);
         if(posTag != null){
             if(posTag.value().isMapped()){
@@ -280,10 +280,10 @@
      * @param segmentUri the URI of the resource representing the parsed 
      * annotated element in the graph
      */
-    public static void writePhrase(MGraph graph, Annotated annotated, UriRef segmentUri) {
+    public static void writePhrase(Graph graph, Annotated annotated, IRI segmentUri) {
         Value<PhraseTag> phraseTag = annotated.getAnnotation(NlpAnnotations.PHRASE_ANNOTATION);
         if(phraseTag != null){
-            UriRef phraseTypeUri = LEXICAL_TYPE_TO_PHRASE_TYPE.get(phraseTag.value().getCategory());
+            IRI phraseTypeUri = LEXICAL_TYPE_TO_PHRASE_TYPE.get(phraseTag.value().getCategory());
             if(phraseTypeUri != null){ //add the oliaLink for the Phrase
                 graph.add(new TripleImpl(segmentUri, SsoOntology.oliaLink.getUri(), phraseTypeUri));
                 graph.add(new TripleImpl(segmentUri, ENHANCER_CONFIDENCE, 
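
For reference, a minimal sketch of calling the migrated NIF URI builders after this change. The base URI is illustrative (in practice it is the ContentItem URI); only getNifFragmentURI and getNifOffsetURI are exercised, matching the hunks above:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.nlp.utils.NIFHelper;

    public class NifUriSketch {
        public static void main(String[] args) {
            // illustrative base URI; typically the URI of the ContentItem
            IRI base = new IRI("http://example.org/content/item1");
            // fragment URI for characters 23..26 -> ...#char=23,26
            IRI fragment = NIFHelper.getNifFragmentURI(base, 23, 26);
            // offset based URI for the same character span
            IRI offset = NIFHelper.getNifOffsetURI(base, 23, 26);
            System.out.println(fragment.getUnicodeString());
            System.out.println(offset.getUnicodeString());
        }
    }
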
diff --git a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NlpEngineHelper.java b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NlpEngineHelper.java
index 1d39891..ad7a956 100644
--- a/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NlpEngineHelper.java
+++ b/enhancer/generic/nlp/src/main/java/org/apache/stanbol/enhancer/nlp/utils/NlpEngineHelper.java
@@ -23,7 +23,7 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.nlp.NlpProcessingRole;
 import org.apache.stanbol.enhancer.nlp.NlpServiceProperties;
 import org.apache.stanbol.enhancer.nlp.model.AnalysedText;
@@ -179,7 +179,7 @@
                 throw new IllegalStateException("Unable to initialise AnalysedText"
                     + "ContentPart because the parsed AnalysedTextFactory is NULL");
             }
-            Entry<UriRef,Blob> textBlob = getPlainText(engine, ci, true);
+            Entry<IRI,Blob> textBlob = getPlainText(engine, ci, true);
             //we need to create
             ci.getLock().writeLock().lock();
             try {
@@ -211,8 +211,8 @@
      * @throws IllegalStateException if exception is <code>true</code> and the
      * language could not be retrieved from the parsed {@link ContentItem}.
      */
-    public static Entry<UriRef,Blob> getPlainText(EnhancementEngine engine, ContentItem ci, boolean exception) {
-        Entry<UriRef,Blob> textBlob = ContentItemHelper.getBlob(
+    public static Entry<IRI,Blob> getPlainText(EnhancementEngine engine, ContentItem ci, boolean exception) {
+        Entry<IRI,Blob> textBlob = ContentItemHelper.getBlob(
             ci, singleton("text/plain"));
         if(textBlob != null) {
             return textBlob;
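
A usage sketch for the migrated getPlainText signature; the engine and content item are assumed to be supplied by the caller, and with exception=false the helper is expected to return null when no text/plain blob exists:

    import java.util.Map.Entry;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.nlp.utils.NlpEngineHelper;
    import org.apache.stanbol.enhancer.servicesapi.Blob;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;

    public class PlainTextSketch {
        // returns the URI of the text/plain content part, or null if absent
        static IRI plainTextPartUri(EnhancementEngine engine, ContentItem ci) {
            Entry<IRI,Blob> textBlob = NlpEngineHelper.getPlainText(engine, ci, false);
            return textBlob == null ? null : textBlob.getKey();
        }
    }
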
diff --git a/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/dependency/DependencyRelationTest.java b/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/dependency/DependencyRelationTest.java
index e2c6fb1..23da53d 100644
--- a/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/dependency/DependencyRelationTest.java
+++ b/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/dependency/DependencyRelationTest.java
@@ -6,7 +6,7 @@
 import java.util.Set;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.nlp.model.AnalysedText;
 import org.apache.stanbol.enhancer.nlp.model.AnalysedTextFactory;
@@ -30,7 +30,7 @@
     private static final AnalysedTextFactory atFactory = AnalysedTextFactory.getDefaultInstance();
 
     private static ContentItem ci;
-    private static Entry<UriRef,Blob> textBlob;
+    private static Entry<IRI,Blob> textBlob;
 
     @BeforeClass
     public static void setup() throws IOException {
diff --git a/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextTest.java b/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextTest.java
index c9dc6b8..91223bd 100644
--- a/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextTest.java
+++ b/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/model/AnalysedTextTest.java
@@ -31,7 +31,7 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.nlp.model.annotation.Annotation;
@@ -171,7 +171,7 @@
      */
     private static AnalysedText createAnalysedText() throws IOException {
         ci = ciFactory.createContentItem(new StringSource(text));
-        Entry<UriRef,Blob> textBlob = ContentItemHelper.getBlob(ci, Collections.singleton("text/plain"));
+        Entry<IRI,Blob> textBlob = ContentItemHelper.getBlob(ci, Collections.singleton("text/plain"));
         return  atFactory.createAnalysedText(ci, textBlob.getValue());
     }
     
diff --git a/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelperTest.java b/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelperTest.java
index 99e0c7a..6164432 100644
--- a/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelperTest.java
+++ b/enhancer/generic/nlp/src/test/java/org/apache/stanbol/enhancer/nlp/utils/NIFHelperTest.java
@@ -20,7 +20,7 @@
 import java.io.IOException;
 import java.nio.charset.Charset;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.helper.ContentItemHelper;
 import org.junit.Test;
 
@@ -28,14 +28,14 @@
 
 public class NIFHelperTest {
 
-    static UriRef base = new UriRef("http://stanbol.apache.org/test/nif/nif-helper");
+    static IRI base = new IRI("http://stanbol.apache.org/test/nif/nif-helper");
     static String text = "This is a test for the NLP Interchange format!";
     
     
     @Test
     public void testFragmentURI(){
         Assert.assertEquals(
-            new UriRef(base.getUnicodeString()+"#char=23,26"), 
+            new IRI(base.getUnicodeString()+"#char=23,26"), 
             NIFHelper.getNifFragmentURI(base, 23, 26));
     }
     @Test
@@ -50,7 +50,7 @@
         String context = text.substring(13,23)+'('+selected+')'+text.substring(26,36);
         byte[] contextData = context.getBytes(Charset.forName("UTF8"));
         String md5 = ContentItemHelper.streamDigest(new ByteArrayInputStream(contextData), null, "MD5");
-        UriRef expected = new UriRef(base.getUnicodeString()+"#hash_10_3_"+md5+"_NLP");
+        IRI expected = new IRI(base.getUnicodeString()+"#hash_10_3_"+md5+"_NLP");
         Assert.assertEquals(expected, NIFHelper.getNifHashURI(base, 23, 26, text));
     }    
 }
diff --git a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntity.java b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntity.java
index a7b62a3..8696856 100644
--- a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntity.java
+++ b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntity.java
@@ -16,9 +16,9 @@
 */
 package org.apache.stanbol.enhancer.rdfentities;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Super interface for all interfaces using the {@link RdfEntityFactory} to
@@ -31,9 +31,9 @@
     /**
      * Getter for the RDF node represented by the Proxy.
      *
-     * @return the node representing the proxy. Typically an {@link UriRef} but
-     * could be also a {@link BNode}
+     * @return the node representing the proxy. Typically an {@link IRI} but
+     * could also be a {@link BlankNode}
      */
-    NonLiteral getId();
+    BlankNodeOrIRI getId();
 
 }
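
Since getId() now returns BlankNodeOrIRI, callers that previously assumed a UriRef need an instanceof check. A short sketch of that dispatch, assuming the entity comes from an RdfEntityFactory:

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.rdfentities.RdfEntity;

    public class RdfEntityIdSketch {
        static String idString(RdfEntity entity) {
            BlankNodeOrIRI id = entity.getId();
            // typically an IRI, but BlankNode backed proxies are also legal
            return id instanceof IRI ? ((IRI) id).getUnicodeString() : id.toString();
        }
    }
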
diff --git a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactory.java b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactory.java
index ef2bdd4..2abfc68 100644
--- a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactory.java
+++ b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactory.java
@@ -18,8 +18,8 @@
 
 import java.util.Collection;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
 import org.apache.stanbol.enhancer.rdfentities.impl.SimpleRdfEntityFactory;
 
 
@@ -31,13 +31,13 @@
 public abstract class RdfEntityFactory {
 
     /**
-     * Creates a new factory for the parsed {@link MGraph} instance.
+     * Creates a new factory for the parsed {@link Graph} instance.
      *
      * @param graph the graph used by the proxies created by this factory to
      * read/write their data
      * @return the created factory
      */
-    public static RdfEntityFactory createInstance(MGraph graph){
+    public static RdfEntityFactory createInstance(Graph graph){
         return new SimpleRdfEntityFactory(graph);
     }
 
@@ -62,18 +62,18 @@
      * Proxies returned by this Factory:
      * <ul>
      * <li> MUST NOT have an internal state. They need to represent a view over
-     * the current data within the {@link MGraph} instance. Direct changes to
+     * the current data within the {@link Graph} instance. Direct changes to
      * the graph need to be reflected in calls to proxies.
      * <li> Implementations need to support {@link Collection} as parameter.
      * Collections need to represent a live view over the triples within the
-     * {@link MGraph}. However iterators may throw a
+     * {@link Graph}. However iterators may throw a
      * {@link ConcurrentModificationException} if the graph changes while using
      * the iterator.
      * </ul>
      *
      * @param <T> The interface implemented by the returned proxy
      * @param rdfNode the rdfNode represented by the proxy (created if not
-     * present in the Graph)
+     * present in the Graph)
      * @param type The interface for the proxy. Needs to extend {@link RdfEntity}
      * @param additionalInterfaces Additional interfaces the proxy needs to
      * implement.
@@ -85,7 +85,7 @@
      * @throws NullPointerException if the parameter type, additionalInterfaces
      * or any entry of additionalInterfaces is <code>null</code>.
      */
-    public abstract <T extends RdfEntity> T getProxy(NonLiteral rdfNode,
+    public abstract <T extends RdfEntity> T getProxy(BlankNodeOrIRI rdfNode,
             Class<T> type, Class<?>... additionalInterfaces) throws IllegalArgumentException;
 
 }
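
A minimal sketch of the factory contract after the migration, mirroring the test code further down; SimpleGraph stands in for whatever mutable Graph the caller owns, and the proxy is a stateless view over that graph:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.enhancer.rdfentities.RdfEntity;
    import org.apache.stanbol.enhancer.rdfentities.RdfEntityFactory;

    public class FactorySketch {
        public static void main(String[] args) {
            Graph graph = new SimpleGraph(); // mutable graph in the new API
            RdfEntityFactory factory = RdfEntityFactory.createInstance(graph);
            // reads and writes through the proxy go straight to the graph
            RdfEntity entity = factory.getProxy(
                new IRI("urn:example:node"), RdfEntity.class);
            System.out.println(entity.getId());
        }
    }
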
diff --git a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/Enhancement.java b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/Enhancement.java
index 721b681..6434bf2 100644
--- a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/Enhancement.java
+++ b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/Enhancement.java
@@ -19,7 +19,7 @@
 import java.util.Collection;
 import java.util.Date;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.rdfentities.Rdf;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntity;
 
@@ -30,10 +30,10 @@
  * To create an instance of this interface use the following code
  * <code><pre>
  *  ContentItem ci;
- *     MGraph graph = ci.getMetadata();
+ *     Graph graph = ci.getMetadata();
  *  RdfEntityFactory factory = RdfEntityFactory.createInstance(graph);
 *    String enhancementId = "http://www.example.com/iks-project/enhancer/example-enhancement";
- *    UriRef enhancementNode = new UriRef(enhancementId);
+ *    IRI enhancementNode = new IRI(enhancementId);
  *    Enhancement enhancement = factory.getProxy(enhancementNode, Enhancement.class);
  *    enhancement.setCreator("Rupert Westenthaler");
  *  enhancement.setCreated(new Date());
@@ -46,9 +46,9 @@
 public interface Enhancement extends RdfEntity{
 
     @Rdf(id="http://purl.org/dc/terms/creator")
-    UriRef getCreator();
+    IRI getCreator();
     @Rdf(id="http://purl.org/dc/terms/creator")
-    void setCreator(UriRef creator);
+    void setCreator(IRI creator);
 
     @Rdf(id="http://purl.org/dc/terms/created")
     void setCreated(Date date);
@@ -58,7 +58,7 @@
 //    @Rdf(id="http://purl.org/dc/terms/type")
 //    void setDcType(Collection<URI> types);
     @Rdf(id="http://purl.org/dc/terms/type")
-    Collection<UriRef> getDcType();
+    Collection<IRI> getDcType();
 
     @Rdf(id="http://fise.iks-project.eu/ontology/confidence")
     Double getConfidence();
@@ -66,9 +66,9 @@
     void setConfidence(Double value);
 
     @Rdf(id="http://fise.iks-project.eu/ontology/extracted-from")
-    UriRef getExtractedFrom();
+    IRI getExtractedFrom();
     @Rdf(id="http://fise.iks-project.eu/ontology/extracted-from")
-    void setExtractedFrom(UriRef contentItem);
+    void setExtractedFrom(IRI contentItem);
 
     @Rdf(id="http://purl.org/dc/terms/requires")
     Collection<Enhancement> getRequires();
diff --git a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/EntityAnnotation.java b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/EntityAnnotation.java
index a52aef4..b57f4a5 100644
--- a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/EntityAnnotation.java
+++ b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/fise/EntityAnnotation.java
@@ -18,7 +18,7 @@
 
 import java.util.Collection;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.rdfentities.Rdf;
 
 
@@ -26,9 +26,9 @@
 public interface EntityAnnotation extends Enhancement {
 
     @Rdf(id="http://fise.iks-project.eu/ontology/entity-reference")
-    UriRef getEntityReference();
+    IRI getEntityReference();
     @Rdf(id="http://fise.iks-project.eu/ontology/entity-reference")
-    void setEntityReference(UriRef reference);
+    void setEntityReference(IRI reference);
 
     @Rdf(id="http://fise.iks-project.eu/ontology/entity-label")
     String getEntityLabel();
@@ -36,5 +36,5 @@
     void setEntityLabel(String label);
 
     @Rdf(id="http://fise.iks-project.eu/ontology/entity-type")
-    Collection<UriRef> getEntityTypes();
+    Collection<IRI> getEntityTypes();
 }
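
A sketch of populating the migrated interface, following the pattern used in TestEnhancementInterfaces below; the factory is assumed to be created for some metadata graph, and the URIs are illustrative:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.rdfentities.RdfEntityFactory;
    import org.apache.stanbol.enhancer.rdfentities.fise.EntityAnnotation;

    public class EntityAnnotationSketch {
        static void annotate(RdfEntityFactory factory) {
            EntityAnnotation ea = factory.getProxy(
                new IRI("urn:example:entity-annotation"), EntityAnnotation.class);
            ea.setEntityLabel("New Zealand");
            ea.setEntityReference(new IRI("http://example.org/entity/new_zealand"));
            ea.getEntityTypes().add(new IRI("http://example.org/types/Location"));
        }
    }
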
diff --git a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/RdfProxyInvocationHandler.java b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/RdfProxyInvocationHandler.java
index 20aae41..42af013 100644
--- a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/RdfProxyInvocationHandler.java
+++ b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/RdfProxyInvocationHandler.java
@@ -34,14 +34,14 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.LiteralFactory;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.rdf.core.LiteralFactory;
 import org.apache.stanbol.enhancer.rdfentities.Rdf;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntity;
 import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;
@@ -89,14 +89,14 @@
 
     protected SimpleRdfEntityFactory factory;
     protected LiteralFactory literalFactory;
-    protected NonLiteral rdfNode;
+    protected BlankNodeOrIRI rdfNode;
     private final Set<Class<?>> interfaces;
-    public RdfProxyInvocationHandler(SimpleRdfEntityFactory factory, NonLiteral rdfNode, Class<?>[] parsedInterfaces, LiteralFactory literalFactory){
+    public RdfProxyInvocationHandler(SimpleRdfEntityFactory factory, BlankNodeOrIRI rdfNode, Class<?>[] parsedInterfaces, LiteralFactory literalFactory){
         this.rdfNode = rdfNode;
         this.factory = factory;
         this.literalFactory = literalFactory;
-        //TODO If slow implement this by directly using the MGraph Interface!
-        Collection<UriRef> nodeTypes = getValues(Properties.RDF_TYPE, UriRef.class);
+        //TODO If slow implement this by directly using the Graph Interface!
+        Collection<IRI> nodeTypes = getValues(Properties.RDF_TYPE, IRI.class);
         Set<Class<?>> interfaceSet = new HashSet<Class<?>>();
         for (Class<?> clazz : parsedInterfaces){
             if(!clazz.isInterface()){
@@ -110,7 +110,7 @@
             Rdf classAnnotation = clazz.getAnnotation(Rdf.class);
             if(classAnnotation == null){
             } else { //check if the type statement is present
-                UriRef typeRef = new UriRef(classAnnotation.id());
+                IRI typeRef = new IRI(classAnnotation.id());
                 if(!nodeTypes.contains(typeRef)){
                     //TODO: Question: How to get the dependencies for logging working with maven :(
                     //log.debug("add type "+typeRef+" for interface "+clazz+" to node "+rdfNode);
@@ -161,9 +161,9 @@
         if(rdf == null){
             throw new IllegalStateException("Invoked Method does not have an Rdf annotation!");
         }
-        UriRef property;
+        IRI property;
         if(rdf.id().startsWith("http://") || rdf.id().startsWith("urn:")){
-            property = new UriRef(rdf.id());
+            property = new IRI(rdf.id());
         } else {
             throw new IllegalStateException("The id=\""+rdf.id()+"\"provided by the rdf annotation is not an valid URI");
         }
@@ -248,81 +248,81 @@
     }
 
     @SuppressWarnings("unchecked")
-    private <T> T getValue(UriRef property, Class<T> type){
+    private <T> T getValue(IRI property, Class<T> type){
         Iterator<Triple> results = factory.getGraph().filter(rdfNode, property, null);
         if (results.hasNext()){
-            Resource result = results.next().getObject();
-            if (result instanceof NonLiteral){
+            RDFTerm result = results.next().getObject();
+            if (result instanceof BlankNodeOrIRI){
                 if (RdfEntity.class.isAssignableFrom(type)){
-                    return (T)factory.getProxy((NonLiteral)result, (Class<? extends RdfEntity>)type);
-                } else { //check result for UriRef and types UriRef, URI or URL
-                    if(result instanceof UriRef){
-                        if (UriRef.class.isAssignableFrom(type)){
+                    return (T)factory.getProxy((BlankNodeOrIRI)result, (Class<? extends RdfEntity>)type);
+                } else { //check result for IRI and types IRI, URI or URL
+                    if(result instanceof IRI){
+                        if (IRI.class.isAssignableFrom(type)){
                             return (T)result;
                         } else if (URI.class.isAssignableFrom(type)){
                             try {
-                                return (T)new URI(((UriRef)result).getUnicodeString());
+                                return (T)new URI(((IRI)result).getUnicodeString());
                             } catch (URISyntaxException e) {
                                 throw new IllegalStateException("Unable to parse "+URI.class
-                                        +" for "+UriRef.class+" value="+((UriRef)result).getUnicodeString());
+                                        +" for "+IRI.class+" value="+((IRI)result).getUnicodeString());
                             }
                         } else if (URL.class.isAssignableFrom(type)){
                             try {
-                                return (T)new URL(((UriRef)result).getUnicodeString());
+                                return (T)new URL(((IRI)result).getUnicodeString());
                             } catch (MalformedURLException e) {
                                 throw new IllegalStateException("Unable to parse "+URL.class
-                                        +" for "+UriRef.class+" value="+((UriRef)result).getUnicodeString());
+                                        +" for "+IRI.class+" value="+((IRI)result).getUnicodeString());
                             }
                         } else {
                             throw new IllegalArgumentException("Parsed Type "+type
                                     +" is not compatible for result type "+result.getClass()
                                     +" (value "+result+") of node "+rdfNode+" and property "+property
-                                    +"! (Subclass of RdfEntity, UriRef, URI or URL is expected for NonLiteral Values)");
+                                    +"! (Subclass of RdfEntity, IRI, URI or URL is expected for BlankNodeOrIRI Values)");
                         }
                     } else {
                         throw new IllegalArgumentException("Parsed Type "+type
                                 +" is not compatible for result type "+result.getClass()
                                 +" (value "+result+") of node "+rdfNode+" and property "+property
-                                +"! (Subclass of RdfEntity expected as type for NonLiteral values that are no instanceof UriRef)");
+                                +"! (Subclass of RdfEntity expected as type for BlankNodeOrIRI values that are no instanceof IRI)");
                     }
                 }
             } else {
-                return literalFactory.createObject(type,(TypedLiteral) result);
+                return literalFactory.createObject(type,(Literal) result);
             }
         } else {
             return null;
         }
     }
-    private <T> Collection<T> getValues(UriRef property, Class<T> type){
+    private <T> Collection<T> getValues(IRI property, Class<T> type){
         return new RdfProxyPropertyCollection<T>(property, type);
     }
-    private void setValue(UriRef property, Object value){
+    private void setValue(IRI property, Object value){
         removeValues(property);
         addValue(property, value);
     }
-    private void setValues(UriRef property, Collection<?> values){
+    private void setValues(IRI property, Collection<?> values){
         removeValues(property);
         for(Object value : values){
             addValue(property, value);
         }
     }
-    protected Resource getRdfResource(Object value) throws NoConvertorException{
-        if(value instanceof Resource){
-            //if the parsed object is already a Resource
-            return (Resource) value; //return it
+    protected RDFTerm getRdfResource(Object value) throws NoConvertorException{
+        if(value instanceof RDFTerm){
+            //if the parsed object is already a RDFTerm
+            return (RDFTerm) value; //return it
         } else if(value instanceof RdfEntity){ //check for other proxies
             return ((RdfEntity)value).getId();
         } else if(value instanceof URI){ //or URI links
-            return new UriRef(value.toString());
+            return new IRI(value.toString());
         } else if(value instanceof URL){ //or URL links
-            return new UriRef(value.toString());
+            return new IRI(value.toString());
         } else { //nothing of that
             //try to make a Literal (Clerezza internal adapters)
             return literalFactory.createTypedLiteral(value);
         }
     }
-    private boolean addValue(UriRef property, Object value){
-        Resource rdfValue;
+    private boolean addValue(IRI property, Object value){
+        RDFTerm rdfValue;
         try {
             rdfValue = getRdfResource(value);
             return factory.getGraph().add(new TripleImpl(rdfNode, property, rdfValue));
@@ -331,8 +331,8 @@
                     +" to an RDF Node. Only "+RdfEntity.class+" and RDF Literal Types are supported");
         }
     }
-    private boolean removeValue(UriRef property, Object value){
-        Resource rdfValue;
+    private boolean removeValue(IRI property, Object value){
+        RDFTerm rdfValue;
         try {
             rdfValue = getRdfResource(value);
             return factory.getGraph().remove(new TripleImpl(rdfNode, property, rdfValue));
@@ -341,7 +341,7 @@
                     +" to an RDF Node. Only "+RdfEntity.class+" and RDF Literal Types are supported");
         }
     }
-    private void removeValues(UriRef proptery){
-        Iterator<Triple> toRemove = factory.getGraph().filter(rdfNode, proptery, null);
+    private void removeValues(IRI property){
+        Iterator<Triple> toRemove = factory.getGraph().filter(rdfNode, property, null);
         while(toRemove.hasNext()){
             factory.getGraph().remove(toRemove.next());
@@ -349,9 +349,9 @@
     }
 
     /**
-     * We need this class to apply changes in the collection to the MGraph.
+     * We need this class to apply changes in the collection to the Graph.
      * This collection implementation is a stateless wrapper over the
-     * triples selected by the subject,property pair over the MGraph!<br>
+     * triples selected by the subject,property pair over the Graph!<br>
      * Default implementations of {@link AbstractCollection} have very poor
      * performance. Because of that, this class overrides some methods
      * already implemented by its abstract super class.
@@ -361,21 +361,21 @@
      */
     private final class RdfProxyPropertyCollection<T> extends AbstractCollection<T> {
 
-        //private final NonLiteral resource;
-        private final UriRef property;
+        //private final BlankNodeOrIRI resource;
+        private final IRI property;
         private final Class<T> genericType;
         private final boolean entity;
         private final boolean uri;
         private final boolean url;
         private final boolean uriRef;
 
-        private RdfProxyPropertyCollection(UriRef property,Class<T> genericType) {
+        private RdfProxyPropertyCollection(IRI property,Class<T> genericType) {
             this.property = property;
             this.genericType = genericType;
             entity = RdfEntity.class.isAssignableFrom(genericType);
             uri = URI.class.isAssignableFrom(genericType);
             url = URL.class.isAssignableFrom(genericType);
-            uriRef = UriRef.class.isAssignableFrom(genericType);
+            uriRef = IRI.class.isAssignableFrom(genericType);
         }
 
         @Override
@@ -390,26 +390,26 @@
                 @SuppressWarnings("unchecked")
                 @Override
                 public T next() {
-                    Resource value = results.next().getObject();
+                    RDFTerm value = results.next().getObject();
                     if (entity){
                         //type checks are done within the constructor
-                        return (T) factory.getProxy((NonLiteral)value, (Class<? extends RdfEntity>)genericType);
+                        return (T) factory.getProxy((BlankNodeOrIRI)value, (Class<? extends RdfEntity>)genericType);
                     } else if(uri){
                         try {
-                            return (T)new URI(((UriRef)value).getUnicodeString());
+                            return (T)new URI(((IRI)value).getUnicodeString());
                         } catch (URISyntaxException e) {
-                            throw new IllegalStateException("Unable to parse "+URI.class+" for "+UriRef.class+" value="+((UriRef)value).getUnicodeString());
+                            throw new IllegalStateException("Unable to parse "+URI.class+" for "+IRI.class+" value="+((IRI)value).getUnicodeString());
                         }
                     } else if(url){
                         try {
-                            return (T)new URL(((UriRef)value).getUnicodeString());
+                            return (T)new URL(((IRI)value).getUnicodeString());
                         } catch (MalformedURLException e) {
-                            throw new IllegalStateException("Unable to parse "+URL.class+" for "+UriRef.class+" value="+((UriRef)value).getUnicodeString());
+                            throw new IllegalStateException("Unable to parse "+URL.class+" for "+IRI.class+" value="+((IRI)value).getUnicodeString());
                         }
                     } else if(uriRef){
                         return (T)value;
                     } else {
-                        return literalFactory.createObject(genericType, (TypedLiteral)value);
+                        return literalFactory.createObject(genericType, (Literal)value);
                     }
                 }
 
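
Condensed from the handler logic above, a sketch of how an IRI object value is mapped to the type a getter requests. This is an illustration, not part of the handler; the RdfEntity proxying and LiteralFactory branches are elided:

    import java.net.URI;
    import java.net.URL;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.RDFTerm;

    public class ValueMappingSketch {
        // IRI values may be requested as IRI, java.net.URI or java.net.URL;
        // other BlankNodeOrIRIs need an RdfEntity sub-interface and literals
        // go through the LiteralFactory (both omitted here)
        static Object map(RDFTerm value, Class<?> type) throws Exception {
            if (value instanceof IRI) {
                String uri = ((IRI) value).getUnicodeString();
                if (IRI.class.isAssignableFrom(type)) return value;
                if (URI.class.isAssignableFrom(type)) return new URI(uri);
                if (URL.class.isAssignableFrom(type)) return new URL(uri);
            }
            throw new IllegalArgumentException("unsupported mapping to " + type);
        }
    }
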
diff --git a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/SimpleRdfEntityFactory.java b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/SimpleRdfEntityFactory.java
index 0b2ae28..9954eb4 100644
--- a/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/SimpleRdfEntityFactory.java
+++ b/enhancer/generic/rdfentities/src/main/java/org/apache/stanbol/enhancer/rdfentities/impl/SimpleRdfEntityFactory.java
@@ -18,28 +18,28 @@
 
 import java.lang.reflect.Proxy;
 
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntity;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntityFactory;
 
 
 public class SimpleRdfEntityFactory extends RdfEntityFactory {
 
-    private final MGraph graph;
+    private final Graph graph;
     private final LiteralFactory literalFactory;
 
-    public SimpleRdfEntityFactory(MGraph graph) {
+    public SimpleRdfEntityFactory(Graph graph) {
         if (graph == null){
-            throw new IllegalArgumentException("The MGraph parsed as parameter MUST NOT be NULL!");
+            throw new IllegalArgumentException("The Graph parsed as parameter MUST NOT be NULL!");
         }
         this.graph = graph;
         literalFactory = LiteralFactory.getInstance();
     }
 
     @SuppressWarnings("unchecked")
-    public <T extends RdfEntity> T getProxy(NonLiteral rdfNode, Class<T> type,Class<?>...additionalInterfaces) {
+    public <T extends RdfEntity> T getProxy(BlankNodeOrIRI rdfNode, Class<T> type,Class<?>...additionalInterfaces) {
         Class<?>[] interfaces = new Class<?>[additionalInterfaces.length+1];
         interfaces[0] = type;
         System.arraycopy(additionalInterfaces, 0, interfaces, 1, additionalInterfaces.length);
@@ -51,7 +51,7 @@
         return (T)instance;
     }
 
-    protected MGraph getGraph() {
+    protected Graph getGraph() {
         return graph;
     }
 
diff --git a/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactoryTest.java b/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactoryTest.java
index 8606aff..bcf9b5f 100644
--- a/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactoryTest.java
+++ b/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/RdfEntityFactoryTest.java
@@ -30,11 +30,11 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.stanbol.enhancer.rdfentities.Rdf;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntity;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntityFactory;
@@ -52,10 +52,10 @@
 
     @Test
     public void testRdfEntity() throws Exception {
-        MGraph graph = new SimpleMGraph();
+        Graph graph = new SimpleGraph();
         RdfEntityFactory factory = RdfEntityFactory.createInstance(graph);
         String testUri = "urn:RdfEntityFactoryTest:TestEntity";
-        UriRef node = new UriRef(testUri);
+        IRI node = new IRI(testUri);
         RdfEntity rdfEntity = factory.getProxy(node, RdfEntity.class);
         //TODO: Test type statement
         //TODO: test getID Method
@@ -68,10 +68,10 @@
     }
     @Test
     public void testPrimitiveDataTypes() throws Exception {
-        MGraph graph = new SimpleMGraph();
+        Graph graph = new SimpleGraph();
         RdfEntityFactory factory = RdfEntityFactory.createInstance(graph);
         String testUri = "urn:RdfEntityFactoryTest:TestEntity";
-        UriRef node = new UriRef(testUri);
+        IRI node = new IRI(testUri);
         TestRdfEntity testEntity = factory.getProxy(node, TestRdfEntity.class);
 
         testEntity.setBoolean(true);
@@ -139,10 +139,10 @@
 
     @Test
     public void testTypeStatements() throws Exception {
-        MGraph graph = new SimpleMGraph();
+        Graph graph = new SimpleGraph();
         RdfEntityFactory factory = RdfEntityFactory.createInstance(graph);
         String testUri = "urn:RdfEntityFactoryTest:TestEntity";
-        UriRef node = new UriRef(testUri);
+        IRI node = new IRI(testUri);
         TestRdfEntity entity = factory.getProxy(node, TestRdfEntity.class, new Class[]{TestRdfEntity2.class});
         // test if the proxy implements both interfaces
         assertTrue(entity instanceof TestRdfEntity);
@@ -155,12 +155,12 @@
 
     @Test
     public void testObjectProperties() throws Exception {
-        MGraph graph = new SimpleMGraph();
+        Graph graph = new SimpleGraph();
         RdfEntityFactory factory = RdfEntityFactory.createInstance(graph);
         String testUri = "urn:RdfEntityFactoryTest:TestEntity";
         String testUri2 = "urn:RdfEntityFactoryTest:TestEntity2";
-        UriRef node = new UriRef(testUri);
-        UriRef node2 = new UriRef(testUri2);
+        IRI node = new IRI(testUri);
+        IRI node2 = new IRI(testUri2);
         TestRdfEntity entity = factory.getProxy(node, TestRdfEntity.class);
         TestRdfEntity2 entity2 = factory.getProxy(node2, TestRdfEntity2.class);
 
@@ -172,44 +172,44 @@
         entity.setURL(testURL);
         assertEquals(testURL, entity.getURL());
 
-        entity.setUriRef(node2);
-        assertEquals(node2, entity.getUriRef());
+        entity.setIRI(node2);
+        assertEquals(node2, entity.getIRI());
 
         entity2.setTestEntity(entity);
         assertEquals(entity, entity2.getTestEntity());
 
         Collection<TestRdfEntity> testEntities = entity2.getTestEntities();
         assertTrue(testEntities.isEmpty()); //check that entity is not in the collection
-        Set<UriRef> testUriRefs = new HashSet<UriRef>();
+        Set<IRI> testIRIs = new HashSet<IRI>();
         int NUM = 10;
         for (int i=0;i<NUM;i++){
-            UriRef testNode = new UriRef(testUri+':'+'_'+i);
-            testUriRefs.add(testNode);
+            IRI testNode = new IRI(testUri+':'+'_'+i);
+            testIRIs.add(testNode);
             testEntities.add(factory.getProxy(testNode, TestRdfEntity.class));
         }
         //now get a new collection and test if the added entities are there
-        Collection<UriRef> resultUriRefs = new ArrayList<UriRef>(); //add to a list to check for duplicates
+        Collection<IRI> resultIRIs = new ArrayList<IRI>(); //add to a list to check for duplicates
         for (TestRdfEntity e : entity2.getTestEntities()){
-            assertTrue(e.getId() instanceof UriRef); //I used UriRefs for the generation ...
-            resultUriRefs.add((UriRef)e.getId());
+            assertTrue(e.getId() instanceof IRI); //I used IRIs for the generation ...
+            resultIRIs.add((IRI)e.getId());
         }
         //now cross check
-        assertTrue(testUriRefs.containsAll(resultUriRefs));
-        assertTrue(resultUriRefs.containsAll(testUriRefs));
+        assertTrue(testIRIs.containsAll(resultIRIs));
+        assertTrue(resultIRIs.containsAll(testIRIs));
         //now one could try to remove some Elements ...
         // ... but things like that are already tested for Integers in testPrimitiveDataTypes
     }
 
     @Test
     public void testInterfaceHierarchies() throws Exception {
-        MGraph graph = new SimpleMGraph();
+        Graph graph = new SimpleGraph();
         RdfEntityFactory factory = RdfEntityFactory.createInstance(graph);
         String testUri = "urn:RdfEntityFactoryTest:SubTestEntity";
         String testUri2 = "urn:RdfEntityFactoryTest:TestEntity2";
         String testUri3 = "urn:RdfEntityFactoryTest:TestEntity";
-        UriRef node = new UriRef(testUri);
-        UriRef node2 = new UriRef(testUri2);
-        UriRef node3 = new UriRef(testUri3);
+        IRI node = new IRI(testUri);
+        IRI node2 = new IRI(testUri2);
+        IRI node3 = new IRI(testUri3);
         SubTestRdfEntity entity = factory.getProxy(node, SubTestRdfEntity.class);
         TestRdfEntity entity2 = factory.getProxy(node2, TestRdfEntity.class, SubTestRdfEntity.class, TestRdfEntity2.class);
         TestRdfEntity entity3 = factory.getProxy(node3, TestRdfEntity.class);
@@ -219,7 +219,7 @@
         assertTrue(entity instanceof TestRdfEntity);
         assertTrue(entity instanceof RdfEntity);
 
-        // test if the rdf:type triples are present in the MGraph
+        // test if the rdf:type triples are present in the Graph
         Set<String> typeStrings = getRdfTypes(graph, node);
         assertTrue(typeStrings.contains(SubTestRdfEntity.class.getAnnotation(Rdf.class).id()));
         assertTrue(typeStrings.contains(TestRdfEntity.class.getAnnotation(Rdf.class).id()));
@@ -231,7 +231,7 @@
         assertTrue(entity2 instanceof TestRdfEntity2);
         assertTrue(entity2 instanceof RdfEntity);
 
-        // test if the rdf:type triples are present in the MGraph
+        // test if the rdf:type triples are present in the Graph
         typeStrings = getRdfTypes(graph, node2);
         assertTrue(typeStrings.contains(SubTestRdfEntity.class.getAnnotation(Rdf.class).id()));
         assertTrue(typeStrings.contains(TestRdfEntity.class.getAnnotation(Rdf.class).id()));
@@ -256,13 +256,13 @@
         assertTrue(!(entity3 instanceof SubTestRdfEntity));
     }
 
-    private static Set<String> getRdfTypes(MGraph graph, UriRef node) {
+    private static Set<String> getRdfTypes(Graph graph, IRI node) {
         Iterator<Triple> typeStatements = graph.filter(node, Properties.RDF_TYPE, null);
         Set<String> typeStrings = new HashSet<String>();
         while(typeStatements.hasNext()){
-            Resource type = typeStatements.next().getObject();
-            assertTrue(type instanceof UriRef);
-            typeStrings.add(((UriRef)type).getUnicodeString());
+            RDFTerm type = typeStatements.next().getObject();
+            assertTrue(type instanceof IRI);
+            typeStrings.add(((IRI)type).getUnicodeString());
         }
         return typeStrings;
     }
@@ -329,10 +329,10 @@
         @Rdf(id="urn:test:URL")
         void setURL(URL uri);
 
-        @Rdf(id="urn:test:UriRef")
-        UriRef getUriRef();
-        @Rdf(id="urn:test:UriRef")
-        void setUriRef(UriRef uriRef);
+        @Rdf(id="urn:test:IRI")
+        IRI getIRI();
+        @Rdf(id="urn:test:IRI")
+        void setIRI(IRI iri);
     }
 
     /**
diff --git a/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/fise/TestEnhancementInterfaces.java b/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/fise/TestEnhancementInterfaces.java
index f84bbf8..ff7eb0b 100644
--- a/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/fise/TestEnhancementInterfaces.java
+++ b/enhancer/generic/rdfentities/src/test/java/org/apache/stanbol/enhancer/rdfentities/fise/TestEnhancementInterfaces.java
@@ -29,12 +29,12 @@
 import java.util.Date;
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItem;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.rdfentities.RdfEntityFactory;
@@ -59,7 +59,7 @@
     public static final String SINGLE_SENTENCE = "Dr. Patrick Marshall (1869 - November 1950) was a"
         + " geologist who lived in New Zealand and worked at the University of Otago.";
     protected static final ContentSource SINGLE_SENTENCE_SOURCE = new StringSource(SINGLE_SENTENCE);
-    public static final UriRef TEST_ENHANCEMENT_ENGINE_URI = new UriRef("urn:test:dummyEnhancementEngine");
+    public static final IRI TEST_ENHANCEMENT_ENGINE_URI = new IRI("urn:test:dummyEnhancementEngine");
     private static ContentItemFactory ciFactory = InMemoryContentItemFactory.getInstance();
     public static ContentItem createContentItem(ContentSource cs) throws IOException {
     	return ciFactory.createContentItem(cs);
@@ -68,7 +68,7 @@
     @Test
     public void testEnhancementInterfaces() throws Exception {
         ContentItem ci = createContentItem(SINGLE_SENTENCE_SOURCE);
-        UriRef ciUri = new UriRef(ci.getUri().getUnicodeString());
+        IRI ciUri = new IRI(ci.getUri().getUnicodeString());
         RdfEntityFactory factory = RdfEntityFactory.createInstance(ci.getMetadata());
         long start = System.currentTimeMillis();
         //create an Text Annotation representing an extracted Person
@@ -77,7 +77,7 @@
         personAnnotation.setCreator(TEST_ENHANCEMENT_ENGINE_URI);
         personAnnotation.setCreated(new Date());
         personAnnotation.setExtractedFrom(ciUri);
-        personAnnotation.getDcType().add(new UriRef("http://www.example.org/cv/annotatation-types/text#Person"));
+        personAnnotation.getDcType().add(new IRI("http://www.example.org/cv/annotatation-types/text#Person"));
         personAnnotation.setConfidence(0.8);
         personAnnotation.setSelectedText("Patrick Marshall");
         personAnnotation.setStart(SINGLE_SENTENCE.indexOf(personAnnotation.getSelectedText()));
@@ -90,7 +90,7 @@
         locationAnnotation.setCreator(TEST_ENHANCEMENT_ENGINE_URI);
         locationAnnotation.setCreated(new Date());
         locationAnnotation.setExtractedFrom(ciUri);
-        locationAnnotation.getDcType().add(new UriRef("http://www.example.org/cv/annotatation-types/text#Location"));
+        locationAnnotation.getDcType().add(new IRI("http://www.example.org/cv/annotatation-types/text#Location"));
         locationAnnotation.setConfidence(0.78);
         locationAnnotation.setSelectedText("New Zealand");
         locationAnnotation.setStart(SINGLE_SENTENCE.indexOf(locationAnnotation.getSelectedText()));
@@ -103,7 +103,7 @@
         orgAnnotation.setCreator(TEST_ENHANCEMENT_ENGINE_URI);
         orgAnnotation.setCreated(new Date());
         orgAnnotation.setExtractedFrom(ciUri);
-        orgAnnotation.getDcType().add(new UriRef("http://www.example.org/cv/annotatation-types/text#Organisation"));
+        orgAnnotation.getDcType().add(new IRI("http://www.example.org/cv/annotatation-types/text#Organisation"));
         orgAnnotation.setConfidence(0.78);
         orgAnnotation.setSelectedText("University of Otago");
         orgAnnotation.setStart(SINGLE_SENTENCE.indexOf(orgAnnotation.getSelectedText()));
@@ -116,30 +116,30 @@
         patrickMarshall.setCreator(TEST_ENHANCEMENT_ENGINE_URI);
         patrickMarshall.setCreated(new Date());
         patrickMarshall.setExtractedFrom(ciUri);
-        patrickMarshall.getDcType().add(new UriRef("http://www.example.org/cv/annotatation-types/entity#Entity"));
+        patrickMarshall.getDcType().add(new IRI("http://www.example.org/cv/annotatation-types/entity#Entity"));
         patrickMarshall.setConfidence(0.56);
         patrickMarshall.getRelations().add(personAnnotation);
         patrickMarshall.setEntityLabel("Patrick Marshall");
-        patrickMarshall.setEntityReference(new UriRef("http://rdf.freebase.com/rdf/en/patrick_marshall"));
+        patrickMarshall.setEntityReference(new IRI("http://rdf.freebase.com/rdf/en/patrick_marshall"));
         patrickMarshall.getEntityTypes().addAll(Arrays.asList(
-                        new UriRef("http://rdf.freebase.com/ns/people.person"),
-                        new UriRef("http://rdf.freebase.com/ns/common.topic"),
-                        new UriRef("http://rdf.freebase.com/ns/education.academic")));
+                        new IRI("http://rdf.freebase.com/ns/people.person"),
+                        new IRI("http://rdf.freebase.com/ns/common.topic"),
+                        new IRI("http://rdf.freebase.com/ns/education.academic")));
         // and an other for New Zealand
         EntityAnnotation newZealand = factory.getProxy(
                 createEnhancementURI(), EntityAnnotation.class);
         newZealand.setCreator(TEST_ENHANCEMENT_ENGINE_URI);
         newZealand.setCreated(new Date());
         newZealand.setExtractedFrom(ciUri);
-        newZealand.getDcType().add(new UriRef("http://www.example.org/cv/annotatation-types/entity#Entity"));
+        newZealand.getDcType().add(new IRI("http://www.example.org/cv/annotatation-types/entity#Entity"));
         newZealand.setConfidence(0.98);
         newZealand.getRelations().add(locationAnnotation);
         newZealand.setEntityLabel("New Zealand");
-        newZealand.setEntityReference(new UriRef("http://rdf.freebase.com/rdf/en/new_zealand"));
+        newZealand.setEntityReference(new IRI("http://rdf.freebase.com/rdf/en/new_zealand"));
         newZealand.getEntityTypes().addAll(Arrays.asList(
-                new UriRef("http://rdf.freebase.com/ns/location.location"),
-                new UriRef("http://rdf.freebase.com/ns/common.topic"),
-                new UriRef("http://rdf.freebase.com/ns/location.country")));
+                new IRI("http://rdf.freebase.com/ns/location.location"),
+                new IRI("http://rdf.freebase.com/ns/common.topic"),
+                new IRI("http://rdf.freebase.com/ns/location.country")));
 
         // and an other option for New Zealand
         EntityAnnotation airNewZealand = factory.getProxy(
@@ -147,20 +147,20 @@
         airNewZealand.setCreator(TEST_ENHANCEMENT_ENGINE_URI);
         airNewZealand.setCreated(new Date());
         airNewZealand.setExtractedFrom(ciUri);
-        airNewZealand.getDcType().add(new UriRef("http://www.example.org/cv/annotatation-types/entity#Entity"));
+        airNewZealand.getDcType().add(new IRI("http://www.example.org/cv/annotatation-types/entity#Entity"));
         airNewZealand.setConfidence(0.36);
         airNewZealand.getRelations().add(locationAnnotation);
         airNewZealand.setEntityLabel("New Zealand");
-        airNewZealand.setEntityReference(new UriRef("http://rdf.freebase.com/rdf/en/air_new_zealand"));
+        airNewZealand.setEntityReference(new IRI("http://rdf.freebase.com/rdf/en/air_new_zealand"));
         airNewZealand.getEntityTypes().addAll(Arrays.asList(
-                new UriRef("http://rdf.freebase.com/ns/business.sponsor"),
-                new UriRef("http://rdf.freebase.com/ns/common.topic"),
-                new UriRef("http://rdf.freebase.com/ns/travel.transport_operator"),
-                new UriRef("http://rdf.freebase.com/ns/aviation.airline"),
-                new UriRef("http://rdf.freebase.com/ns/aviation.aircraft_owner"),
-                new UriRef("http://rdf.freebase.com/ns/business.employer"),
-                new UriRef("http://rdf.freebase.com/ns/freebase.apps.hosts.com.appspot.acre.juggle.juggle"),
-                new UriRef("http://rdf.freebase.com/ns/business.company")));
+                new IRI("http://rdf.freebase.com/ns/business.sponsor"),
+                new IRI("http://rdf.freebase.com/ns/common.topic"),
+                new IRI("http://rdf.freebase.com/ns/travel.transport_operator"),
+                new IRI("http://rdf.freebase.com/ns/aviation.airline"),
+                new IRI("http://rdf.freebase.com/ns/aviation.aircraft_owner"),
+                new IRI("http://rdf.freebase.com/ns/business.employer"),
+                new IRI("http://rdf.freebase.com/ns/freebase.apps.hosts.com.appspot.acre.juggle.juggle"),
+                new IRI("http://rdf.freebase.com/ns/business.company")));
         System.out.println("creation time "+(System.currentTimeMillis()-start)+"ms");
 
         //now test the enhancement
@@ -171,10 +171,10 @@
         assertEquals(3, numberOfEntityAnnotations);
     }
 
-    private static UriRef createEnhancementURI() {
+    private static IRI createEnhancementURI() {
         //TODO: add some Utility to create Instances to the RdfEntityFactory
         //      this should create a new URI by some default Algorithm
-        return new UriRef("urn:enhancement-" + EnhancementEngineHelper.randomUUID());
+        return new IRI("urn:enhancement-" + EnhancementEngineHelper.randomUUID());
     }
 
     /*
@@ -183,12 +183,12 @@
      * -----------------------------------------------------------------------
      */
 
-    private int checkAllEntityAnnotations(MGraph g) {
+    private int checkAllEntityAnnotations(Graph g) {
         Iterator<Triple> entityAnnotationIterator = g.filter(null,
                 RDF_TYPE, TechnicalClasses.ENHANCER_ENTITYANNOTATION);
         int entityAnnotationCount = 0;
         while (entityAnnotationIterator.hasNext()) {
-            UriRef entityAnnotation = (UriRef) entityAnnotationIterator.next().getSubject();
+            IRI entityAnnotation = (IRI) entityAnnotationIterator.next().getSubject();
             // test if selected Text is added
             checkEntityAnnotation(g, entityAnnotation);
             entityAnnotationCount++;
@@ -196,14 +196,14 @@
         return entityAnnotationCount;
     }
 
-    private int checkAllTextAnnotations(MGraph g) {
+    private int checkAllTextAnnotations(Graph g) {
         Iterator<Triple> textAnnotationIterator = g.filter(null,
                 RDF_TYPE, ENHANCER_TEXTANNOTATION);
         // test if a textAnnotation is present
         assertTrue("Expecting non-empty textAnnotationIterator", textAnnotationIterator.hasNext());
         int textAnnotationCount = 0;
         while (textAnnotationIterator.hasNext()) {
-            UriRef textAnnotation = (UriRef) textAnnotationIterator.next().getSubject();
+            IRI textAnnotation = (IRI) textAnnotationIterator.next().getSubject();
             // test if selected Text is added
             checkTextAnnotation(g, textAnnotation);
             textAnnotationCount++;
@@ -214,13 +214,13 @@
     /**
      * Checks if a text annotation is valid.
      */
-    private void checkTextAnnotation(MGraph g, UriRef textAnnotation) {
+    private void checkTextAnnotation(Graph g, IRI textAnnotation) {
         Iterator<Triple> selectedTextIterator = g.filter(textAnnotation,
                 ENHANCER_SELECTED_TEXT, null);
         // check if the selected text is added
         assertTrue(selectedTextIterator.hasNext());
         // test if the selected text is part of the TEXT_TO_TEST
-        Resource object = selectedTextIterator.next().getObject();
+        RDFTerm object = selectedTextIterator.next().getObject();
         assertTrue(object instanceof Literal);
         assertTrue(SINGLE_SENTENCE.contains(((Literal) object).getLexicalForm()));
         // test if context is added
@@ -236,14 +236,14 @@
     /**
      * Checks if an entity annotation is valid.
      */
-    private void checkEntityAnnotation(MGraph g, UriRef entityAnnotation) {
+    private void checkEntityAnnotation(Graph g, IRI entityAnnotation) {
         Iterator<Triple> relationToTextAnnotationIterator = g.filter(
                 entityAnnotation, DC_RELATION, null);
         // check if the relation to the text annotation is set
         assertTrue(relationToTextAnnotationIterator.hasNext());
         while (relationToTextAnnotationIterator.hasNext()) {
             // test if the referred annotations are text annotations
-            UriRef referredTextAnnotation = (UriRef) relationToTextAnnotationIterator.next().getObject();
+            IRI referredTextAnnotation = (IRI) relationToTextAnnotationIterator.next().getObject();
             assertTrue(g.filter(referredTextAnnotation, RDF_TYPE,
                     ENHANCER_TEXTANNOTATION).hasNext());
         }
@@ -253,7 +253,7 @@
                 ENHANCER_ENTITY_REFERENCE, null);
         assertTrue(entityReferenceIterator.hasNext());
         // test if the reference is a URI
-        assertTrue(entityReferenceIterator.next().getObject() instanceof UriRef);
+        assertTrue(entityReferenceIterator.next().getObject() instanceof IRI);
         // test if there is only one entity referred
         assertFalse(entityReferenceIterator.hasNext());
 
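
The checks above all follow the same filter pattern on the new Graph API; isolated here as a sketch, with graph and property values supplied by the caller:

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;

    public class FilterSketch {
        // counts the subjects typed with the given class, as checkAll* above do
        static int countByType(Graph g, IRI rdfType, IRI type) {
            int count = 0;
            Iterator<Triple> it = g.filter(null, rdfType, type);
            while (it.hasNext()) {
                it.next();
                count++;
            }
            return count;
        }
    }
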
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/Chain.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/Chain.java
index 3c2bf21..59cd499 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/Chain.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/Chain.java
@@ -18,7 +18,7 @@
 
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 
 /**
  * An Enhancement Chain represents a configuration that defines what engines 
@@ -45,9 +45,9 @@
     String PROPERTY_NAME = "stanbol.enhancer.chain.name";
     /**
      * Getter for the execution plan reflecting the current configuration of this
-     * Chain. The returned {@link Graph} is read only and MUST NOT be changed if 
+     * Chain. The returned {@link ImmutableGraph} is read only and MUST NOT be changed if 
      * the configuration of this Chain changes. This means that the Chain MUST 
-     * create a new Graph instance if the execution plan changes as a result of 
+     * create a new ImmutableGraph instance if the execution plan changes as a result of 
      * a change in the configuration. It MUST NOT change any execution plan 
      * parsed to other components by the getExecutionPlan() method.
      * @return the execution plan as defined by the 
@@ -57,7 +57,7 @@
      * the case of the Chain requires runtime information to determine the
      * execution plan.
      */
-    Graph getExecutionPlan() throws ChainException;
+    ImmutableGraph getExecutionPlan() throws ChainException;
     /**
      * Getter for the set of {@link EnhancementEngine}s referenced by their
      * name within the execution plan. This method is intended to be used 
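Under the new contract getExecutionPlan() hands out a read-only snapshot that must survive later reconfiguration. A minimal sketch of one way an implementation can honour this, assuming Clerezza 1.0's Graph.getImmutableGraph() (which copies the current triples into an ImmutableGraph); the class and method names are illustrative, not part of this commit:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    class ExecutionPlanCache {

        private volatile ImmutableGraph executionPlan;

        /** Rebuild the snapshot whenever the chain configuration changes. */
        void rebuild(Graph mutablePlan) {
            // snapshots already handed out are unaffected by later changes
            this.executionPlan = mutablePlan.getImmutableGraph();
        }

        /** What a Chain#getExecutionPlan() implementation would return. */
        ImmutableGraph get() {
            return executionPlan;
        }

        static ExecutionPlanCache demo() {
            ExecutionPlanCache cache = new ExecutionPlanCache();
            Graph plan = new IndexedGraph();
            // ... add the ep:ExecutionPlan / ep:ExecutionNode triples here ...
            cache.rebuild(plan);
            return cache;
        }
    }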
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ChainException.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ChainException.java
index b0717c1..ec4a50d 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ChainException.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ChainException.java
@@ -20,8 +20,8 @@
 //import static org.apache.stanbol.enhancer.servicesapi.helper.ExecutionPlanHelper.getEngine;
 //import static org.apache.stanbol.enhancer.servicesapi.helper.ExecutionPlanHelper.isOptional;
 //
-//import org.apache.clerezza.rdf.core.Graph;
-//import org.apache.clerezza.rdf.core.NonLiteral;
+//import org.apache.clerezza.commons.rdf.ImmutableGraph;
+//import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
 
 /**
  * BaseException thrown by {@link Chain} implementations or
@@ -49,7 +49,7 @@
 //     * @param message
 //     * @param cause
 //     */
-//    public ChainException(Graph executionPlan, NonLiteral node, String message, Throwable cause){
+//    public ChainException(ImmutableGraph executionPlan, BlankNodeOrIRI node, String message, Throwable cause){
 //        super(String.format("Unable to execute node {} (engine: {} | optional : {}" +
 //        		" | dependsOn : {}) because of: {}",
 //            node,getEngine(executionPlan, node),
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItem.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItem.java
index 3cdc4b9..6e142a5 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItem.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItem.java
@@ -20,20 +20,20 @@
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * A unit of content that Stanbol Enhancer can enhance.
  * <p>
  * Gives access to the binary content that
- * was registered, and the Graph that represents its metadata
+ * was registered, and the Graph that represents its metadata
  * (provided by client and/or generated).
  */
 public interface ContentItem {
 
     /** The Uri of this ContentItem (either supplied by client or generated by Stanbol Enhancer) */
-    UriRef getUri();
+    IRI getUri();
 
     /**
      * The binary content stream. Shortcut for
@@ -53,20 +53,20 @@
      * Read/write lock used to synchronise access to the {@link #getMetadata()
      * metadata} and the content parts of this content item.<p>
      * The lock needs to be used for reading and writing information from/to
-     * the ContentItem. In case the {@link MGraph} retured by {@link #getMetadata()}
-     * is an instanceof used by the {@link org.apache.clerezza.rdf.core.access.LockableMGraph}
+     * the ContentItem. In case the {@link Graph} returned by {@link #getMetadata()}
+     * is an instance of the {@link org.apache.clerezza.rdf.core.access.LockableGraph}
      * the {@link Lock} returned by this method is the same as used by the
      * metadata. This is to avoid deadlocks when using a lock while iterating over 
      * the {@link #getMetadata() metadata} and simultaneously accessing the content 
      * parts.
      *  
-     * @return the lock used for the content parts and the {@link LockableMGraph}
+     * @return the lock used for the content parts and the {@link LockableGraph}
      * containing the metadata of this content item.
      */
     ReadWriteLock getLock();
     
     /** Optional metadata */
-    MGraph getMetadata();
+    Graph getMetadata();
     
     /**
      * The main content of this content item
@@ -96,13 +96,13 @@
      * @throws IllegalArgumentException if <code>null</code> is parsed as
      * uri or clazz.
      */
-    <T> T getPart(UriRef uri, Class<T> clazz) throws NoSuchPartException;
+    <T> T getPart(IRI uri, Class<T> clazz) throws NoSuchPartException;
     
     /**
      * Get the uri of the part at the specified index
      * @throws NoSuchPartException if no part with the parsed index exists
      */
-    UriRef getPartUri(int index) throws NoSuchPartException;
+    IRI getPartUri(int index) throws NoSuchPartException;
 
     /**
      * Add a new part to this ContentItem
@@ -114,7 +114,7 @@
      * @throws IllegalArgumentException if <code>null</code> is parsed as
      * uriRef or object.
      */
-    Object addPart(UriRef uriRef, Object object);
+    Object addPart(IRI uriRef, Object object);
     
     /**
      * Removes a part - other than the main content part - from this ContentItem
@@ -138,6 +138,6 @@
      * <code>{@link #getPartUri(int) getPartUri(0)}</code>. This uri refers to
      * the main content part. This part can NOT be removed by this method
      */
-    void removePart(UriRef uriRef);
+    void removePart(IRI uriRef);
     
 }
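A minimal usage sketch of the locking discipline described above: every read of the metadata graph goes through the item's read lock, and the lock is released in a finally block. The helper class and method are hypothetical:

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;

    final class MetadataAccess {

        private MetadataAccess() {}

        /** Count the metadata triples about the item itself, under a read lock. */
        static int countOwnTriples(ContentItem ci) {
            ci.getLock().readLock().lock();
            try {
                int count = 0;
                Iterator<Triple> it = ci.getMetadata().filter(ci.getUri(), null, null);
                while (it.hasNext()) {
                    it.next();
                    count++;
                }
                return count;
            } finally {
                ci.getLock().readLock().unlock(); // always release, even on failure
            }
        }
    }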
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItemFactory.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItemFactory.java
index c5e4975..e416036 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItemFactory.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/ContentItemFactory.java
@@ -20,9 +20,9 @@
 import java.io.InputStream;
 import java.io.OutputStream;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * OSGI service to be used to create {@link ContentItem}s and Blobs.
@@ -101,7 +101,7 @@
      * @throws IOException on any error while reading the content from the 
      * content source.
      */
-    ContentItem createContentItem(UriRef id, ContentSource source) throws IOException;
+    ContentItem createContentItem(IRI id, ContentSource source) throws IOException;
     /**
      * Creates a new ContentItem for the passed id and content source.
      * @param prefix the URI prefix used to generate the URI of the content item.
@@ -118,7 +118,7 @@
      * Callers can safely close any resource related to the parsed {@link ContentSource}
      * after this method returns.
      * @param source The content source
-     * @param metadata an {@link MGraph} with the metadata or <code>null</code>
+     * @param metadata a {@link Graph} with the metadata or <code>null</code>
      * if none. Implementations are free to use the passed instance or to generate 
      * a new one. However they MUST ensure that all {@link Triple}s contained by 
      * the passed graph are also added to the {@link ContentItem#getMetadata() 
@@ -131,7 +131,7 @@
      * @throws IOException on any error while reading the content from the 
      * content source.
      */
-    ContentItem createContentItem(String prefix, ContentSource source, MGraph metadata) throws IOException;
+    ContentItem createContentItem(String prefix, ContentSource source, Graph metadata) throws IOException;
     /**
      * Creates a new ContentItem for the passed id and content source.
      * @param id the id for the ContentItem or <code>null</code> to generate an id.
@@ -146,7 +146,7 @@
      * Callers can safely close any resource related to the parsed {@link ContentSource}
      * after this method returns.
      * @param source The content source
-     * @param metadata an {@link MGraph} with the metadata or <code>null</code>
+     * @param metadata a {@link Graph} with the metadata or <code>null</code>
      * if none. Implementations are free to use the passed instance or to generate 
      * a new one. However they MUST ensure that all {@link Triple}s contained by 
      * the passed graph are also added to the {@link ContentItem#getMetadata() 
@@ -159,7 +159,7 @@
      * @throws IOException on any error while reading the content from the 
      * content source.
      */
-    ContentItem createContentItem(UriRef id, ContentSource source, MGraph metadata) throws IOException;
+    ContentItem createContentItem(IRI id, ContentSource source, Graph metadata) throws IOException;
     /**
      * Creates a new ContentItem for the passed {@link ContentReference}. The
      * {@link ContentReference#getReference()} is used as ID for the content
@@ -183,7 +183,7 @@
      * dereference}
      * the reference at creation if needed.
      * @param reference the reference to the content
-     * @param metadata an {@link MGraph} with the metadata or <code>null</code>
+     * @param metadata a {@link Graph} with the metadata or <code>null</code>
      * if none. Implementations are free to use the passed instance or to generate 
      * a new one. However they MUST ensure that all {@link Triple}s contained by 
      * the passed graph are also added to the {@link ContentItem#getMetadata() 
@@ -196,7 +196,7 @@
      * @throws IllegalArgumentException if the passed {@link ContentReference}
      * is <code>null</code>.
      */
-    ContentItem createContentItem(ContentReference reference, MGraph metadata) throws IOException;
+    ContentItem createContentItem(ContentReference reference, Graph metadata) throws IOException;
     /**
      * Creates a new Blob based on the passed {@link ContentSource}<p>
      * The content provided by the {@link ContentSource} is consumed by the
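A short creation sketch against the migrated factory signatures. StringSource (Stanbol's in-memory ContentSource implementation), the example id and the dc:terms title triple are assumptions made for illustration; the factory itself would be looked up as an OSGi service:

    import java.io.IOException;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
    import org.apache.stanbol.enhancer.servicesapi.impl.StringSource;

    final class FactoryUsageSketch {

        private FactoryUsageSketch() {}

        /** Create a ContentItem with client-supplied metadata. */
        static ContentItem create(ContentItemFactory factory) throws IOException {
            IRI id = new IRI("urn:example.org:content-item-1"); // hypothetical id
            Graph metadata = new IndexedGraph();
            metadata.add(new TripleImpl(id,
                    new IRI("http://purl.org/dc/terms/title"),
                    new PlainLiteralImpl("Example document")));
            // the factory MUST copy these triples into ci.getMetadata()
            return factory.createContentItem(id,
                    new StringSource("John Smith was born in London."), metadata);
        }
    }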
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/NoSuchPartException.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/NoSuchPartException.java
index b757cd6..6b6b829 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/NoSuchPartException.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/NoSuchPartException.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.servicesapi;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Indicates that a Content Item doesn't have the requested part
@@ -29,7 +29,7 @@
     public NoSuchPartException(int index) {
 		super("The Content Item has no part with index "+index);
 	}
-    public NoSuchPartException(UriRef partUri) {
+    public NoSuchPartException(IRI partUri) {
         super("The Content Item has no part with index "+partUri);
     }
 	public NoSuchPartException(String message) {
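Because getPart(IRI, Class) signals absence by throwing rather than returning null, callers that treat a part as optional typically wrap the call. A tiny hypothetical helper:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.NoSuchPartException;

    final class PartAccessSketch {

        private PartAccessSketch() {}

        /** Resolve an optional Graph content part, mapping absence to null. */
        static Graph getGraphPartOrNull(ContentItem ci, IRI partUri) {
            try {
                return ci.getPart(partUri, Graph.class);
            } catch (NoSuchPartException e) {
                return null; // the part is optional for this caller
            }
        }
    }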
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ContentItemHelper.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ContentItemHelper.java
index 0703b27..41cdd6f 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ContentItemHelper.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ContentItemHelper.java
@@ -33,7 +33,7 @@
 import java.util.Set;
 import java.util.StringTokenizer;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -72,12 +72,12 @@
      * Check that ContentItem#getUri() returns a valid URI or make an urn out of
      * it.
      */
-    public static UriRef ensureUri(ContentItem ci) {
+    public static IRI ensureUri(ContentItem ci) {
         String uri = ci.getUri().getUnicodeString();
         if (!uri.startsWith("http://") && !uri.startsWith("urn:")) {
             uri = "urn:" + urlEncode(uri);
         }
-        return new UriRef(uri);
+        return new IRI(uri);
     }
 
     public static String urlEncode(String uriPart) {
@@ -145,22 +145,22 @@
         return buf.toString();
     }
 
-    public static UriRef makeDefaultUrn(Blob blob) {
+    public static IRI makeDefaultUrn(Blob blob) {
         return makeDefaultUri(DEFAULT_CONTENT_ITEM_PREFIX, blob.getStream());
     }
-    public static UriRef makeDefaultUrn(InputStream in) {
+    public static IRI makeDefaultUrn(InputStream in) {
         return makeDefaultUri(DEFAULT_CONTENT_ITEM_PREFIX, in);
     }
-    public static UriRef makeDefaultUrn(byte[] data){
+    public static IRI makeDefaultUrn(byte[] data){
         return makeDefaultUri(DEFAULT_CONTENT_ITEM_PREFIX, new ByteArrayInputStream(data));
     }
-    public static UriRef makeDefaultUri(String baseUri, Blob blob) {
+    public static IRI makeDefaultUri(String baseUri, Blob blob) {
         return makeDefaultUri(baseUri, blob.getStream());
     }
-    public static UriRef makeDefaultUri(String baseUri, byte[] data) {
+    public static IRI makeDefaultUri(String baseUri, byte[] data) {
         return makeDefaultUri(baseUri, new ByteArrayInputStream(data));
     }
-    public static UriRef makeDefaultUri(String baseUri, InputStream in) {
+    public static IRI makeDefaultUri(String baseUri, InputStream in) {
         // calculate an ID based on the digest of the content
         if (!baseUri.startsWith("urn:") && !baseUri.endsWith("/")) {
             baseUri += "/";
@@ -174,7 +174,7 @@
             		"of an ContentItem!",e);
         }
         IOUtils.closeQuietly(in);
-        return new UriRef(baseUri + SHA1.toLowerCase() + "-" + hexDigest);
+        return new IRI(baseUri + SHA1.toLowerCase() + "-" + hexDigest);
     }
     /**
      * This parses and validates the mime-type and parameters from the
@@ -230,7 +230,7 @@
         return parsed;
     }
     /**
-     * Searches an {@link ContentItem#getPart(UriRef, Class) content part}
+     * Searches a {@link ContentItem#getPart(IRI, Class) content part}
      * of the type {@link Blob} with one of the parsed mimeTypes. <p>
      * NOTE:<ul>
      * <li> MimeTypes are converted to lower case before compared with
@@ -243,20 +243,20 @@
      * this method does NOT throw {@link NoSuchPartException}.
      * @param ci the contentITem
      * @param mimeTypes List of possible mimeTypes
-     * @return the {@link UriRef URI} and the {@link Blob content} of the content 
+     * @return the {@link IRI URI} and the {@link Blob content} of the content 
      * part or <code>null</code> if not found
      * @throws IllegalArgumentException If the parsed {@link ContentItem} is
      * <code>null</code> or the parsed Set with the mimeTypes is <code>null</code>
      * or {@link Set#isEmpty() empty}.
      */
-    public static Entry<UriRef, Blob> getBlob(ContentItem ci, Set<String> mimeTypes){
+    public static Entry<IRI, Blob> getBlob(ContentItem ci, Set<String> mimeTypes){
         if(ci == null){
             throw new IllegalArgumentException("The parsed ContentItem MUST NOT be NULL!");
         }
         if(mimeTypes == null || mimeTypes.isEmpty()){
             throw new IllegalArgumentException("The parsed Set with mime type  MUST NOT be NULL nor empty!");
         }
-        UriRef cpUri = null;
+        IRI cpUri = null;
         int index = 0;
         ci.getLock().readLock().lock();
         try {
@@ -293,14 +293,14 @@
      * this method does NOT throw {@link NoSuchPartException}.
      * @param ci the content item
      * @param clazz the class of the content part
-     * @return the Map with the {@link UriRef id}s and the content as entries.
+     * @return the Map with the {@link IRI id}s and the content as entries.
      */
-    public static <T> Map<UriRef,T> getContentParts(ContentItem ci, Class<T> clazz){
+    public static <T> Map<IRI,T> getContentParts(ContentItem ci, Class<T> clazz){
         if(ci == null){
             throw new IllegalArgumentException("The parsed ContentItem MUST NOT be NULL!");
         }
-        LinkedHashMap<UriRef,T> blobs = new LinkedHashMap<UriRef,T>();
-        UriRef cpUri = null;
+        LinkedHashMap<IRI,T> blobs = new LinkedHashMap<IRI,T>();
+        IRI cpUri = null;
         int index = 0;
         ci.getLock().readLock().lock();
         try {
@@ -365,7 +365,7 @@
      * EnhancementEngine properties <p>
      * @since 0.12.1
      */
-    public static final UriRef REQUEST_PROPERTIES_URI = new UriRef(
+    public static final IRI REQUEST_PROPERTIES_URI = new IRI(
         "urn:apache.org:stanbol.enhancer:request.properties");
 
     /**
@@ -373,7 +373,7 @@
      * <code>0.12.0</code>
      */
     @Deprecated
-    private static final UriRef WEB_ENHANCEMENT_PROPERTIES_URI = new UriRef(
+    private static final IRI WEB_ENHANCEMENT_PROPERTIES_URI = new IRI(
         "urn:apache.org:stanbol.web:enhancement.properties");
     
     /**
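Two usage sketches for the migrated helper signatures above. The wrapper class is hypothetical, and the exact value of DEFAULT_CONTENT_ITEM_PREFIX (hence the shape of the generated urn) is assumed rather than shown in the hunks above:

    import java.nio.charset.StandardCharsets;
    import java.util.Collections;
    import java.util.Map.Entry;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.servicesapi.Blob;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.helper.ContentItemHelper;

    final class HelperUsageSketch {

        private HelperUsageSketch() {}

        /** Locate the plain-text view of a content item, if any. */
        static String plainTextPartUri(ContentItem ci) {
            Entry<IRI, Blob> entry = ContentItemHelper.getBlob(ci,
                    Collections.singleton("text/plain"));
            return entry == null ? null : entry.getKey().getUnicodeString();
        }

        /** Content-addressed id, e.g. "urn:content-item-sha1-<hex>". */
        static IRI contentAddressedId(String content) {
            return ContentItemHelper.makeDefaultUrn(
                    content.getBytes(StandardCharsets.UTF_8));
        }
    }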
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/EnhancementEngineHelper.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/EnhancementEngineHelper.java
index 3e4f1bc..e1a05c2 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/EnhancementEngineHelper.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/EnhancementEngineHelper.java
@@ -39,21 +39,18 @@
 import java.util.Random;
 import java.util.UUID;
 
-import org.apache.clerezza.rdf.core.BNode;
+import org.apache.clerezza.commons.rdf.BlankNode;
 import org.apache.clerezza.rdf.core.InvalidLiteralTypeException;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -117,22 +115,22 @@
      * Create a new instance with the types enhancer:Enhancement and
      * enhancer:TextAnnotation in the metadata-graph of the content
      * item along with default properties (dc:creator and dc:created) and return
-     * the UriRef of the extraction so that engines can further add.
+     * the IRI of the extraction so that engines can further add.
      *
      * @param ci the ContentItem being under analysis
      * @param engine the Engine performing the analysis
      *
      * @return the URI of the new enhancement instance
      */
-    public static UriRef createTextEnhancement(ContentItem ci,
+    public static IRI createTextEnhancement(ContentItem ci,
             EnhancementEngine engine){
-        return createTextEnhancement(ci.getMetadata(), engine, new UriRef(ci.getUri().getUnicodeString()));
+        return createTextEnhancement(ci.getMetadata(), engine, new IRI(ci.getUri().getUnicodeString()));
     }
     /**
      * Create a new instance with the types enhancer:Enhancement and
      * enhancer:TextAnnotation in the parsed graph along with default properties
      * (dc:creator, dc:created and enhancer:extracted-form) and return
-     * the UriRef of the extraction so that engines can further add.
+     * the IRI of the extraction so that engines can further add.
      *
      * @param metadata the graph
      * @param engine the engine
@@ -140,9 +138,9 @@
      *
      * @return the URI of the new enhancement instance
      */
-    public static UriRef createTextEnhancement(MGraph metadata,
-                EnhancementEngine engine, UriRef contentItemId){
-        UriRef enhancement = createEnhancement(metadata, engine,contentItemId);
+    public static IRI createTextEnhancement(Graph metadata,
+                EnhancementEngine engine, IRI contentItemId){
+        IRI enhancement = createEnhancement(metadata, engine,contentItemId);
         //add the Text Annotation Type
         metadata.add(new TripleImpl(enhancement, RDF_TYPE,
                 ENHANCER_TEXTANNOTATION));
@@ -153,7 +151,7 @@
      * fise:selected-text and fise:selection-suffix properties for the 
      * parsed fise:TextAnnotation instance according to the parsed parameters.<p>
      * While it is intended to be used for TextAnnotations this method can also
-     * be used to add the mentioned properties to {@link UriRef}s with different
+     * be used to add the mentioned properties to {@link IRI}s with different
      * type.<p>
      * <b>NOTE</b> the <code>allowSelectionHeadTail</code>: This parameter allows
      * to deactivate the usage of fise:selection-head and fise:selection-tail.
@@ -161,7 +159,7 @@
      * and <code>true</code> in case sections of the text (e.g. phrases, sentences,
      * chapters ...) are selected.
      * @param metadata The RDF graph to add the information
-     * @param textAnnotation the UriRef of the fise:TextAnnotation
+     * @param textAnnotation the IRI of the fise:TextAnnotation
      * @param content the plain text content as String
      * @param start the start index of the occurrence 
      * @param end the end index of the occurrence
@@ -175,7 +173,7 @@
      * size of the selected area.
      * @since 0.11.0
      */
-    public static void setOccurrence(MGraph metadata, UriRef textAnnotation,
+    public static void setOccurrence(Graph metadata, IRI textAnnotation,
             String content, Integer start, Integer end, Language lang, int prefixSuffixSize, 
             boolean allowSelectionHeadTail){
         //set start, end
@@ -275,21 +273,21 @@
      * Create a new instance with the types enhancer:Enhancement and
      * enhancer:EntityAnnotation in the metadata-graph of the content
      * item along with default properties (dc:creator and dc:created) and return
-     * the UriRef of the extraction so that engines can further add
+     * the IRI of the extraction so that engines can further add
      *
      * @param ci the ContentItem being under analysis
      * @param engine the Engine performing the analysis
      * @return the URI of the new enhancement instance
      */
-    public static UriRef createEntityEnhancement(ContentItem ci,
+    public static IRI createEntityEnhancement(ContentItem ci,
             EnhancementEngine engine){
-        return createEntityEnhancement(ci.getMetadata(), engine, new UriRef(ci.getUri().getUnicodeString()));
+        return createEntityEnhancement(ci.getMetadata(), engine, new IRI(ci.getUri().getUnicodeString()));
     }
     /**
      * Create a new instance with the types enhancer:Enhancement and
      * enhancer:EntityAnnotation in the parsed graph along with default properties
      * (dc:creator, dc:created and enhancer:extracted-form) and return
-     * the UriRef of the extraction so that engines can further add.
+     * the IRI of the extraction so that engines can further add.
      *
      * @param metadata the graph
      * @param engine the engine
@@ -297,9 +295,9 @@
      *
      * @return the URI of the new enhancement instance
      */
-    public static UriRef createEntityEnhancement(MGraph metadata,
-                EnhancementEngine engine, UriRef contentItemId){
-        UriRef enhancement = createEnhancement(metadata, engine, contentItemId);
+    public static IRI createEntityEnhancement(Graph metadata,
+                EnhancementEngine engine, IRI contentItemId){
+        IRI enhancement = createEnhancement(metadata, engine, contentItemId);
         metadata.add(new TripleImpl(enhancement, RDF_TYPE, ENHANCER_ENTITYANNOTATION));
         return enhancement;
     }
@@ -307,7 +305,7 @@
      * Create a new instance with the types enhancer:Enhancement and
      * enhancer:TopicAnnotation in the parsed graph along with default properties
      * (dc:creator, dc:created and enhancer:extracted-form) and return
-     * the UriRef of the extraction so that engines can further add.
+     * the IRI of the extraction so that engines can further add.
      *
      * @param metadata the graph
      * @param engine the engine
@@ -315,9 +313,9 @@
      *
      * @return the URI of the new enhancement instance
      */
-    public static UriRef createTopicEnhancement(MGraph metadata,
-                 EnhancementEngine engine, UriRef contentItemId){
-         UriRef enhancement = createEnhancement(metadata, engine, contentItemId);
+    public static IRI createTopicEnhancement(Graph metadata,
+                 EnhancementEngine engine, IRI contentItemId){
+         IRI enhancement = createEnhancement(metadata, engine, contentItemId);
          metadata.add(new TripleImpl(enhancement, RDF_TYPE, ENHANCER_TOPICANNOTATION));
          return enhancement;
      }
@@ -325,20 +323,20 @@
      * Create a new instance with the types enhancer:Enhancement and
      * enhancer:TopicAnnotation in the metadata-graph of the content
      * item along with default properties (dc:creator and dc:created) and return
-     * the UriRef of the extraction so that engines can further add
+     * the IRI of the extraction so that engines can further add
      *
      * @param ci the ContentItem being under analysis
      * @param engine the Engine performing the analysis
      * @return the URI of the new enhancement instance
      */
-    public static UriRef createTopicEnhancement(ContentItem ci,
+    public static IRI createTopicEnhancement(ContentItem ci,
             EnhancementEngine engine){
-        return createTopicEnhancement(ci.getMetadata(), engine, new UriRef(ci.getUri().getUnicodeString()));
+        return createTopicEnhancement(ci.getMetadata(), engine, new IRI(ci.getUri().getUnicodeString()));
     }
     /**
      * Create a new enhancement instance in the metadata-graph of the content
      * item along with default properties (dc:creator and dc:created) and return
-     * the UriRef of the extraction so that engines can further add. <p>
+     * the IRI of the extraction so that engines can further add. <p>
      * <i>NOTE:</i> This method was protected prior to <code>0.12.1</code> (see
      * <a href="https://issues.apache.org/jira/browse/STANBOL-1321">STANBOL-1321</a>)
      *
@@ -348,11 +346,11 @@
      * @return the URI of the new enhancement instance
      * @since 0.12.1
      */
-    public static UriRef createEnhancement(MGraph metadata,
-            EnhancementEngine engine, UriRef contentItemId){
+    public static IRI createEnhancement(Graph metadata,
+            EnhancementEngine engine, IRI contentItemId){
         LiteralFactory literalFactory = LiteralFactory.getInstance();
 
-        UriRef enhancement = new UriRef("urn:enhancement-"
+        IRI enhancement = new IRI("urn:enhancement-"
                 + EnhancementEngineHelper.randomUUID());
         //add the Enhancement Type
         metadata.add(new TripleImpl(enhancement, RDF_TYPE,
@@ -386,7 +384,7 @@
      * @param enhancement the enhancement
      * @param engine the engine
      */
-    public static void addContributingEngine(MGraph metadata, UriRef enhancement,
+    public static void addContributingEngine(Graph metadata, IRI enhancement,
                                              EnhancementEngine engine){
         LiteralFactory literalFactory = LiteralFactory.getInstance();
         // TODO: use a public dereferencing URI instead?
@@ -399,7 +397,7 @@
     /**
      * Create a new extraction instance in the metadata-graph of the content
      * item along with default properties (dc:creator and dc:created) and return
-     * the UriRef of the extraction so that engines can further add
+     * the IRI of the extraction so that engines can further add
      *
      * @param ci the ContentItem being under analysis
      * @param engine the Engine performing the analysis
@@ -409,12 +407,12 @@
      * @see EnhancementEngineHelper#createTextEnhancement(ContentItem, EnhancementEngine)
      */
     @Deprecated
-    public static UriRef createNewExtraction(ContentItem ci,
+    public static IRI createNewExtraction(ContentItem ci,
             EnhancementEngine engine) {
         LiteralFactory literalFactory = LiteralFactory.getInstance();
 
-        MGraph metadata = ci.getMetadata();
-        UriRef extraction = new UriRef("urn:extraction-"
+        Graph metadata = ci.getMetadata();
+        IRI extraction = new IRI("urn:extraction-"
                 + EnhancementEngineHelper.randomUUID());
 
         metadata.add(new TripleImpl(extraction, RDF_TYPE,
@@ -422,7 +420,7 @@
 
         // relate the extraction to the content item
         metadata.add(new TripleImpl(extraction,
-                ENHANCER_RELATED_CONTENT_ITEM, new UriRef(ci.getUri().getUnicodeString())));
+                ENHANCER_RELATED_CONTENT_ITEM, new IRI(ci.getUri().getUnicodeString())));
 
         // creation date
         metadata.add(new TripleImpl(extraction, DC_CREATED,
@@ -459,16 +457,16 @@
      * @param literalFactory the literalFactory
      * @return the value
      */
-    public static <T> T get(TripleCollection graph, NonLiteral resource, UriRef property, Class<T> type,
+    public static <T> T get(Graph graph, BlankNodeOrIRI resource, IRI property, Class<T> type,
             LiteralFactory literalFactory){
         Iterator<Triple> results = graph.filter(resource, property, null);
         if(results.hasNext()){
             while(results.hasNext()){
                 Triple result = results.next();
-                if(result.getObject() instanceof TypedLiteral){
-                    return literalFactory.createObject(type, (TypedLiteral)result.getObject());
+                if(result.getObject() instanceof Literal){
+                    return literalFactory.createObject(type, (Literal)result.getObject());
                 } else {
-                    log.debug("Triple {} does not have a TypedLiteral as object! -> ignore",result);
+                    log.debug("Triple {} does not have a Literal as object! -> ignore",result);
                 }
             }
             log.info("No value for {} and property {} had the requested Type {} -> return null",
@@ -487,7 +485,7 @@
      * @param property the property
      * @param value the value
      */
-    public static void set(MGraph graph, NonLiteral resource, UriRef property, Resource value){
+    public static void set(Graph graph, BlankNodeOrIRI resource, IRI property, RDFTerm value){
         set(graph,resource,property,value == null ? null : singleton(value),null);
     }
     /**
@@ -498,7 +496,7 @@
      * @param property the property
      * @param value the value
      */
-    public static void set(MGraph graph, NonLiteral resource, UriRef property, Collection<Resource> values){
+    public static void set(Graph graph, BlankNodeOrIRI resource, IRI property, Collection<RDFTerm> values){
         set(graph,resource,property,values,null);
     }
 
@@ -508,13 +506,13 @@
      * @param graph the graph
      * @param resource the resource
      * @param property the property
-     * @param value the value. In case it is an instance of {@link Resource} it
+     * @param value the value. In case it is an instance of {@link RDFTerm} it
      * is directly added to the graph. Otherwise the parsed {@link LiteralFactory}
      * is used to create a {@link Literal} for the parsed value.
      * @param literalFactory the {@link LiteralFactory} used in case the parsed
-     * value is not an {@link Resource}
+     * value is not an {@link RDFTerm}
      */
-    public static void set(MGraph graph, NonLiteral resource, UriRef property,
+    public static void set(Graph graph, BlankNodeOrIRI resource, IRI property,
                            Object value, LiteralFactory literalFactory){
         set(graph,resource,property,value == null ? null : singleton(value),literalFactory);
     }
@@ -524,13 +522,13 @@
      * @param graph the graph
      * @param resource the resource
      * @param property the property
-     * @param value the value. In case it is an instance of {@link Resource} it
+     * @param value the value. In case it is an instance of {@link RDFTerm} it
      * is directly added to the graph. Otherwise the parsed {@link LiteralFactory}
      * is used to create a {@link Literal} for the parsed value.
      * @param literalFactory the {@link LiteralFactory} used in case the parsed
-     * value is not an {@link Resource}
+     * value is not an {@link RDFTerm}
      */
-    public static void set(MGraph graph, NonLiteral resource, UriRef property,
+    public static void set(Graph graph, BlankNodeOrIRI resource, IRI property,
                                Collection<?> values, LiteralFactory literalFactory){
         Iterator<Triple> currentValues = graph.filter(resource, property, null);
         while(currentValues.hasNext()){
@@ -539,8 +537,8 @@
         }
         if(values != null){
             for(Object value : values){
-                if(value instanceof Resource){
-                    graph.add(new TripleImpl(resource, property, (Resource) value));
+                if(value instanceof RDFTerm){
+                    graph.add(new TripleImpl(resource, property, (RDFTerm) value));
                 } else if (value != null){
                     graph.add(new TripleImpl(resource, property, 
                         literalFactory.createTypedLiteral(value)));
@@ -560,16 +558,16 @@
      * @param literalFactory the literalFactory
      * @return the value
      */
-    public static <T> Iterator<T> getValues(TripleCollection graph, NonLiteral resource,
-            UriRef property, final Class<T> type, final  LiteralFactory literalFactory){
+    public static <T> Iterator<T> getValues(Graph graph, BlankNodeOrIRI resource,
+            IRI property, final Class<T> type, final  LiteralFactory literalFactory){
         final Iterator<Triple> results = graph.filter(resource, property, null);
         return new Iterator<T>() {
-            //TODO: dose not check if the object of the triple is of type UriRef
+            //TODO: does not check if the object of the triple is of type IRI
             @Override
             public boolean hasNext() {    return results.hasNext(); }
             @Override
             public T next() {
-                return literalFactory.createObject(type, (TypedLiteral)results.next().getObject());
+                return literalFactory.createObject(type, (Literal)results.next().getObject());
             }
             @Override
             public void remove() { results.remove(); }
@@ -582,7 +580,7 @@
      * @param property the property
      * @return the value
      */
-    public static String getString(TripleCollection graph, NonLiteral resource, UriRef property){
+    public static String getString(Graph graph, BlankNodeOrIRI resource, IRI property){
         Iterator<Triple> results = graph.filter(resource, property, null);
         if(results.hasNext()){
             while (results.hasNext()){
@@ -608,10 +606,10 @@
      * @param property the property
      * @return the value
      */
-    public static Iterator<String> getStrings(TripleCollection graph, NonLiteral resource, UriRef property){
+    public static Iterator<String> getStrings(Graph graph, BlankNodeOrIRI resource, IRI property){
         final Iterator<Triple> results = graph.filter(resource, property, null);
         return new Iterator<String>() {
-            //TODO: dose not check if the object of the triple is of type UriRef
+            //TODO: does not check if the object of the triple is of type IRI
             @Override
             public boolean hasNext() { return results.hasNext(); }
             @Override
@@ -629,18 +627,18 @@
      * @param property the property
      * @return the value
      */
-    public static UriRef getReference(TripleCollection graph, NonLiteral resource, UriRef property){
+    public static IRI getReference(Graph graph, BlankNodeOrIRI resource, IRI property){
         Iterator<Triple> results = graph.filter(resource, property, null);
         if(results.hasNext()){
             while(results.hasNext()){
             Triple result = results.next();
-                if(result.getObject() instanceof UriRef){
-                    return (UriRef)result.getObject();
+                if(result.getObject() instanceof IRI){
+                    return (IRI)result.getObject();
                 } else {
-                    log.debug("Triple "+result+" does not have a UriRef as object! -> ignore");
+                    log.debug("Triple "+result+" does not have a IRI as object! -> ignore");
                 }
             }
-            log.info("No UriRef value for {} and property {} -> return null",resource,property);
+            log.info("No IRI value for {} and property {} -> return null",resource,property);
             return null;
         } else {
             log.debug("No Triple found for {} and property {}! -> return null",resource,property);
@@ -655,14 +653,14 @@
      * @param property the property
      * @return The iterator over all the values
      */
-    public static Iterator<UriRef> getReferences(TripleCollection graph, NonLiteral resource, UriRef property){
+    public static Iterator<IRI> getReferences(Graph graph, BlankNodeOrIRI resource, IRI property){
         final Iterator<Triple> results = graph.filter(resource, property, null);
-        return new Iterator<UriRef>() {
-            //TODO: dose not check if the object of the triple is of type UriRef
+        return new Iterator<IRI>() {
+            //TODO: does not check if the object of the triple is of type IRI
             @Override
             public boolean hasNext() { return results.hasNext(); }
             @Override
-            public UriRef next() { return (UriRef)results.next().getObject(); }
+            public IRI next() { return (IRI)results.next().getObject(); }
             @Override
             public void remove() { results.remove(); }
         };
@@ -717,16 +715,16 @@
      * @return the sorted list of language annotations or an empty list if none.
      * @throws IllegalArgumentException if <code>null</code> is parsed as graph
      */
-    public static List<NonLiteral> getLanguageAnnotations(TripleCollection graph){
+    public static List<BlankNodeOrIRI> getLanguageAnnotations(Graph graph){
         if(graph == null){
             throw new IllegalArgumentException("The parsed graph MUST NOT be NULL!");
         }
         // I do not use SPARQL, because I do not want to instantiate a QueryEngine
-        final Map<NonLiteral,Double> confidences = new HashMap<NonLiteral,Double>();
-        List<NonLiteral> langAnnotations = new ArrayList<NonLiteral>();
+        final Map<BlankNodeOrIRI,Double> confidences = new HashMap<BlankNodeOrIRI,Double>();
+        List<BlankNodeOrIRI> langAnnotations = new ArrayList<BlankNodeOrIRI>();
         Iterator<Triple> textAnnoataions = graph.filter(null, RDF_TYPE, ENHANCER_TEXTANNOTATION);
         while(textAnnoataions.hasNext()){
-            NonLiteral textAnnotation = textAnnoataions.next().getSubject();
+            BlankNodeOrIRI textAnnotation = textAnnoataions.next().getSubject();
             String language = getString(graph, textAnnotation, DC_LANGUAGE);
             if(language != null){
                 Double confidence = null;
@@ -748,9 +746,9 @@
             }
         }
         if(langAnnotations.size() > 1){
-            Collections.sort(langAnnotations,new Comparator<NonLiteral>() {
+            Collections.sort(langAnnotations,new Comparator<BlankNodeOrIRI>() {
                 @Override
-                public int compare(NonLiteral o1, NonLiteral o2) {
+                public int compare(BlankNodeOrIRI o1, BlankNodeOrIRI o2) {
                     Double c1 = confidences.get(o1);
                     Double c2 = confidences.get(o2);
                    //decreasing order (values without confidence last)
@@ -772,13 +770,13 @@
      * 'fise:confidence' value - or if no annotations are present - the
      * 'dc-terms:language' value of the {@link ContentItem#getUri()}.<p>
      * Users that want to obtain all language annotations should use
-     * {@link #getLanguageAnnotations(TripleCollection)} instead.<p>
+     * {@link #getLanguageAnnotations(Graph)} instead.<p>
      * This method ensures a write lock on the {@link ContentItem}.
      * @param ci the contentItem
      * @return the identified language of the parsed {@link ContentItem}.
      * <code>null</code> if not available.
      * @throws IllegalArgumentException if <code>null</code> is parsed as content item
-     * @see #getLanguageAnnotations(TripleCollection)
+     * @see #getLanguageAnnotations(Graph)
      */
     public static String getLanguage(ContentItem ci){
         if(ci == null){
@@ -786,7 +784,7 @@
         }
         ci.getLock().readLock().lock();
         try {
-            List<NonLiteral> langAnnotations = getLanguageAnnotations(ci.getMetadata());
+            List<BlankNodeOrIRI> langAnnotations = getLanguageAnnotations(ci.getMetadata());
             if(langAnnotations.isEmpty()){ //fallback
                 return getString(ci.getMetadata(), ci.getUri(), DC_LANGUAGE);
             } else {
@@ -921,10 +919,10 @@
         Map<String,Object> engineExProps = new HashMap<String,Object>();
         ci.getLock().readLock().lock();
         try{
-            MGraph em = ExecutionMetadataHelper.getExecutionMetadata(ci);
+            Graph em = ExecutionMetadataHelper.getExecutionMetadata(ci);
             //(1.a) retrieve EnhancementProperties from the ep:ExecutionPlan
             log.debug("> extract EnhancementProperties form the ExecutionPlan");
-            NonLiteral executionPlanNode = ExecutionMetadataHelper.getExecutionPlanNode(em, 
+            BlankNodeOrIRI executionPlanNode = ExecutionMetadataHelper.getExecutionPlanNode(em, 
                 ExecutionMetadataHelper.getChainExecution(em, ci.getUri()));
             extractEnhancementProperties(chainExProps, em, executionPlanNode, "Chain Execution");
            //(1.b) retrieve Enhancement Properties from the ep:ExecutionNode
@@ -935,7 +933,7 @@
             //NOTE: we expect only a single execution node for an engine, but if
             //      there are multiple we will merge the properties of those
             while(engineExecutions.hasNext()){
-                NonLiteral engineExecution = engineExecutions.next().getSubject();
+                BlankNodeOrIRI engineExecution = engineExecutions.next().getSubject();
                 if(em.contains(new TripleImpl(executionPlanNode, ExecutionPlan.HAS_EXECUTION_NODE, engineExecution))){
                     extractEnhancementProperties(engineExProps,em, engineExecution, "Engine Execution");
                 } //else engine execution of a different execution plan
@@ -959,8 +957,8 @@
      * @param node the node to extract the properties from
      * @param level the name of the level (only used for logging)
      */
-    private static void extractEnhancementProperties(Map<String,Object> properties, TripleCollection graph,
-            NonLiteral node, String level) {
+    private static void extractEnhancementProperties(Map<String,Object> properties, Graph graph,
+            BlankNodeOrIRI node, String level) {
         log.debug(" - extract {} properties from {}", level, node);
         Iterator<Triple> props = graph.filter(node, null, null);
         while(props.hasNext()){
@@ -968,7 +966,7 @@
             String propUri =  t.getPredicate().getUnicodeString();
             if(propUri.startsWith(EHPROP_NS)){
                 String prop = propUri.substring(EHPROP_NS_LENGTH);
-                Resource resource = t.getObject();
+                RDFTerm resource = t.getObject();
                 Object value = extractEnhancementPropertyValue(resource);
                 if(value != null && !prop.isEmpty()){
                     Object current = properties.get(prop);
@@ -997,17 +995,17 @@
     }
 
     /**
-     * Extracts the EnhancementProperty value from the parsed Resource.<p>
-     * Currently this will return {@link UriRef#getUnicodeString()} or
-     * {@link Literal#getLexicalForm()}. For {@link BNode}s <code>null</code> 
+     * Extracts the EnhancementProperty value from the parsed RDFTerm.<p>
+     * Currently this will return {@link IRI#getUnicodeString()} or
+     * {@link Literal#getLexicalForm()}. For {@link BlankNode}s <code>null</code> 
      * is returned.
     * @param r the resource to parse the value from
      * @return the parsed value
      */
-    private static Object extractEnhancementPropertyValue(Resource r) {
+    private static Object extractEnhancementPropertyValue(RDFTerm r) {
         Object value;
-        if(r instanceof UriRef){
-            value = ((UriRef)r).getUnicodeString();
+        if(r instanceof IRI){
+            value = ((IRI)r).getUnicodeString();
         } else if(r instanceof Literal){
             value = ((Literal) r).getLexicalForm();
         } else {
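A hedged engine-side sketch combining the migrated helpers above: a write lock around the metadata change, createTextEnhancement for the fise:TextAnnotation node and setOccurrence for the selection properties. The wrapper class and method are illustrative; the parameter choices (prefix/suffix size 3, selection-head/-tail disabled) follow the javadoc's advice for short selections such as named entities:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
    import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;

    final class EngineUsageSketch {

        private EngineUsageSketch() {}

        /** Annotate one occurrence of content[start,end) as a TextAnnotation. */
        static IRI annotateOccurrence(ContentItem ci, EnhancementEngine engine,
                String content, int start, int end, String lang) {
            ci.getLock().writeLock().lock(); // enhancements mutate the metadata
            try {
                Graph metadata = ci.getMetadata();
                IRI textAnnotation =
                        EnhancementEngineHelper.createTextEnhancement(ci, engine);
                EnhancementEngineHelper.setOccurrence(metadata, textAnnotation,
                        content, start, end,
                        lang == null ? null : new Language(lang),
                        3,      // fise:selection-prefix/-suffix size
                        false); // no fise:selection-head/-tail
                return textAnnotation;
            } finally {
                ci.getLock().writeLock().unlock();
            }
        }
    }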
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionMetadataHelper.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionMetadataHelper.java
index 87006f1..6402dfb 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionMetadataHelper.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionMetadataHelper.java
@@ -48,18 +48,17 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementJobManager;
@@ -82,9 +81,9 @@
 
     private static final LiteralFactory lf = LiteralFactory.getInstance();
     
-    public static NonLiteral createChainExecutionNode(MGraph graph, NonLiteral executionPlan, 
-                                        UriRef ciUri, boolean defaultChain){
-        NonLiteral node = new BNode();
+    public static BlankNodeOrIRI createChainExecutionNode(Graph graph, BlankNodeOrIRI executionPlan, 
+                                        IRI ciUri, boolean defaultChain){
+        BlankNodeOrIRI node = new BlankNode();
         graph.add(new TripleImpl(node, RDF_TYPE, EXECUTION));
         graph.add(new TripleImpl(node, RDF_TYPE, CHAIN_EXECUTION));
         graph.add(new TripleImpl(node, ENHANCES, ciUri));
@@ -96,10 +95,10 @@
         return node;
     }
     
-    public static NonLiteral createEngineExecution(MGraph graph, NonLiteral chainExecution,
-                                     NonLiteral executionNode){
+    public static BlankNodeOrIRI createEngineExecution(Graph graph, BlankNodeOrIRI chainExecution,
+                                     BlankNodeOrIRI executionNode){
         
-        NonLiteral node = new BNode();
+        BlankNodeOrIRI node = new BlankNode();
         graph.add(new TripleImpl(node, RDF_TYPE, EXECUTION));
         graph.add(new TripleImpl(node, RDF_TYPE, ENGINE_EXECUTION));
         graph.add(new TripleImpl(node, EXECUTION_PART, chainExecution));
@@ -113,7 +112,7 @@
      * @param execution
      * @param message An optional message
      */
-    public static void setExecutionCompleted(MGraph graph,NonLiteral execution,String message){
+    public static void setExecutionCompleted(Graph graph,BlankNodeOrIRI execution,String message){
         Literal dateTime = lf.createTypedLiteral(new Date());
         setStatus(graph, execution,STATUS_COMPLETED);
         graph.add(new TripleImpl(execution, COMPLETED, dateTime));
@@ -127,7 +126,7 @@
      * @param graph the graph holding the execution metadata
      * @param execution the execution node
      */
-    public static void setExecutionScheduled(MGraph graph,NonLiteral execution){
+    public static void setExecutionScheduled(Graph graph,BlankNodeOrIRI execution){
         setStatus(graph, execution,STATUS_SCHEDULED);
         Iterator<Triple> it = graph.filter(execution, STARTED, null);
         while(it.hasNext()){
@@ -146,7 +145,7 @@
      * @param execution
     * @param message A message describing why the execution failed
      */
-    public static void setExecutionFaild(MGraph graph,NonLiteral execution,String message){
+    public static void setExecutionFaild(Graph graph,BlankNodeOrIRI execution,String message){
         Literal dateTime = lf.createTypedLiteral(new Date());
         setStatus(graph, execution,STATUS_FAILED);
         graph.add(new TripleImpl(execution, COMPLETED, dateTime));
@@ -163,7 +162,7 @@
      * @param execution
      * @param message An optional message why this execution was skipped
      */
-    public static void setExecutionSkipped(MGraph graph,NonLiteral execution,String message){
+    public static void setExecutionSkipped(Graph graph,BlankNodeOrIRI execution,String message){
         Literal dateTime = lf.createTypedLiteral(new Date());
         setStatus(graph, execution,STATUS_SKIPPED);
         graph.add(new TripleImpl(execution, STARTED, dateTime));
@@ -178,7 +177,7 @@
      * @param graph
      * @param execution
      */
-    public static void setExecutionInProgress(MGraph graph,NonLiteral execution){
+    public static void setExecutionInProgress(Graph graph,BlankNodeOrIRI execution){
         Literal dateTime = lf.createTypedLiteral(new Date());
         setStatus(graph, execution,STATUS_IN_PROGRESS);
         graph.add(new TripleImpl(execution, STARTED, dateTime));
@@ -190,7 +189,7 @@
      * @param graph
      * @param execution
      */
-    private static void setStatus(MGraph graph, NonLiteral execution, UriRef status) {
+    private static void setStatus(Graph graph, BlankNodeOrIRI execution, IRI status) {
         Iterator<Triple> it = graph.filter(execution, STATUS, null);
         while(it.hasNext()){
             it.next();
@@ -212,8 +211,8 @@
      * @param chainName the name of the executed chain
      * @return the node or <code>null</code> if not found.
      */
-    public static final NonLiteral getChainExecutionForChainName(TripleCollection em, TripleCollection ep, String chainName){
-        final NonLiteral executionPlanNode = ExecutionPlanHelper.getExecutionPlan(ep, chainName);
+    public static final BlankNodeOrIRI getChainExecutionForChainName(Graph em, Graph ep, String chainName){
+        final BlankNodeOrIRI executionPlanNode = ExecutionPlanHelper.getExecutionPlan(ep, chainName);
         if(executionPlanNode == null){
             return null;
         } else {
@@ -229,7 +228,7 @@
      * @param executionPlanNode the {@link ExecutionPlan#EXECUTION_PLAN} node
      * @return the {@link ExecutionMetadata#CHAIN_EXECUTION} node
      */
-    public static NonLiteral getChainExecutionForExecutionPlan(TripleCollection graph, final NonLiteral executionPlanNode) {
+    public static BlankNodeOrIRI getChainExecutionForExecutionPlan(Graph graph, final BlankNodeOrIRI executionPlanNode) {
         if(graph == null){
             throw new IllegalArgumentException("The parsed graph with the execution metadata MUST NOT be NULL!");
         }
@@ -251,13 +250,13 @@
      * content part
      * @since 0.12.1
      */
-    public static MGraph getExecutionMetadata(ContentItem contentItem) {
+    public static Graph getExecutionMetadata(ContentItem contentItem) {
         if(contentItem == null) {
             throw new IllegalArgumentException("The parsed ContentItme MUST NOT be NULL!");
         }
         contentItem.getLock().readLock().lock();
         try{
-            return contentItem.getPart(CHAIN_EXECUTION, MGraph.class);
+            return contentItem.getPart(CHAIN_EXECUTION, Graph.class);
         }finally{
             contentItem.getLock().readLock().unlock();
         }
@@ -271,32 +270,32 @@
      * content item otherwise it returns the existing part registered under that
      * URI.<p>
      * Typically users will also want to use 
-     * {@link #initExecutionMetadata(MGraph, TripleCollection, UriRef, String, boolean)}
+     * {@link #initExecutionMetadata(Graph, Graph, IRI, String, boolean)}
     * to initialise the state based on the graph returned by this method.
      * NOTES:<ul>
      * <li> If a content part is registered under the URI 
      * {@link ExecutionMetadata#CHAIN_EXECUTION} that is not of type
-     * {@link MGraph} this method will replace it with an empty {@link MGraph}.
+     * {@link Graph} this method will replace it with an empty {@link Graph}.
      * <li> This method acquires a write lock on the content item while checking
      * for the content part.
      * </ul>
      * @param contentItem the contentItem
-     * @return the {@link MGraph} with the execution metadata as registered as
+     * @return the {@link Graph} with the execution metadata as registered as
      * content part with the URI {@link ExecutionMetadata#CHAIN_EXECUTION} to 
      * the {@link ContentItem}
     * @throws IllegalArgumentException if the parsed content item is <code>null</code>.
      */
-    public static MGraph initExecutionMetadataContentPart(ContentItem contentItem) {
+    public static Graph initExecutionMetadataContentPart(ContentItem contentItem) {
         if(contentItem == null){
           throw new IllegalArgumentException("The parsed ContentItem MUST NOT be NULL!");
         }
-        MGraph executionMetadata;
+        Graph executionMetadata;
         contentItem.getLock().writeLock().lock();
         try {
             try {
-                executionMetadata = contentItem.getPart(CHAIN_EXECUTION, MGraph.class);
+                executionMetadata = contentItem.getPart(CHAIN_EXECUTION, Graph.class);
             } catch (NoSuchPartException e) {
-                executionMetadata = new IndexedMGraph();
+                executionMetadata = new IndexedGraph();
                 contentItem.addPart(CHAIN_EXECUTION, executionMetadata);
             }
         } finally {
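
Illustration (not part of this patch): the get-or-create counterpart sketched below never returns null; a hypothetical ContentItem "ci" is assumed.

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionMetadataHelper;

public class InitExecutionMetadataPartSketch {
    public static Graph getOrCreate(ContentItem ci) {
        // returns the existing part or registers a fresh IndexedGraph under
        // ExecutionMetadata.CHAIN_EXECUTION while holding the write lock
        return ExecutionMetadataHelper.initExecutionMetadataContentPart(ci);
    }
}
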
@@ -334,7 +333,7 @@
      * @throws IllegalArgumentException if any of the requirements stated in the
      * documentation for the parameters is not fulfilled.
      */
-    public static final Map<NonLiteral,NonLiteral> initExecutionMetadata(MGraph em, TripleCollection ep, UriRef ciUri, String chainName, Boolean isDefaultChain){
+    public static final Map<BlankNodeOrIRI,BlankNodeOrIRI> initExecutionMetadata(Graph em, Graph ep, IRI ciUri, String chainName, Boolean isDefaultChain){
         if(em == null){
             throw new IllegalArgumentException("The parsed ExecutionMetadata graph MUST NOT be NULL!");
         }
@@ -342,8 +341,8 @@
             throw new IllegalArgumentException("The parsed URI of the contentItem MUST NOT be NULL!");
         }
         //1. check for the ChainExecution node for the parsed content item
-        final NonLiteral executionPlanNode;
-        NonLiteral chainExecutionNode = getChainExecutionForExecutionPlan(em, ciUri);
+        final BlankNodeOrIRI executionPlanNode;
+        BlankNodeOrIRI chainExecutionNode = getChainExecutionForExecutionPlan(em, ciUri);
         if(chainExecutionNode != null){ //init from existing execution metadata
             // -> chainName and isDefaultChain may be null
             //init from existing
@@ -386,12 +385,12 @@
             chainExecutionNode = createChainExecutionNode(em, executionPlanNode, ciUri, isDefaultChain);
         }
         //2. check/init the EngineExecution nodes for the ExecutionNodes of the ExecutionPlan
-        Map<NonLiteral,NonLiteral> executionsMap = new HashMap<NonLiteral,NonLiteral>();
-        Set<NonLiteral> executionNodes = getExecutionNodes(ep, executionPlanNode);
-        Set<NonLiteral> executions = getExecutions(em, chainExecutionNode);
-        for(NonLiteral en : executionNodes) {
+        Map<BlankNodeOrIRI,BlankNodeOrIRI> executionsMap = new HashMap<BlankNodeOrIRI,BlankNodeOrIRI>();
+        Set<BlankNodeOrIRI> executionNodes = getExecutionNodes(ep, executionPlanNode);
+        Set<BlankNodeOrIRI> executions = getExecutions(em, chainExecutionNode);
+        for(BlankNodeOrIRI en : executionNodes) {
             Iterator<Triple> it = em.filter(null, EXECUTION_NODE, en);
-            NonLiteral execution;
+            BlankNodeOrIRI execution;
             if(it.hasNext()){
                 execution = it.next().getSubject();
                 if(!executions.contains(execution)){
@@ -408,9 +407,9 @@
         }
         //3. check that there are no executions that are not part of the
         //   parsed ExecutionPlan
-        for(NonLiteral e : executions){
+        for(BlankNodeOrIRI e : executions){
             if(!executionsMap.containsKey(e)){
-                NonLiteral en = getExecutionNode(em, e);
+                BlankNodeOrIRI en = getExecutionNode(em, e);
                 throw new IllegalStateException("ChainExecution '"
                         + chainExecutionNode +"' (chain: '"+chainName+"') contains "
                         + "Execution '"+e+"' for ExecutionNode '" + en
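
Illustration (not part of this patch): initialising the metadata against a chain's execution plan; "em", "executionPlan" and "chainName" are assumed, and the map orientation (em:Execution nodes as keys, their ep:ExecutionNode as values) is inferred from the consistency check in step 3 above.

import java.util.Map;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionMetadataHelper;

public class InitExecutionMetadataSketch {
    public static Map<BlankNodeOrIRI,BlankNodeOrIRI> init(ContentItem ci,
            Graph em, Graph executionPlan, String chainName) {
        // Boolean.TRUE marks the executed chain as the default chain
        return ExecutionMetadataHelper.initExecutionMetadata(
            em, executionPlan, ci.getUri(), chainName, Boolean.TRUE);
    }
}
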
@@ -427,16 +426,16 @@
      * @param execution the em:Execution node
      * @return the ep:ExecutionNode node
      */
-    public static NonLiteral getExecutionNode(TripleCollection graph, NonLiteral execution){
+    public static BlankNodeOrIRI getExecutionNode(Graph graph, BlankNodeOrIRI execution){
         Iterator<Triple> it = graph.filter(execution, EXECUTION_NODE, null);
         if(it.hasNext()){
             Triple t = it.next();
-            Resource o = t.getObject();
-            if(o instanceof NonLiteral){
-                return (NonLiteral)o;
+            RDFTerm o = t.getObject();
+            if(o instanceof BlankNodeOrIRI){
+                return (BlankNodeOrIRI)o;
             } else {
                 throw new IllegalStateException("Value of property "+ EXECUTION_NODE
-                    + "MUST BE of type NonLiteral (triple: '"+t+"')!");
+                    + " MUST BE of type BlankNodeOrIRI (triple: '"+t+"')!");
             }
         } else {
             //maybe an em:ChainExecution
@@ -452,14 +451,14 @@
      * @param chainExecutionNode the chain execution node
      * @return the Set with all execution part of the chain execution
      */
-    public static Set<NonLiteral> getExecutions(TripleCollection em, NonLiteral chainExecutionNode) {
+    public static Set<BlankNodeOrIRI> getExecutions(Graph em, BlankNodeOrIRI chainExecutionNode) {
         if(em == null){
             throw new IllegalArgumentException("The parsed graph with the Execution metadata MUST NOT be NULL!");
         }
         if(chainExecutionNode == null){
             throw new IllegalArgumentException("The parsed chain execution plan node MUST NOT be NULL!");
         }
-        Set<NonLiteral> executionNodes = new HashSet<NonLiteral>();
+        Set<BlankNodeOrIRI> executionNodes = new HashSet<BlankNodeOrIRI>();
         Iterator<Triple> it = em.filter(null, ExecutionMetadata.EXECUTION_PART, chainExecutionNode);
         while(it.hasNext()){
             executionNodes.add(it.next().getSubject());
@@ -473,16 +472,16 @@
      * @param chainExecutionNode the chain execution node
      * @return the execution plan node
      */
-    public static NonLiteral getExecutionPlanNode(TripleCollection em, NonLiteral chainExecutionNode){
+    public static BlankNodeOrIRI getExecutionPlanNode(Graph em, BlankNodeOrIRI chainExecutionNode){
         Iterator<Triple> it = em.filter(chainExecutionNode, EXECUTION_PLAN, null);
         if(it.hasNext()){
             Triple t = it.next();
-            Resource r = t.getObject();
-            if(r instanceof NonLiteral){
-                return (NonLiteral)r;
+            RDFTerm r = t.getObject();
+            if(r instanceof BlankNodeOrIRI){
+                return (BlankNodeOrIRI)r;
             } else {
                 throw new IllegalStateException("Value of the property "+EXECUTION_PLAN
-                    + " MUST BE a NonLiteral (triple: '"+t+"')!");
+                    + " MUST BE a BlankNodeOrIRI (triple: '"+t+"')!");
             }
         } else {
             return null;
@@ -495,7 +494,7 @@
      * @param ciUri the ID of the content item
      * @return the node that {@link ExecutionMetadata#ENHANCES} the {@link ContentItem}
      */
-    public static NonLiteral getChainExecution(TripleCollection em, UriRef ciUri){
+    public static BlankNodeOrIRI getChainExecution(Graph em, IRI ciUri){
         Iterator<Triple> it = em.filter(null, ENHANCES, ciUri);
         if(it.hasNext()){
             return it.next().getSubject();
@@ -511,7 +510,7 @@
      * @param execution the execution node
      * @return <code>true</code> if the status is failed. Otherwise <code>false</code>.
      */
-    public static boolean isExecutionFailed(TripleCollection graph, NonLiteral execution){
+    public static boolean isExecutionFailed(Graph graph, BlankNodeOrIRI execution){
         return STATUS_FAILED.equals(getReference(graph,execution,STATUS));
     }
     /**
@@ -523,8 +522,8 @@
      * @param execution the execution node
      * @return <code>true</code> if the execution has already finished
      */
-    public static boolean isExecutionFinished(TripleCollection graph, NonLiteral execution){
-        UriRef status = getReference(graph,execution,STATUS);
+    public static boolean isExecutionFinished(Graph graph, BlankNodeOrIRI execution){
+        IRI status = getReference(graph,execution,STATUS);
         return STATUS_FAILED.equals(status) || STATUS_COMPLETED.equals(status);
     }
     /**
@@ -533,7 +532,7 @@
      * @param execution the execution instance
      * @return the time or <code>null</code> if not present
      */
-    public static Date getStarted(TripleCollection graph, NonLiteral execution){
+    public static Date getStarted(Graph graph, BlankNodeOrIRI execution){
         return get(graph, execution, ExecutionMetadata.STARTED, Date.class, lf);
     }
     /**
@@ -542,7 +541,7 @@
      * @param execution the execution instance
      * @return the time or <code>null</code> if not present
      */
-    public static Date getCompleted(TripleCollection graph, NonLiteral execution){
+    public static Date getCompleted(Graph graph, BlankNodeOrIRI execution){
         return get(graph, execution, ExecutionMetadata.COMPLETED, Date.class, lf);
     }
 }
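
Illustration (not part of this patch): a sketch that walks the em:Executions of a content item and reads their status and timestamps with the helpers above; "em" and "ciUri" are assumed.

import java.util.Date;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionMetadataHelper;

public class ExecutionStatusSketch {
    public static void logStatus(Graph em, IRI ciUri) {
        BlankNodeOrIRI ce = ExecutionMetadataHelper.getChainExecution(em, ciUri);
        if (ce == null) {
            return; // no em:ChainExecution enhances this content item
        }
        for (BlankNodeOrIRI ex : ExecutionMetadataHelper.getExecutions(em, ce)) {
            if (ExecutionMetadataHelper.isExecutionFinished(em, ex)) {
                Date started = ExecutionMetadataHelper.getStarted(em, ex);
                Date completed = ExecutionMetadataHelper.getCompleted(em, ex);
                System.out.println(ex + " failed="
                    + ExecutionMetadataHelper.isExecutionFailed(em, ex)
                    + " started=" + started + " completed=" + completed);
            }
        }
    }
}
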
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionPlanHelper.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionPlanHelper.java
index f637655..8e14c10 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionPlanHelper.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/ExecutionPlanHelper.java
@@ -41,21 +41,20 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.rdf.core.LiteralFactory;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.ChainException;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngineManager;
@@ -75,28 +74,28 @@
     private ExecutionPlanHelper(){/* Do not allow instances of utility classes*/}
 
     /**
-     * Writes all triples for an ep:ExecutionNode to the parsed {@link MGraph}.
-     * An {@link BNode} is use for representing the execution node resource.
+     * Writes all triples for an ep:ExecutionNode to the parsed {@link Graph}.
+     * A {@link BlankNode} is used to represent the execution node resource.
      * @param graph the graph to write the triples to. MUST NOT be NULL
-     * @param epNode the NonLiteral representing the ep:ExecutionPlan
+     * @param epNode the BlankNodeOrIRI representing the ep:ExecutionPlan
      * @param engineName the name of the engine. MUST NOT be <code>null</code> nor empty
      * @param optional if the execution of this node is optional or required
      * @param dependsOn other nodes that MUST BE executed before this one. Parse 
      * <code>null</code> or an empty set if none.
      * @return the resource representing the added ep:ExecutionNode.
-     * @deprecated use {@link #writeExecutionNode(MGraph, NonLiteral, String, boolean, Set, Map)}
+     * @deprecated use {@link #writeExecutionNode(Graph, BlankNodeOrIRI, String, boolean, Set, Map)}
      * with <code>null</code> as last parameter
      */
     @Deprecated
-    public static NonLiteral writeExecutionNode(MGraph graph,NonLiteral epNode, 
-            String engineName, boolean optional, Set<NonLiteral> dependsOn){
+    public static BlankNodeOrIRI writeExecutionNode(Graph graph,BlankNodeOrIRI epNode, 
+            String engineName, boolean optional, Set<BlankNodeOrIRI> dependsOn){
         return writeExecutionNode(graph,epNode,engineName,optional,dependsOn, null);
     }
     /**
-     * Writes all triples for an ep:ExecutionNode to the parsed {@link MGraph}.
-     * An {@link BNode} is use for representing the execution node resource.
+     * Writes all triples for an ep:ExecutionNode to the parsed {@link Graph}.
+     * A {@link BlankNode} is used to represent the execution node resource.
      * @param graph the graph to write the triples to. MUST NOT be NULL
-     * @param epNode the NonLiteral representing the ep:ExecutionPlan
+     * @param epNode the BlankNodeOrIRI representing the ep:ExecutionPlan
      * @param engineName the name of the engine. MUST NOT be <code>null</code> nor empty
      * @param optional if the execution of this node is optional or required
      * @param dependsOn other nodes that MUST BE executed before this one. Parse 
@@ -106,11 +105,11 @@
      * @return the resource representing the added ep:ExecutionNode.
      * @since 0.12.1
      */
-    public static NonLiteral writeExecutionNode(MGraph graph,NonLiteral epNode, 
-            String engineName, boolean optional, Set<NonLiteral> dependsOn, 
+    public static BlankNodeOrIRI writeExecutionNode(Graph graph,BlankNodeOrIRI epNode, 
+            String engineName, boolean optional, Set<BlankNodeOrIRI> dependsOn, 
             Map<String,Object> enhProps){
         if(graph == null){
-            throw new IllegalArgumentException("The parsed MGraph MUST NOT be NULL!");
+            throw new IllegalArgumentException("The parsed Graph MUST NOT be NULL!");
         }
         if(engineName == null || engineName.isEmpty()){
             throw new IllegalArgumentException("The parsed Engine name MUST NOT be NULL nor empty!");
@@ -118,12 +117,12 @@
         if(epNode == null){
             throw new IllegalArgumentException("The ep:ExecutionPlan instance MUST NOT be NULL!");
         }
-        NonLiteral node = new BNode();
+        BlankNodeOrIRI node = new BlankNode();
         graph.add(new TripleImpl(epNode, HAS_EXECUTION_NODE, node));
         graph.add(new TripleImpl(node, RDF_TYPE, EXECUTION_NODE));
         graph.add(new TripleImpl(node,ENGINE,new PlainLiteralImpl(engineName)));
         if(dependsOn != null){
-            for(NonLiteral dependend : dependsOn){
+            for(BlankNodeOrIRI dependend : dependsOn){
                 if(dependend != null){
                     graph.add(new TripleImpl(node, DEPENDS_ON, dependend));
                 }
@@ -134,20 +133,20 @@
         return node;
     }
     /**
-     * Creates an ExecutionPlan for the parsed chainName in the parsed Graph
+     * Creates an ExecutionPlan for the parsed chainName in the parsed Graph
      * @param graph the graph
      * @param chainName the chain name
      * @return the node representing the ex:ExecutionPlan
-     * @deprecated use {@link #createExecutionPlan(MGraph, String, Map)} with
+     * @deprecated use {@link #createExecutionPlan(Graph, String, Map)} with
      * parsing <code>null</code> as last parameter
      */
     @Deprecated
-    public static NonLiteral createExecutionPlan(MGraph graph,String chainName){
+    public static BlankNodeOrIRI createExecutionPlan(Graph graph,String chainName){
         return createExecutionPlan(graph, chainName, null);
     }
     
     /**
-     * Creates an ExecutionPlan for the parsed chainName in the parsed Graph
+     * Creates an ExecutionPlan for the parsed chainName in the parsed Graph
      * @param graph the graph
      * @param chainName the chain name
      * @param enhProps the map with the enhancement properties defined for the
@@ -155,14 +154,14 @@
      * @return the node representing the ex:ExecutionPlan
      * @since 0.12.1
      */
-    public static NonLiteral createExecutionPlan(MGraph graph,String chainName, Map<String,Object> enhProps){
+    public static BlankNodeOrIRI createExecutionPlan(Graph graph,String chainName, Map<String,Object> enhProps){
         if(graph == null){
-            throw new IllegalArgumentException("The parsed MGraph MUST NOT be NULL!");
+            throw new IllegalArgumentException("The parsed Graph MUST NOT be NULL!");
         }
         if(chainName == null || chainName.isEmpty()){
             throw new IllegalArgumentException("The parsed Chain name MUST NOT be NULL nor empty!");
         }
-        NonLiteral node = new BNode();
+        BlankNodeOrIRI node = new BlankNode();
         graph.add(new TripleImpl(node, RDF_TYPE, EXECUTION_PLAN));
         graph.add(new TripleImpl(node, CHAIN,new PlainLiteralImpl(chainName)));
         writeEnhancementProperties(graph, node, null, enhProps);
@@ -170,7 +169,7 @@
     }
     
     /**
-     * Evaluates the parsed {@link Graph execution plan} and the set of already executed
+     * Evaluates the parsed {@link Graph execution plan} and the set of already executed
      * {@link ExecutionPlan#EXECUTION_NODE ep:ExecutionNode}s to find the next
      * nodes that can be executed. 
      * @param executionPlan the execution plan
@@ -179,10 +178,10 @@
      * @return the set of nodes that can be executed next or an empty set if
      * there are no more nodes to execute.
      */
-    public static Set<NonLiteral>getExecutable(TripleCollection executionPlan, Set<NonLiteral> executed){
-        Set<NonLiteral> executeable = new HashSet<NonLiteral>();
+    public static Set<BlankNodeOrIRI> getExecutable(Graph executionPlan, Set<BlankNodeOrIRI> executed){
+        Set<BlankNodeOrIRI> executeable = new HashSet<BlankNodeOrIRI>();
         for(Iterator<Triple> nodes = executionPlan.filter(null, RDF_TYPE, EXECUTION_NODE);nodes.hasNext();){
-            NonLiteral node = nodes.next().getSubject();
+            BlankNodeOrIRI node = nodes.next().getSubject();
             if(!executed.contains(node)){
                 Iterator<Triple> dependsIt = executionPlan.filter(node, DEPENDS_ON, null);
                 boolean dependendExecuted = true;
@@ -213,7 +212,7 @@
      * with <code>null</code> as last argument instead
      */
     @Deprecated
-    public static Graph calculateExecutionPlan(String chainName, List<EnhancementEngine> availableEngines, 
+    public static ImmutableGraph calculateExecutionPlan(String chainName, List<EnhancementEngine> availableEngines, 
             Set<String> optional, Set<String> missing) {
         return calculateExecutionPlan(chainName, availableEngines, optional, missing, null);
     }
@@ -240,7 +239,7 @@
      * @return the execution plan
      * @since 0.12.1
      */
-    public static Graph calculateExecutionPlan(String chainName, List<EnhancementEngine> availableEngines, 
+    public static ImmutableGraph calculateExecutionPlan(String chainName, List<EnhancementEngine> availableEngines, 
             Set<String> optional, Set<String> missing, Map<String,Map<String,Object>> enhProps) {
         if(chainName == null || chainName.isEmpty()){
             throw new IllegalArgumentException("The parsed ChainName MUST NOT be empty!");
@@ -248,15 +247,15 @@
         Collections.sort(availableEngines,EXECUTION_ORDER_COMPARATOR);
         //now we have all required and possible also optional engines
         //  -> build the execution plan
-        MGraph ep = new IndexedMGraph();
-        NonLiteral epNode = createExecutionPlan(ep, chainName,
+        Graph ep = new IndexedGraph();
+        BlankNodeOrIRI epNode = createExecutionPlan(ep, chainName,
             enhProps != null ? enhProps.get(null) : null);
         Integer prevOrder = null;
-        Set<NonLiteral> prev = null;
-        Set<NonLiteral> current = new HashSet<NonLiteral>();
+        Set<BlankNodeOrIRI> prev = null;
+        Set<BlankNodeOrIRI> current = new HashSet<BlankNodeOrIRI>();
         for(String name : missing){
             boolean optionalMissing = optional.contains(name);
-            NonLiteral node = writeExecutionNode(ep, epNode, name, optionalMissing, null,
+            BlankNodeOrIRI node = writeExecutionNode(ep, epNode, name, optionalMissing, null,
                 enhProps == null ? null : enhProps.get(name));
             if(!optionalMissing){
                 current.add(node);
@@ -267,11 +266,11 @@
             Integer order = getEngineOrder(engine);
             if(prevOrder == null || !prevOrder.equals(order)){
                 prev = current;
-                current = new HashSet<NonLiteral>();
+                current = new HashSet<BlankNodeOrIRI>();
                 prevOrder = order;
             }
             try {
-                NonLiteral executionNode = writeExecutionNode(ep, epNode, name, 
+                BlankNodeOrIRI executionNode = writeExecutionNode(ep, epNode, name, 
                     optional.contains(name), prev, 
                     enhProps == null ? null : enhProps.get(name));
                 current.add(executionNode);
@@ -282,7 +281,7 @@
                 throw e; //rethrow it
             }
         }
-        return ep.getGraph();
+        return ep.getImmutableGraph();
     }
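
Illustration (not part of this patch): building a plan for a hypothetical chain name; the engine list is assumed and must be mutable because the method sorts it by execution order.

import java.util.Collections;
import java.util.List;
import org.apache.clerezza.commons.rdf.ImmutableGraph;
import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionPlanHelper;

public class PlanCalculationSketch {
    public static ImmutableGraph plan(List<EnhancementEngine> engines) {
        return ExecutionPlanHelper.calculateExecutionPlan(
            "myChain", engines,              // hypothetical chain name
            Collections.<String>emptySet(),  // no optional engines
            Collections.<String>emptySet(),  // no missing engines
            null);                           // no enhancement properties
    }
}
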
     /**
      * Writes the enhancementProperties for an engine/chain to the parsed 
@@ -296,7 +295,7 @@
      * if none
      * @since 0.12.1
      */
-    private static void writeEnhancementProperties(MGraph ep, NonLiteral node, String engineName,
+    private static void writeEnhancementProperties(Graph ep, BlankNodeOrIRI node, String engineName,
             Map<String,Object> enhProps) {
         if(enhProps == null){ //no enhancement properties for this engine
             return;
@@ -308,7 +307,7 @@
                         engineName == null ? "" : engineName});
             } else {
                 writeEnhancementProperty(ep, node,
-                    new UriRef(NamespaceEnum.ehp + enhprop.getKey()),
+                    new IRI(NamespaceEnum.ehp + enhprop.getKey()),
                     enhprop.getValue());
             }
         }
@@ -325,8 +324,8 @@
      * @throws NullPointerException if any of the parsed parameters is <code>null</code>
      */
     @SuppressWarnings("unchecked")
-    private static void writeEnhancementProperty(MGraph ep, NonLiteral epNode, 
-            UriRef property, Object value) {
+    private static void writeEnhancementProperty(Graph ep, BlankNodeOrIRI epNode, 
+            IRI property, Object value) {
         Collection<Object> values;
         if(value instanceof Collection<?>){
             values = (Collection<Object>)value;
@@ -366,19 +365,19 @@
      * to another execution node in the parsed graph
      * <ul><p>
      * This method does not modify the parsed graph. Therefore it is safe
-     * to parse a {@link Graph} object.<p>
+     * to parse an {@link ImmutableGraph} object.<p>
      * TODO: There is no check for cycles implemented yet.
      * @param executionPlan the graph to check
      * @return the engine names referenced by the validated execution plan.
      * @throws ChainException
      */
-    public static Set<String> validateExecutionPlan(TripleCollection executionPlan) throws ChainException {
+    public static Set<String> validateExecutionPlan(Graph executionPlan) throws ChainException {
         Iterator<Triple> executionNodeIt = executionPlan.filter(null, RDF_TYPE, EXECUTION_NODE);
         Set<String> engineNames = new HashSet<String>();
-        Map<NonLiteral, Collection<NonLiteral>> nodeDependencies = new HashMap<NonLiteral,Collection<NonLiteral>>();
+        Map<BlankNodeOrIRI, Collection<BlankNodeOrIRI>> nodeDependencies = new HashMap<BlankNodeOrIRI,Collection<BlankNodeOrIRI>>();
         //1. check the ExecutionNodes
         while(executionNodeIt.hasNext()){
-            NonLiteral node = executionNodeIt.next().getSubject();
+            BlankNodeOrIRI node = executionNodeIt.next().getSubject();
             Iterator<String> engines = EnhancementEngineHelper.getStrings(executionPlan, node,ENGINE);
             if(!engines.hasNext()){
                 throw new ChainException("Execution Node "+node+" does not define " +
@@ -394,11 +393,11 @@
                         "an empty String as engine name (property "+ENGINE+")!");
             }
             engineNames.add(engine);
-            Collection<NonLiteral> dependsOn = new HashSet<NonLiteral>();
+            Collection<BlankNodeOrIRI> dependsOn = new HashSet<BlankNodeOrIRI>();
             for(Iterator<Triple> t = executionPlan.filter(node, DEPENDS_ON, null);t.hasNext();){
-                Resource o = t.next().getObject();
-                if(o instanceof NonLiteral){
-                    dependsOn.add((NonLiteral)o);
+                RDFTerm o = t.next().getObject();
+                if(o instanceof BlankNodeOrIRI){
+                    dependsOn.add((BlankNodeOrIRI)o);
                 } else {
                     throw new ChainException("Execution Node "+node+" defines the literal '" +
                         o+"' as value for the "+DEPENDS_ON +" property. However this" +
@@ -408,9 +407,9 @@
             nodeDependencies.put(node, dependsOn);
         }
         //2. now check the dependency graph
-        for(Entry<NonLiteral,Collection<NonLiteral>> entry : nodeDependencies.entrySet()){
+        for(Entry<BlankNodeOrIRI,Collection<BlankNodeOrIRI>> entry : nodeDependencies.entrySet()){
             if(entry.getValue() != null){
-                for(NonLiteral dependent : entry.getValue()){
+                for(BlankNodeOrIRI dependent : entry.getValue()){
                     if(!nodeDependencies.containsKey(dependent)){
                         throw new ChainException("Execution Node "+entry.getKey()+
                             " defines a dependency on a non-existent ex:ExecutionNode "+
@@ -423,20 +422,20 @@
         return engineNames;
     }
     
-    public static Set<NonLiteral> getDependend(TripleCollection executionPlan, NonLiteral executionNode){
-        Set<NonLiteral> dependend = new HashSet<NonLiteral>();
+    public static Set<BlankNodeOrIRI> getDependend(Graph executionPlan, BlankNodeOrIRI executionNode){
+        Set<BlankNodeOrIRI> dependend = new HashSet<BlankNodeOrIRI>();
         addDependend(dependend, executionPlan, executionNode);
         return dependend;
     }
-    public static void addDependend(Collection<NonLiteral> collection, TripleCollection executionPlan, NonLiteral executionNode){
+    public static void addDependend(Collection<BlankNodeOrIRI> collection, Graph executionPlan, BlankNodeOrIRI executionNode){
         for(Iterator<Triple> it = executionPlan.filter(executionNode, DEPENDS_ON, null);
-                it.hasNext();collection.add((NonLiteral)it.next().getObject()));
+                it.hasNext();collection.add((BlankNodeOrIRI)it.next().getObject()));
     }
-    public static boolean isOptional(TripleCollection executionPlan, NonLiteral executionNode) {
+    public static boolean isOptional(Graph executionPlan, BlankNodeOrIRI executionNode) {
         Boolean optional = get(executionPlan,executionNode,OPTIONAL,Boolean.class,lf);
         return optional == null ? false : optional.booleanValue();
     }
-    public static String getEngine(TripleCollection executionPlan, NonLiteral executionNode) {
+    public static String getEngine(Graph executionPlan, BlankNodeOrIRI executionNode) {
         return getString(executionPlan, executionNode, ENGINE);
     }
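
Illustration (not part of this patch): the typical consumer loop over validateExecutionPlan and getExecutable, mirroring the logic of getActiveEngines in the following hunk; the plan graph "ep" is assumed.

import java.util.HashSet;
import java.util.Set;
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.stanbol.enhancer.servicesapi.ChainException;
import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionPlanHelper;

public class PlanExecutionLoopSketch {
    public static void run(Graph ep) throws ChainException {
        // fails with a ChainException if the plan is inconsistent
        Set<String> engineNames = ExecutionPlanHelper.validateExecutionPlan(ep);
        System.out.println("plan references engines: " + engineNames);
        Set<BlankNodeOrIRI> executed = new HashSet<BlankNodeOrIRI>();
        Set<BlankNodeOrIRI> executable = ExecutionPlanHelper.getExecutable(ep, executed);
        while (!executable.isEmpty()) {
            for (BlankNodeOrIRI node : executable) {
                String engine = ExecutionPlanHelper.getEngine(ep, node);
                // a real job manager would invoke the engine here and honour
                // ExecutionPlanHelper.isOptional(ep, node) on failures
                System.out.println("execute: " + engine);
                executed.add(node);
            }
            executable = ExecutionPlanHelper.getExecutable(ep, executed);
        }
    }
}
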
 
@@ -447,13 +446,13 @@
      * @param ep the execution plan
      * @return the {@link EnhancementEngine}s that are active for the parsed execution plan, in execution order
      */
-    public static List<EnhancementEngine> getActiveEngines(EnhancementEngineManager engineManager, TripleCollection ep) {
+    public static List<EnhancementEngine> getActiveEngines(EnhancementEngineManager engineManager, Graph ep) {
         List<EnhancementEngine> engines = new ArrayList<EnhancementEngine>();
-        Set<NonLiteral> visited = new HashSet<NonLiteral>();
-        Set<NonLiteral> executeable;
+        Set<BlankNodeOrIRI> visited = new HashSet<BlankNodeOrIRI>();
+        Set<BlankNodeOrIRI> executeable;
         do {
             executeable = getExecutable(ep, visited);
-            for(NonLiteral node : executeable){
+            for(BlankNodeOrIRI node : executeable){
                 String engineName = getString(ep, node, ENGINE);
                 EnhancementEngine engine = engineManager.getEngine(engineName);
                 if(engine != null){
@@ -474,7 +473,7 @@
      * @param chainName the chain name
      * @return the node or <code>null</code> if not found
      */
-    public static NonLiteral getExecutionPlan(TripleCollection graph, String chainName){
+    public static BlankNodeOrIRI getExecutionPlan(Graph graph, String chainName){
         if(graph == null){
             throw new IllegalArgumentException("The parsed graph MUST NOT be NULL!");
         }
@@ -494,23 +493,23 @@
      * @param ep the execution plan graph
      * @param executionPlanNode the execution plan node
      */
-    public static Set<NonLiteral> getExecutionNodes(TripleCollection ep, final NonLiteral executionPlanNode) {
+    public static Set<BlankNodeOrIRI> getExecutionNodes(Graph ep, final BlankNodeOrIRI executionPlanNode) {
         if(ep == null){
             throw new IllegalArgumentException("The parsed graph with the ExecutionPlan MUST NOT be NULL!");
         }
         if(executionPlanNode == null){
             throw new IllegalArgumentException("The parsed execution plan node MUST NOT be NULL!");
         }
-        Set<NonLiteral> executionNodes = new HashSet<NonLiteral>();
+        Set<BlankNodeOrIRI> executionNodes = new HashSet<BlankNodeOrIRI>();
         Iterator<Triple> it = ep.filter(executionPlanNode, HAS_EXECUTION_NODE, null);
         while(it.hasNext()){
             Triple t = it.next();
-            Resource node = t.getObject();
-            if(node instanceof NonLiteral){
-                executionNodes.add((NonLiteral)node);
+            RDFTerm node = t.getObject();
+            if(node instanceof BlankNodeOrIRI){
+                executionNodes.add((BlankNodeOrIRI)node);
             } else {
                 throw new IllegalStateException("The value of the "+HAS_EXECUTION_NODE
-                    + " property MUST BE a NonLiteral (triple: "+t+")!");
+                    + " property MUST BE a BlankNodeOrIRI (triple: "+t+")!");
             }
         }
         return executionNodes;
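
Illustration (not part of this patch): listing the engines of a chain's plan via the two lookups above; "ep" and "chainName" are assumed.

import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionPlanHelper;

public class ListPlanEnginesSketch {
    public static void list(Graph ep, String chainName) {
        BlankNodeOrIRI epNode = ExecutionPlanHelper.getExecutionPlan(ep, chainName);
        if (epNode == null) {
            return; // no ep:ExecutionPlan for that chain in this graph
        }
        for (BlankNodeOrIRI node : ExecutionPlanHelper.getExecutionNodes(ep, epNode)) {
            System.out.println(ExecutionPlanHelper.getEngine(ep, node));
        }
    }
}
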
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ChainExecution.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ChainExecution.java
index d444bdd..3e48fae 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ChainExecution.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ChainExecution.java
@@ -16,8 +16,8 @@
 */
 package org.apache.stanbol.enhancer.servicesapi.helper.execution;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.stanbol.enhancer.servicesapi.helper.EnhancementEngineHelper;
 import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionMetadataHelper;
 import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionPlan;
@@ -32,9 +32,9 @@
     
     private final String chainName;
     
-    public ChainExecution(TripleCollection graph, NonLiteral node) {
+    public ChainExecution(Graph graph, BlankNodeOrIRI node) {
         super(null,graph,node);
-        NonLiteral ep = ExecutionMetadataHelper.getExecutionPlanNode(graph, node);
+        BlankNodeOrIRI ep = ExecutionMetadataHelper.getExecutionPlanNode(graph, node);
         if(ep != null){
             chainName = EnhancementEngineHelper.getString(graph, ep, ExecutionPlan.CHAIN);
         } else {
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/Execution.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/Execution.java
index 1c52ccd..b443cbb 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/Execution.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/Execution.java
@@ -20,9 +20,9 @@
 
 import java.util.Date;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionMetadataHelper;
 import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionMetadata;
 
@@ -33,19 +33,19 @@
  */
 public class Execution implements Comparable<Execution>{
     
-    protected final NonLiteral node;
+    protected final BlankNodeOrIRI node;
     private final ExecutionNode executionNode;
-    private final UriRef status;
-    protected final TripleCollection graph;
+    private final IRI status;
+    protected final Graph graph;
     private final Date started;
     private final Date completed;
     private final Long duration;
     private final ChainExecution chain;
-    public Execution(ChainExecution parent, TripleCollection graph, NonLiteral node) {
+    public Execution(ChainExecution parent, Graph graph, BlankNodeOrIRI node) {
         this.chain = parent;
         this.graph = graph;
         this.node = node;
-        NonLiteral executionNode = ExecutionMetadataHelper.getExecutionNode(graph, node);
+        BlankNodeOrIRI executionNode = ExecutionMetadataHelper.getExecutionNode(graph, node);
         if(executionNode != null){
             this.executionNode = new ExecutionNode(graph, executionNode);
         } else {
@@ -65,7 +65,7 @@
      * The Status of the execution
      * @return the status
      */
-    public final UriRef getStatus() {
+    public final IRI getStatus() {
         return status;
     }
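
Illustration (not part of this patch): wrapping an em:Execution node; a null parent is permitted (ChainExecution above passes null to super), and "em" plus "executionNode" are assumed.

import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.stanbol.enhancer.servicesapi.helper.execution.Execution;

public class ExecutionWrapperSketch {
    public static IRI statusOf(Graph em, BlankNodeOrIRI executionNode) {
        Execution execution = new Execution(null, em, executionNode);
        return execution.getStatus(); // e.g. em:StatusCompleted or em:StatusFailed
    }
}
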
 
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionMetadata.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionMetadata.java
index 6482abd..0b085a3 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionMetadata.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionMetadata.java
@@ -20,9 +20,9 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionMetadataHelper;
 
 public final class ExecutionMetadata {
@@ -31,8 +31,8 @@
     private final ChainExecution chainExecution;
     private final Map<String,Execution> engineExecutions;
 
-    public static ExecutionMetadata parseFrom(TripleCollection executionMetadata, UriRef contentItemUri){
-        NonLiteral ce = ExecutionMetadataHelper.getChainExecution(executionMetadata, contentItemUri);
+    public static ExecutionMetadata parseFrom(Graph executionMetadata, IRI contentItemUri){
+        BlankNodeOrIRI ce = ExecutionMetadataHelper.getChainExecution(executionMetadata, contentItemUri);
         ExecutionMetadata em;
         if(ce != null){
             em = new ExecutionMetadata(executionMetadata, contentItemUri,ce);
@@ -42,10 +42,10 @@
         return em;
     }
     
-    private ExecutionMetadata(TripleCollection executionMetadata, UriRef contentItemUri, NonLiteral ce){
+    private ExecutionMetadata(Graph executionMetadata, IRI contentItemUri, BlankNodeOrIRI ce){
         chainExecution = new ChainExecution(executionMetadata, ce);
         engineExecutions = new HashMap<String,Execution>();
-        for(NonLiteral ex : ExecutionMetadataHelper.getExecutions(executionMetadata, ce)){
+        for(BlankNodeOrIRI ex : ExecutionMetadataHelper.getExecutions(executionMetadata, ce)){
             Execution execution = new Execution(chainExecution,executionMetadata, ex);
             engineExecutions.put(execution.getExecutionNode().getEngineName(),execution);
         }
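
Illustration (not part of this patch): parsing the whole execution metadata of a content item into the wrapper objects; "em" and "ciUri" are assumed.

import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.stanbol.enhancer.servicesapi.helper.execution.ExecutionMetadata;

public class ParseExecutionMetadataSketch {
    public static ExecutionMetadata parse(Graph em, IRI ciUri) {
        // wraps the em:ChainExecution plus one Execution per engine,
        // keyed by engine name
        return ExecutionMetadata.parseFrom(em, ciUri);
    }
}
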
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionNode.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionNode.java
index 14e3201..9a5962d 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionNode.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/helper/execution/ExecutionNode.java
@@ -16,8 +16,8 @@
 */
 package org.apache.stanbol.enhancer.servicesapi.helper.execution;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.stanbol.enhancer.servicesapi.helper.ExecutionPlanHelper;
 
 /**
@@ -27,12 +27,12 @@
  */
 public class ExecutionNode {
     
-    final NonLiteral node;
-    private final TripleCollection ep;
+    final BlankNodeOrIRI node;
+    private final Graph ep;
     private final boolean optional;
     private final String engineName;
     
-    public ExecutionNode(TripleCollection executionPlan, NonLiteral node) {
+    public ExecutionNode(Graph executionPlan, BlankNodeOrIRI node) {
         this.node = node;
         this.ep = executionPlan;
         this.optional = ExecutionPlanHelper.isOptional(ep, node);
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/AbstractContentItemFactory.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/AbstractContentItemFactory.java
index c792e78..6c6efc2 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/AbstractContentItemFactory.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/AbstractContentItemFactory.java
@@ -20,9 +20,9 @@
 import java.io.InputStream;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
 import org.apache.felix.scr.annotations.Service;
@@ -38,8 +38,8 @@
  * Abstract implementation of the {@link ContentItemFactory} that requires only
  * the three abstract methods <ul>
  * <li> {@link #createBlob(ContentSource)}
- * <li> {@link #createContentItem(String, Blob, MGraph)}
- * <li> {@link #createContentItem(UriRef, Blob, MGraph)}
+ * <li> {@link #createContentItem(String, Blob, Graph)}
+ * <li> {@link #createContentItem(IRI, Blob, Graph)}
  * </ul> to be overridden.<p>
  * Implementers should NOTE that {@link #createBlob(ContentSource)} will be
  * called to create the main {@link Blob} instance for a contentItem before
@@ -77,7 +77,7 @@
     
     @Override
     public final ContentItem createContentItem(ContentSource source) throws IOException {
-        return createContentItem((UriRef)null, source, null);
+        return createContentItem((IRI)null, source, null);
     }
 
     @Override
@@ -86,7 +86,7 @@
     }
 
     @Override
-    public final ContentItem createContentItem(UriRef id, ContentSource source) throws IOException {
+    public final ContentItem createContentItem(IRI id, ContentSource source) throws IOException {
         return createContentItem(id, source, null);
     }
 
@@ -96,14 +96,14 @@
     }
 
     @Override
-    public final ContentItem createContentItem(ContentReference reference, MGraph metadata) throws IOException {
+    public final ContentItem createContentItem(ContentReference reference, Graph metadata) throws IOException {
         if(reference == null){
             throw new IllegalArgumentException("The parsed ContentReference MUST NOT be NULL!");
         }
-        return createContentItem(new UriRef(reference.getReference()),createBlob(reference),metadata);
+        return createContentItem(new IRI(reference.getReference()),createBlob(reference),metadata);
     }
     @Override
-    public final ContentItem createContentItem(String prefix, ContentSource source,MGraph metadata) throws IOException {
+    public final ContentItem createContentItem(String prefix, ContentSource source,Graph metadata) throws IOException {
         if(prefix == null){
             throw new IllegalArgumentException("The parsed prefix MUST NOT be NULL!");
         }
@@ -114,7 +114,7 @@
     }
 
     @Override
-    public final ContentItem createContentItem(UriRef id, ContentSource source, MGraph metadata) throws IOException {
+    public final ContentItem createContentItem(IRI id, ContentSource source, Graph metadata) throws IOException {
         if(source == null){
             throw new IllegalArgumentException("The parsed ContentSource MUST NOT be NULL!");
         }
@@ -136,7 +136,7 @@
      * returned ContentItem.
      * @return the created content item
      */
-    protected abstract ContentItem createContentItem(UriRef id, Blob blob, MGraph metadata);
+    protected abstract ContentItem createContentItem(IRI id, Blob blob, Graph metadata);
     
     /**
      * Creates a ContentItem for the parsed parameters
@@ -152,7 +152,7 @@
      * returned ContentItem.
      * @return the created content item
      */
-    protected abstract ContentItem createContentItem(String prefix, Blob blob, MGraph metadata);
+    protected abstract ContentItem createContentItem(String prefix, Blob blob, Graph metadata);
 
     @Override
     public abstract Blob createBlob(ContentSource source) throws IOException;
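
Illustration (not part of this patch): creating a ContentItem with the migrated factory API; "factory" and "source" are assumed, and the example IRI is hypothetical.

import java.io.IOException;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;
import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
import org.apache.stanbol.enhancer.servicesapi.ContentSource;

public class ContentItemCreationSketch {
    public static ContentItem create(ContentItemFactory factory, ContentSource source)
            throws IOException {
        // passing null lets the implementation create the metadata Graph
        return factory.createContentItem(new IRI("urn:example:ci-1"), source, null);
    }
}
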
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/ContentItemImpl.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/ContentItemImpl.java
index dd43fbb..b36cac5 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/ContentItemImpl.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/ContentItemImpl.java
@@ -22,10 +22,8 @@
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.access.LockableMGraph;
-import org.apache.clerezza.rdf.core.access.LockableMGraphWrapper;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.NoSuchPartException;
@@ -44,7 +42,7 @@
  * it is marked as abstract and has only a protected constructor because it is
  * not intended that users directly instantiate it. The intended usage is to
  * create subclasses that instantiate ContentItems with specific combinations
- * of {@link Blob} nad {@link MGraph} implementations.<p>
+ * of {@link Blob} and {@link Graph} implementations.<p>
  * Examples are: <ul>
  * <li>The {@link InMemoryContentItem} intended for in-memory
  * storage of ContentItems during the stateless enhancement workflow
@@ -61,22 +59,22 @@
     /**
      * Holds the content parts of this ContentItem
      */
-	private final Map<UriRef, Object> parts = new LinkedHashMap<UriRef, Object>();
+	private final Map<IRI, Object> parts = new LinkedHashMap<IRI, Object>();
 	/**
 	 * The uri of the ContentItem
 	 */
-	private final UriRef uri;
+	private final IRI uri;
 	/**
 	 * The uri of the main content part (the {@link Blob} parsed with the constructor)
 	 */
-	private final UriRef mainBlobUri;
+	private final IRI mainBlobUri;
 
-    private final LockableMGraph metadata; 
+    private final Graph metadata; 
 
     protected final Lock readLock;
     protected final Lock writeLock;
     
-	protected ContentItemImpl(UriRef uri, Blob main, MGraph metadata) {
+	protected ContentItemImpl(IRI uri, Blob main, Graph metadata) {
 	    if(uri == null){
 	        throw new IllegalArgumentException("The URI for the ContentItem MUST NOT be NULL!");
 	    }
@@ -87,19 +85,15 @@
 	        throw new IllegalArgumentException("The parsed graph MUST NOT be NULL!");
 	    }
         this.uri = uri;
-        this.mainBlobUri = new UriRef(uri.getUnicodeString()+MAIN_BLOB_SUFFIX);
+        this.mainBlobUri = new IRI(uri.getUnicodeString()+MAIN_BLOB_SUFFIX);
         this.parts.put(mainBlobUri, main);
-        if(metadata instanceof LockableMGraph){
-            this.metadata = (LockableMGraph)metadata;
-        } else {
-            this.metadata = new LockableMGraphWrapper(metadata);
-        }
+        this.metadata = metadata;
         //init the read and write lock
         this.readLock = this.metadata.getLock().readLock();
         this.writeLock = this.metadata.getLock().writeLock();
 		//Better parse the Blob in the Constructor than calling a public
 		//method on an instance that may not be fully initialised
-		//parts.put(new UriRef(uri.getUnicodeString()+"_main"), getBlob());
+		//parts.put(new IRI(uri.getUnicodeString()+"_main"), getBlob());
 	}
 	
 	@Override
@@ -108,7 +102,7 @@
 	}
 	
 	/**
-	 * Final getter retrieving the Blob via {@link #getPart(UriRef, Class)}
+	 * Final getter retrieving the Blob via {@link #getPart(IRI, Class)}
 	 * with <code>{@link #getUri()}+{@link #MAIN_BLOB_SUFFIX}</code>
 	 */
 	@Override
@@ -131,7 +125,7 @@
 	
     @SuppressWarnings("unchecked")
 	@Override
-	public <T> T getPart(UriRef uri, Class<T> clazz) throws NoSuchPartException {
+	public <T> T getPart(IRI uri, Class<T> clazz) throws NoSuchPartException {
         readLock.lock();
         try {
             Object part = parts.get(uri);
@@ -151,11 +145,11 @@
 	}
 
 	@Override
-	public UriRef getPartUri(int index) throws NoSuchPartException {
+	public IRI getPartUri(int index) throws NoSuchPartException {
         readLock.lock();
         try {
     		int count = 0;
-    		for(Map.Entry<UriRef, Object> entry : parts.entrySet()) {
+    		for(Map.Entry<IRI, Object> entry : parts.entrySet()) {
     			if (count == index) {
     				return entry.getKey();
     			}
@@ -174,7 +168,7 @@
         try {
     		Object result = null;
     		int count = 0;
-    		for(Map.Entry<UriRef, Object> entry : parts.entrySet()) {
+    		for(Map.Entry<IRI, Object> entry : parts.entrySet()) {
     			if (count == index) {
     				result = entry.getValue();
     				if (!result.getClass().isAssignableFrom(clazz)) {
@@ -191,7 +185,7 @@
 	}
 	
 	@Override
-	public Object addPart(UriRef uriRef, Object object) {
+	public Object addPart(IRI uriRef, Object object) {
         writeLock.lock();
         try {
     	    if(uriRef == null || object == null){
@@ -219,20 +213,20 @@
 	    }
         writeLock.lock();
         try {
-            UriRef partUri = getPartUri(index);
+            IRI partUri = getPartUri(index);
             parts.remove(partUri);
         } finally {
             writeLock.unlock();
         }
 	}
 	@Override
-	public void removePart(UriRef uriRef) {
+	public void removePart(IRI uriRef) {
 	    if(uriRef == null){
 	        throw new IllegalArgumentException("The parsed uriRef MUST NOT be NULL!");
 	    }
         writeLock.lock();
         try {
-            UriRef mainContentPartUri = parts.keySet().iterator().next();
+            IRI mainContentPartUri = parts.keySet().iterator().next();
             if(uriRef.equals(mainContentPartUri)){
                 throw new IllegalStateException("The main ContentPart (uri '"
                     + uriRef+"') CAN NOT be removed!");
@@ -246,12 +240,12 @@
 	}
 	
     @Override
-	public UriRef getUri() {
+	public IRI getUri() {
 		return uri;
 	}
 
 	@Override
-	public LockableMGraph getMetadata() {
+	public Graph getMetadata() {
 	    return metadata;
 	}
 	@Override
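
Illustration (not part of this patch): with LockableMGraph gone, the item's lock is now backed by its metadata Graph; callers still guard iteration with the read lock, as sketched below for an assumed ContentItem "ci".

import java.util.Iterator;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.stanbol.enhancer.servicesapi.ContentItem;

public class MetadataAccessSketch {
    public static int countOutgoing(ContentItem ci) {
        ci.getLock().readLock().lock();
        try {
            int count = 0;
            // count all triples with the content item as subject
            for (Iterator<Triple> it = ci.getMetadata().filter(ci.getUri(), null, null);
                    it.hasNext(); it.next()) {
                count++;
            }
            return count;
        } finally {
            ci.getLock().readLock().unlock();
        }
    }
}
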
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/SingleEngineChain.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/SingleEngineChain.java
index 118cafd..19fff37 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/SingleEngineChain.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/impl/SingleEngineChain.java
@@ -23,9 +23,9 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ChainException;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -46,7 +46,7 @@
  */
 public class SingleEngineChain implements Chain {
 
-    private final Graph executionPlan;
+    private final ImmutableGraph executionPlan;
     private final EnhancementEngine engine;
     private final String name;
     
@@ -71,14 +71,14 @@
         }
         this.engine = engine;
         this.name = engine.getName()+"Chain";
-        MGraph graph = new IndexedMGraph();
+        Graph graph = new IndexedGraph();
         writeExecutionNode(graph, createExecutionPlan(graph, name, null),
             engine.getName(), false, null, enhProps);
-        executionPlan = graph.getGraph();
+        executionPlan = graph.getImmutableGraph();
     }
     
     @Override
-    public Graph getExecutionPlan() throws ChainException {
+    public ImmutableGraph getExecutionPlan() throws ChainException {
         return executionPlan;
     }
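
Illustration (not part of this patch): obtaining the immutable plan of a SingleEngineChain; the one-argument constructor is an assumption, since its signature lies outside this hunk.

import org.apache.clerezza.commons.rdf.ImmutableGraph;
import org.apache.stanbol.enhancer.servicesapi.Chain;
import org.apache.stanbol.enhancer.servicesapi.ChainException;
import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
import org.apache.stanbol.enhancer.servicesapi.impl.SingleEngineChain;

public class SingleEngineChainSketch {
    public static ImmutableGraph planFor(EnhancementEngine engine) throws ChainException {
        Chain chain = new SingleEngineChain(engine); // assumed constructor
        return chain.getExecutionPlan();
    }
}
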
 
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Enhancer.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Enhancer.java
index 70d6e59..dbeb8b9 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Enhancer.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Enhancer.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.servicesapi.rdf;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 public final class Enhancer {
 
@@ -25,12 +25,12 @@
      */
     private Enhancer() {}
 
-    public static final UriRef CONTENT_ITEM = new UriRef(NamespaceEnum.enhancer+"ContentItem");
-    public static final UriRef ENHANCEMENT_ENGINE = new UriRef(NamespaceEnum.enhancer+"EnhancementEngine");
-    public static final UriRef ENHANCEMENT_CHAIN = new UriRef(NamespaceEnum.enhancer+"EnhancementChain");
-    public static final UriRef ENHANCER = new UriRef(NamespaceEnum.enhancer+"Enhancer");
-    public static final UriRef HAS_ENGINE = new UriRef(NamespaceEnum.enhancer+"hasEngine");
-    public static final UriRef HAS_CHAIN = new UriRef(NamespaceEnum.enhancer+"hasChain");
-    public static final UriRef HAS_DEFAULT_CHAIN = new UriRef(NamespaceEnum.enhancer+"hasDefaultChain");
+    public static final IRI CONTENT_ITEM = new IRI(NamespaceEnum.enhancer+"ContentItem");
+    public static final IRI ENHANCEMENT_ENGINE = new IRI(NamespaceEnum.enhancer+"EnhancementEngine");
+    public static final IRI ENHANCEMENT_CHAIN = new IRI(NamespaceEnum.enhancer+"EnhancementChain");
+    public static final IRI ENHANCER = new IRI(NamespaceEnum.enhancer+"Enhancer");
+    public static final IRI HAS_ENGINE = new IRI(NamespaceEnum.enhancer+"hasEngine");
+    public static final IRI HAS_CHAIN = new IRI(NamespaceEnum.enhancer+"hasChain");
+    public static final IRI HAS_DEFAULT_CHAIN = new IRI(NamespaceEnum.enhancer+"hasDefaultChain");
     
 }
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionMetadata.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionMetadata.java
index 18ecbf6..e22e654 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionMetadata.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionMetadata.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.servicesapi.rdf;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ChainManager;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -38,41 +38,41 @@
      * This is considered an abstract concept. Use {@link #CHAIN_EXECUTION} or
      * {@link #ENGINE_EXECUTION} depending on the type of the executed component.
      */
-    public static final UriRef EXECUTION = new UriRef(NamespaceEnum.em+"Execution");
+    public static final IRI EXECUTION = new IRI(NamespaceEnum.em+"Execution");
 
     /**
      * Property that links {@link #EXECUTION} to its parent 
      * {@link #CHAIN_EXECUTION}.
      */
-    public static final UriRef EXECUTION_PART = new UriRef(NamespaceEnum.em+"executionPart");
+    public static final IRI EXECUTION_PART = new IRI(NamespaceEnum.em+"executionPart");
     
     /**
      * The current status of an {@link #EXECUTION}. Values are expected to be
      * one of {@link #EXECUTION_STATUS}.
      */
-    public static final UriRef STATUS = new UriRef(NamespaceEnum.em+"status");
+    public static final IRI STATUS = new IRI(NamespaceEnum.em+"status");
 
     /**
      * The 'xsd:dateTime' when an {@link #EXECUTION} started
      */
-    public static final UriRef STARTED = new UriRef(NamespaceEnum.em+"started");
+    public static final IRI STARTED = new IRI(NamespaceEnum.em+"started");
 
     /**
      * The 'xsd:dateTime' when an {@link #EXECUTION} completed or
      * failed.
      */
-    public static final UriRef COMPLETED = new UriRef(NamespaceEnum.em+"completed");
+    public static final IRI COMPLETED = new IRI(NamespaceEnum.em+"completed");
 
     /**
      * Allows adding a status message to an {@link #EXECUTION} node.
      */
-    public static final UriRef STATUS_MESSAGE = new UriRef(NamespaceEnum.em+"statusMessage");
+    public static final IRI STATUS_MESSAGE = new IRI(NamespaceEnum.em+"statusMessage");
     
     /**
      * Class representing the execution of a {@link Chain}. This class is a 
      * sub-class of {@link #EXECUTION}
      */
-    public static final UriRef CHAIN_EXECUTION = new UriRef(NamespaceEnum.em+"ChainExecution");
+    public static final IRI CHAIN_EXECUTION = new IRI(NamespaceEnum.em+"ChainExecution");
 
     /**
      * Property indicating if the {@link ExecutionPlan#EXECUTION_PLAN} executed
@@ -80,65 +80,65 @@
      * {@link Chain} at that time. Values are expected to be of data type
      * 'xsd:boolean'.
      */
-    public static final UriRef IS_DEFAULT_CHAIN = new UriRef(NamespaceEnum.em+"defualtChain");
+    public static final IRI IS_DEFAULT_CHAIN = new IRI(NamespaceEnum.em+"defualtChain");
 
     /**
      * Property that links from the {@link #CHAIN_EXECUTION} to the
      * {@link ExecutionPlan#EXECUTION_PLAN}
      */
-    public static final UriRef EXECUTION_PLAN = new UriRef(NamespaceEnum.em+"executionPlan");
+    public static final IRI EXECUTION_PLAN = new IRI(NamespaceEnum.em+"executionPlan");
 
     /**
      * Property that links from the {@link #CHAIN_EXECUTION} node to the
      * enhanced {@link ContentItem#getUri()}
      */
-    public static final UriRef ENHANCES = new UriRef(NamespaceEnum.em+"enhances");
+    public static final IRI ENHANCES = new IRI(NamespaceEnum.em+"enhances");
 
     /**
      * Property that links from {@link ContentItem#getUri()} to the 
      * {@link #CHAIN_EXECUTION} defining the root node of the execution metadata
      */
-    public static final UriRef ENHANCED_BY = new UriRef(NamespaceEnum.em+"enhancedBy");
+    public static final IRI ENHANCED_BY = new IRI(NamespaceEnum.em+"enhancedBy");
 
     /**
      * Class that represents the execution of an {@link EnhancementEngine}.
      *  This is a sub-class of {@link #EXECUTION}.
      */
-    public static final UriRef ENGINE_EXECUTION = new UriRef(NamespaceEnum.em+"EngineExecution");
+    public static final IRI ENGINE_EXECUTION = new IRI(NamespaceEnum.em+"EngineExecution");
 
     /**
      * Property that links from the {@link #ENGINE_EXECUTION} to the
      * {@link ExecutionPlan#EXECUTION_NODE}
      */
-    public static final UriRef EXECUTION_NODE = new UriRef(NamespaceEnum.em+"executionNode");
+    public static final IRI EXECUTION_NODE = new IRI(NamespaceEnum.em+"executionNode");
 
     /**
      * Type for all ExecutionStatus values: {@link #STATUS_SCHEDULED},
      * {@link #STATUS_IN_PROGRESS}, {@link #STATUS_COMPLETED}, {@link #STATUS_SKIPPED},
      * {@link #STATUS_FAILED}.
      */
-    public static final UriRef EXECUTION_STATUS = new UriRef(NamespaceEnum.em+"ExecutionStatus");
+    public static final IRI EXECUTION_STATUS = new IRI(NamespaceEnum.em+"ExecutionStatus");
 
     /**
      * em:ExecutionStatus indicating that the execution is scheduled, but has not yet started
      */
-    public static final UriRef STATUS_SCHEDULED = new UriRef(NamespaceEnum.em+"StatusSheduled");
+    public static final IRI STATUS_SCHEDULED = new IRI(NamespaceEnum.em+"StatusSheduled");
     /**
      * em:ExecutionStatus indicating that the execution was skipped 
      */
-    public static final UriRef STATUS_SKIPPED = new UriRef(NamespaceEnum.em+"StatusSkipped");
+    public static final IRI STATUS_SKIPPED = new IRI(NamespaceEnum.em+"StatusSkipped");
     /**
      * em:ExecutionStatus indicating that the execution is in progress
      */
-    public static final UriRef STATUS_IN_PROGRESS = new UriRef(NamespaceEnum.em+"StatusInProgress");
+    public static final IRI STATUS_IN_PROGRESS = new IRI(NamespaceEnum.em+"StatusInProgress");
     /**
      * em:ExecutionStatus indicating that the execution has completed successfully
      */
-    public static final UriRef STATUS_COMPLETED = new UriRef(NamespaceEnum.em+"StatusCompleted");
+    public static final IRI STATUS_COMPLETED = new IRI(NamespaceEnum.em+"StatusCompleted");
     /**
      * em:ExecutionStatus indicating that the execution has failed
      */
-    public static final UriRef STATUS_FAILED = new UriRef(NamespaceEnum.em+"StatusFailed");
+    public static final IRI STATUS_FAILED = new IRI(NamespaceEnum.em+"StatusFailed");
 
     
 }
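
A minimal sketch (not part of this patch) of querying these constants; it assumes an execution-metadata Graph such as the one initialized by ExecutionMetadataHelper.initExecutionMetadataContentPart(..) in the tests further below:

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionMetadata;

    public final class ExecutionStatusSketch {
        /** Returns true if any em:Execution in the metadata carries em:status em:StatusFailed. */
        public static boolean hasFailedExecution(Graph em) {
            Iterator<Triple> it = em.filter(null, ExecutionMetadata.STATUS,
                    ExecutionMetadata.STATUS_FAILED);
            return it.hasNext();
        }
    }
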
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionPlan.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionPlan.java
index ed43959..46f269d 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionPlan.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/ExecutionPlan.java
@@ -16,11 +16,11 @@
 */
 package org.apache.stanbol.enhancer.servicesapi.rdf;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
 
 /**
- * Defines the {@link UriRef}s for all classes and properties defined by the
+ * Defines the {@link IRI}s for all classes and properties defined by the
  * Stanbol Enhancer Execution Plan ontology.
  *
  */
@@ -30,35 +30,35 @@
     /**
      * The Class ep:ExecutionPlan
      */
-    public static final UriRef EXECUTION_PLAN = new UriRef(NamespaceEnum.ep+"ExecutionPlan");
+    public static final IRI EXECUTION_PLAN = new IRI(NamespaceEnum.ep+"ExecutionPlan");
     /**
      * The property ep:chain linking an {@link #EXECUTION_PLAN} to the name
      * of the chain this plan is defined for
      */
-    public static final UriRef CHAIN = new UriRef(NamespaceEnum.ep+"chain");
+    public static final IRI CHAIN = new IRI(NamespaceEnum.ep+"chain");
     /**
      * the property ep:hasExecutionNode linking an {@link #EXECUTION_PLAN} with
      * all its {@link #EXECUTION_NODE}s
      */
-    public static final UriRef HAS_EXECUTION_NODE = new UriRef(NamespaceEnum.ep+"hasExecutionNode");
+    public static final IRI HAS_EXECUTION_NODE = new IRI(NamespaceEnum.ep+"hasExecutionNode");
     /**
      * The Class ep:ExecutionNode
      */
-    public static final UriRef EXECUTION_NODE = new UriRef(NamespaceEnum.ep+"ExecutionNode");
+    public static final IRI EXECUTION_NODE = new IRI(NamespaceEnum.ep+"ExecutionNode");
     /**
      * The property ep:engine linking an {@link #EXECUTION_NODE} with the name of 
      * the {@link EnhancementEngine} to be executed.
      */
-    public static final UriRef ENGINE = new UriRef(NamespaceEnum.ep+"engine");
+    public static final IRI ENGINE = new IRI(NamespaceEnum.ep+"engine");
     /**
      * The property ep:dependsOn defining the list of other {@link #EXECUTION_NODE}s
      * this one depends on
      */
-    public static final UriRef DEPENDS_ON = new UriRef(NamespaceEnum.ep+"dependsOn");
+    public static final IRI DEPENDS_ON = new IRI(NamespaceEnum.ep+"dependsOn");
     /**
      * The property ep:optional that can be used to define that the execution of
      * an {@link #EXECUTION_NODE} is optional. The default is <code>false</code>.
      */
-    public static final UriRef OPTIONAL = new UriRef(NamespaceEnum.ep+"optional");
+    public static final IRI OPTIONAL = new IRI(NamespaceEnum.ep+"optional");
  
 }
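
Execution plans are normally produced by ExecutionPlanHelper.calculateExecutionPlan(..) (see the tests below); purely as an illustration (not part of this patch), a hand-built plan using this vocabulary could look as follows. The chain and engine names are hypothetical, and representing them as plain literals is an assumption for this sketch:

    import org.apache.clerezza.commons.rdf.BlankNode;
    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionPlan;
    import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;

    public class ExecutionPlanSketch {
        public static void main(String[] args) {
            Graph g = new SimpleGraph();
            BlankNodeOrIRI plan = new BlankNode();
            BlankNodeOrIRI node = new BlankNode();
            g.add(new TripleImpl(plan, Properties.RDF_TYPE, ExecutionPlan.EXECUTION_PLAN));
            g.add(new TripleImpl(plan, ExecutionPlan.CHAIN, new PlainLiteralImpl("default")));
            g.add(new TripleImpl(plan, ExecutionPlan.HAS_EXECUTION_NODE, node));
            g.add(new TripleImpl(node, Properties.RDF_TYPE, ExecutionPlan.EXECUTION_NODE));
            g.add(new TripleImpl(node, ExecutionPlan.ENGINE, new PlainLiteralImpl("langdetect")));
        }
    }
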
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/OntologicalClasses.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/OntologicalClasses.java
index 275c0db..aeed269 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/OntologicalClasses.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/OntologicalClasses.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.servicesapi.rdf;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Common entity types, a.k.a. ontological classes to be used as valuation of
@@ -30,19 +30,19 @@
  */
 public final class OntologicalClasses {
 
-    public static final UriRef DBPEDIA_PERSON = new UriRef(
+    public static final IRI DBPEDIA_PERSON = new IRI(
             NamespaceEnum.dbpedia_ont+"Person");
 
-    public static final UriRef DBPEDIA_PLACE = new UriRef(
+    public static final IRI DBPEDIA_PLACE = new IRI(
             NamespaceEnum.dbpedia_ont+"Place");
 
-    public static final UriRef DBPEDIA_ORGANISATION = new UriRef(
+    public static final IRI DBPEDIA_ORGANISATION = new IRI(
             NamespaceEnum.dbpedia_ont+"Organisation");
 
-    public static final UriRef SKOS_CONCEPT = new UriRef(
+    public static final IRI SKOS_CONCEPT = new IRI(
         NamespaceEnum.skos+"Concept");
     
-    public static final UriRef DC_LINGUISTIC_SYSTEM = new UriRef(
+    public static final IRI DC_LINGUISTIC_SYSTEM = new IRI(
         NamespaceEnum.dc+"LinguisticSystem");
 
     private OntologicalClasses() {
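
As the class comment above notes, these IRIs serve as values of the rdf:type property; a minimal sketch (not part of this patch; the entity IRI is chosen only as an example):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.enhancer.servicesapi.rdf.OntologicalClasses;
    import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;

    public class EntityTypeSketch {
        public static void main(String[] args) {
            Graph g = new SimpleGraph();
            IRI entity = new IRI("http://dbpedia.org/resource/Paris"); // example entity
            g.add(new TripleImpl(entity, Properties.RDF_TYPE, OntologicalClasses.DBPEDIA_PLACE));
        }
    }
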
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Properties.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Properties.java
index 616e067..d6dcd14 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Properties.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/Properties.java
@@ -16,8 +16,7 @@
 */
 package org.apache.stanbol.enhancer.servicesapi.rdf;
 
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Namespace of standard properties to be used as typed metadata by
@@ -41,12 +40,12 @@
      * the target of this property is an owl:Class such as the ones defined in
      * {@link OntologicalClasses}
      */
-    public static final UriRef RDF_TYPE = new UriRef(NamespaceEnum.rdf + "type");
+    public static final IRI RDF_TYPE = new IRI(NamespaceEnum.rdf + "type");
 
     /**
      * A label for resources of any type.
      */
-    public static final UriRef RDFS_LABEL = new UriRef(NamespaceEnum.rdfs
+    public static final IRI RDFS_LABEL = new IRI(NamespaceEnum.rdfs
             + "label");
 
     /**
@@ -56,14 +55,14 @@
      * @deprecated use ENHANCER_ENTITY instead
      */
     @Deprecated
-    public static final UriRef DC_REFERENCES = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_REFERENCES = new IRI(NamespaceEnum.dc
             + "references");
 
     /**
      * Creation date of a resource. Used by Stanbol Enhancer to annotate the creation date
      * of the enhancement by the enhancement engine
      */
-    public static final UriRef DC_CREATED = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_CREATED = new IRI(NamespaceEnum.dc
             + "created");
 
     /**
@@ -72,21 +71,21 @@
      * enhancement engine as the one creating it. Multiple changes of the
      * creating enhancement engines are not considered as modifications.
      */
-    public static final UriRef DC_MODIFIED = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_MODIFIED = new IRI(NamespaceEnum.dc
             + "modified");
 
     /**
      * The entity responsible for the creation of a resource. Used by Stanbol Enhancer to
      * annotate the enhancement engine that created an enhancement
      */
-    public static final UriRef DC_CREATOR = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_CREATOR = new IRI(NamespaceEnum.dc
             + "creator");
     /**
      * The entity that contributed to a resource. Used by Stanbol Enhancer to
      * annotate the enhancement engine that changed an enhancement originally
      * created by another enhancement engine.
      */
-    public static final UriRef DC_CONTRIBUTOR = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_CONTRIBUTOR = new IRI(NamespaceEnum.dc
             + "contributor");
 
     /**
@@ -94,21 +93,21 @@
      * the type of the enhancement. Values should be URIs defined in some
      * controlled vocabulary
      */
-    public static final UriRef DC_TYPE = new UriRef(NamespaceEnum.dc + "type");
+    public static final IRI DC_TYPE = new IRI(NamespaceEnum.dc + "type");
 
     /**
      * A related resource that is required by the described resource to support
      * its function, delivery, or coherence. Stanbol Enhancer uses this property to refer to
      * other enhancements an enhancement depends on.
      */
-    public static final UriRef DC_REQUIRES = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_REQUIRES = new IRI(NamespaceEnum.dc
             + "requires");
 
     /**
      * A related resource. Stanbol Enhancer uses this property to define enhancements that
      * are referred to by the current one
      */
-    public static final UriRef DC_RELATION = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_RELATION = new IRI(NamespaceEnum.dc
             + "relation");
 
     /**
@@ -117,54 +116,54 @@
      * separator.
      */
     @Deprecated
-    public static final UriRef GEORSS_POINT = new UriRef(NamespaceEnum.georss
+    public static final IRI GEORSS_POINT = new IRI(NamespaceEnum.georss
             + "point");
 
     @Deprecated
-    public static final UriRef GEO_LAT = new UriRef(NamespaceEnum.geo + "lat");
+    public static final IRI GEO_LAT = new IRI(NamespaceEnum.geo + "lat");
 
     @Deprecated
-    public static final UriRef GEO_LONG = new UriRef(NamespaceEnum.geo + "long");
+    public static final IRI GEO_LONG = new IRI(NamespaceEnum.geo + "long");
 
-    public static final UriRef SKOS_BROADER = new UriRef(NamespaceEnum.skos + "broader");
+    public static final IRI SKOS_BROADER = new IRI(NamespaceEnum.skos + "broader");
     
-    public static final UriRef SKOS_NARROWER = new UriRef(NamespaceEnum.skos + "narrower");
+    public static final IRI SKOS_NARROWER = new IRI(NamespaceEnum.skos + "narrower");
     
     /**
      * Refers to the content item the enhancement was extracted from
      */
-    public static final UriRef ENHANCER_EXTRACTED_FROM = new UriRef(
+    public static final IRI ENHANCER_EXTRACTED_FROM = new IRI(
             NamespaceEnum.fise + "extracted-from");
 
     /**
      * the character position of the start of a text selection.
      */
-    public static final UriRef ENHANCER_START = new UriRef(NamespaceEnum.fise
+    public static final IRI ENHANCER_START = new IRI(NamespaceEnum.fise
             + "start");
 
     /**
      * the character position of the end of a text selection.
      */
-    public static final UriRef ENHANCER_END = new UriRef(NamespaceEnum.fise + "end");
+    public static final IRI ENHANCER_END = new IRI(NamespaceEnum.fise + "end");
 
     /**
      * The text selected by the text annotation. This is an optional property
      */
-    public static final UriRef ENHANCER_SELECTED_TEXT = new UriRef(
+    public static final IRI ENHANCER_SELECTED_TEXT = new IRI(
             NamespaceEnum.fise + "selected-text");
 
     /**
      * The context (surroundings) of the selected text (e.g. the sentence
      * containing a person selected by an NLP enhancer)
      */
-    public static final UriRef ENHANCER_SELECTION_CONTEXT = new UriRef(
+    public static final IRI ENHANCER_SELECTION_CONTEXT = new IRI(
             NamespaceEnum.fise + "selection-context");
     /**
      * The prefix of the {@link #ENHANCER_SELECTED_TEXT}. Intended to be used
      * to find the exact position within the text if char indexes cannot be used
      * @since 0.11.0
      */
-    public final static UriRef ENHANCER_SELECTION_PREFIX = new UriRef(
+    public final static IRI ENHANCER_SELECTION_PREFIX = new IRI(
         NamespaceEnum.fise + "selection-prefix");
     /**
      * The first few chars of the {@link #ENHANCER_SELECTED_TEXT}. To be used if
@@ -172,7 +171,7 @@
      * e.g. when selecting sentences or whole sections of the text).
      * @since 0.11.0
      */
-    public final static UriRef ENHANCER_SELECTION_HEAD = new UriRef(
+    public final static IRI ENHANCER_SELECTION_HEAD = new IRI(
         NamespaceEnum.fise + "selection-head");
     /**
      * The last few chars of the {@link #ENHANCER_SELECTED_TEXT}. To be used if
@@ -180,45 +179,45 @@
      * e.g. when selecting sentences or whole sections of the text).
      * @since 0.11.0
      */
-    public final static UriRef ENHANCER_SELECTION_TAIL = new UriRef(
+    public final static IRI ENHANCER_SELECTION_TAIL = new IRI(
         NamespaceEnum.fise + "selection-tail");
     /**
      * The suffix of the {@link #ENHANCER_SELECTED_TEXT}. Intended to be used
      * to find the exact position within the text if char indexes cannot be used
      * @since 0.11.0
      */
-    public final static UriRef ENHANCER_SELECTION_SUFFIX = new UriRef(
+    public final static IRI ENHANCER_SELECTION_SUFFIX = new IRI(
         NamespaceEnum.fise + "selection-suffix");
 
     /**
      * A positive double value to rank extractions according to the algorithm's
      * confidence in the accuracy of the extraction.
      */
-    public static final UriRef ENHANCER_CONFIDENCE = new UriRef(NamespaceEnum.fise
+    public static final IRI ENHANCER_CONFIDENCE = new IRI(NamespaceEnum.fise
             + "confidence");
 
     /**
      * This refers to the URI identifying the referred named entity
      */
-    public static final UriRef ENHANCER_ENTITY_REFERENCE = new UriRef(
+    public static final IRI ENHANCER_ENTITY_REFERENCE = new IRI(
             NamespaceEnum.fise + "entity-reference");
 
     /**
      * This property can be used to specify the type of the entity (Optional)
      */
-    public static final UriRef ENHANCER_ENTITY_TYPE = new UriRef(NamespaceEnum.fise
+    public static final IRI ENHANCER_ENTITY_TYPE = new IRI(NamespaceEnum.fise
             + "entity-type");
 
     /**
      * The label(s) of the referred entity
      */
-    public static final UriRef ENHANCER_ENTITY_LABEL = new UriRef(
+    public static final IRI ENHANCER_ENTITY_LABEL = new IRI(
             NamespaceEnum.fise + "entity-label");
     /**
      * The confidence level (introduced by
      * <a href="https://issues.apache.org/jira/browse/STANBOL-631">STANBOL-631</a>)
      */
-    public static final UriRef ENHANCER_CONFIDENCE_LEVEL = new UriRef(
+    public static final IRI ENHANCER_CONFIDENCE_LEVEL = new IRI(
             NamespaceEnum.fise + "confidence-level");
 
     /**
@@ -228,7 +227,7 @@
      * originates from.
      * @since 0.12.1 (STANBOL-1391)
      */
-    public static final UriRef ENHANCER_ORIGIN = new UriRef(
+    public static final IRI ENHANCER_ORIGIN = new IRI(
             NamespaceEnum.fise + "origin");
     
     /**
@@ -237,13 +236,13 @@
      * @deprecated dc:FileFormat does not exist
      */
     @Deprecated
-    public static final UriRef DC_FILEFORMAT = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_FILEFORMAT = new IRI(NamespaceEnum.dc
             + "FileFormat");
 
     /**
      * Language of the content item text.
      */
-    public static final UriRef DC_LANGUAGE = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_LANGUAGE = new IRI(NamespaceEnum.dc
             + "language");
 
 
@@ -255,45 +254,45 @@
      *             specification
      */
     @Deprecated
-    public static final UriRef DC_SUBJECT = new UriRef(NamespaceEnum.dc
+    public static final IRI DC_SUBJECT = new IRI(NamespaceEnum.dc
             + "subject");
 
     /**
      * The sha1 hexadecimal digest of a content item.
      */
     @Deprecated
-    public static final UriRef FOAF_SHA1 = new UriRef(NamespaceEnum.foaf
+    public static final IRI FOAF_SHA1 = new IRI(NamespaceEnum.foaf
             + "sha1");
 
     /**
      * Link a semantic extraction or a manual annotation to a content item.
      */
     @Deprecated
-    public static final UriRef ENHANCER_RELATED_CONTENT_ITEM = new UriRef(
+    public static final IRI ENHANCER_RELATED_CONTENT_ITEM = new IRI(
             "http://iksproject.eu/ns/extraction/source-content-item");
 
     @Deprecated
-    public static final UriRef ENHANCER_RELATED_TOPIC = new UriRef(
+    public static final IRI ENHANCER_RELATED_TOPIC = new IRI(
             "http://iksproject.eu/ns/extraction/related-topic");
 
     @Deprecated
-    public static final UriRef ENHANCER_RELATED_TOPIC_LABEL = new UriRef(
+    public static final IRI ENHANCER_RELATED_TOPIC_LABEL = new IRI(
             "http://iksproject.eu/ns/extraction/related-topic-label");
 
     @Deprecated
-    public static final UriRef ENHANCER_MENTIONED_ENTITY_POSITION_START = new UriRef(
+    public static final IRI ENHANCER_MENTIONED_ENTITY_POSITION_START = new IRI(
             "http://iksproject.eu/ns/extraction/mention/position-start");
 
     @Deprecated
-    public static final UriRef ENHANCER_MENTIONED_ENTITY_POSITION_END = new UriRef(
+    public static final IRI ENHANCER_MENTIONED_ENTITY_POSITION_END = new IRI(
             "http://iksproject.eu/ns/extraction/mention/position-end");
 
     @Deprecated
-    public static final UriRef ENHANCER_MENTIONED_ENTITY_CONTEXT = new UriRef(
+    public static final IRI ENHANCER_MENTIONED_ENTITY_CONTEXT = new IRI(
             "http://iksproject.eu/ns/extraction/mention/context");
 
     @Deprecated
-    public static final UriRef ENHANCER_MENTIONED_ENTITY_OCCURENCE = new UriRef(
+    public static final IRI ENHANCER_MENTIONED_ENTITY_OCCURENCE = new IRI(
             "http://iksproject.eu/ns/extraction/mention/occurence");
 
 }
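
Enhancement structures are normally created through EnhancementEngineHelper.createTextEnhancement(..) (exercised in the tests below). As a minimal illustration (not part of this patch) of the properties above under the 1.0 API, assuming LiteralFactory.createTypedLiteral(..) now returns a commons-rdf Literal, and using a hypothetical enhancement IRI:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.core.LiteralFactory;
    import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;
    import org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses;

    public class TextAnnotationSketch {
        public static void main(String[] args) {
            LiteralFactory lf = LiteralFactory.getInstance();
            Graph metadata = new SimpleGraph();
            IRI ta = new IRI("urn:enhancement:example-1"); // hypothetical enhancement IRI
            metadata.add(new TripleImpl(ta, Properties.RDF_TYPE,
                    TechnicalClasses.ENHANCER_TEXTANNOTATION));
            metadata.add(new TripleImpl(ta, Properties.ENHANCER_SELECTED_TEXT,
                    new PlainLiteralImpl("Stanbol")));
            metadata.add(new TripleImpl(ta, Properties.ENHANCER_START, lf.createTypedLiteral(4)));
            metadata.add(new TripleImpl(ta, Properties.ENHANCER_END, lf.createTypedLiteral(11)));
        }
    }
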
diff --git a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/TechnicalClasses.java b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/TechnicalClasses.java
index 25d010f..3c5bbc8 100644
--- a/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/TechnicalClasses.java
+++ b/enhancer/generic/servicesapi/src/main/java/org/apache/stanbol/enhancer/servicesapi/rdf/TechnicalClasses.java
@@ -20,7 +20,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * Classes to be used as types for resources that are not real life entities but
@@ -34,21 +34,21 @@
     /**
      * Type used for all enhancement created by Stanbol Enhancer
      */
-    public static final UriRef ENHANCER_ENHANCEMENT = new UriRef(
+    public static final IRI ENHANCER_ENHANCEMENT = new IRI(
             NamespaceEnum.fise+"Enhancement");
 
     /**
      * Type used for annotations on Text created by Stanbol Enhancer. This type is intended
      * to be used in combination with ENHANCER_ENHANCEMENT
      */
-    public static final UriRef ENHANCER_TEXTANNOTATION = new UriRef(
+    public static final IRI ENHANCER_TEXTANNOTATION = new IRI(
             NamespaceEnum.fise+"TextAnnotation");
 
     /**
      * Type used for annotations of named entities. This type is intended
      * to be used in combination with ENHANCER_ENHANCEMENT
      */
-    public static final UriRef ENHANCER_ENTITYANNOTATION = new UriRef(
+    public static final IRI ENHANCER_ENTITYANNOTATION = new IRI(
             NamespaceEnum.fise+"EntityAnnotation");
     
     /**
@@ -62,14 +62,14 @@
      * The entity or concept is not necessarily explicitly mentioned
      * in the document (like a traditional entity occurrence would).
      */
-    public static final UriRef ENHANCER_TOPICANNOTATION = new UriRef(
+    public static final IRI ENHANCER_TOPICANNOTATION = new IRI(
             NamespaceEnum.fise+"TopicAnnotation");
 
     /**
      * To be used as a type for any semantic knowledge extraction
      */
     @Deprecated
-    public static final UriRef ENHANCER_EXTRACTION = new UriRef(
+    public static final IRI ENHANCER_EXTRACTION = new IRI(
             "http://iks-project.eu/ns/enhancer/extraction/Extraction");
 
     /**
@@ -79,13 +79,13 @@
      * @deprecated
      */
     @Deprecated
-    public static final UriRef ANNOTEA_ANNOTATION = new UriRef(
+    public static final IRI ANNOTEA_ANNOTATION = new IRI(
             "http://www.w3.org/2000/10/annotation-ns#Annotation");
 
     /**
      * To be used to type the URI of the content item being annotated by Stanbol Enhancer
      */
-    public static final UriRef FOAF_DOCUMENT = new UriRef(
+    public static final IRI FOAF_DOCUMENT = new IRI(
             NamespaceEnum.foaf + "Document");
 
     /**
@@ -96,7 +96,7 @@
      * {@link OntologicalClasses#SKOS_CONCEPT} (see 
      * <a href="https://issues.apache.org/jira/browse/STANBOL-617">STANBOL-617</a>)
      */
-    public static final UriRef ENHANCER_CATEGORY = new UriRef(
+    public static final IRI ENHANCER_CATEGORY = new IRI(
             NamespaceEnum.fise + "Category");
 
     /**
@@ -107,13 +107,13 @@
      * (see 
      * <a href="https://issues.apache.org/jira/browse/STANBOL-613">STANBOL-613</a>)
      */
-    public static final UriRef DCTERMS_LINGUISTIC_SYSTEM = new UriRef(
+    public static final IRI DCTERMS_LINGUISTIC_SYSTEM = new IRI(
             NamespaceEnum.dc + "LinguisticSystem");
     
     /**
      * The confidence level of {@link #ENHANCER_ENHANCEMENT}s
      */
-    public static final UriRef FNHANCER_CONFIDENCE_LEVEL = new UriRef(
+    public static final IRI FNHANCER_CONFIDENCE_LEVEL = new IRI(
             NamespaceEnum.fise + "ConfidenceLevel");
     
     /**
@@ -129,12 +129,12 @@
     public static enum CONFIDENCE_LEVEL_ENUM{
         certain,ambiguous,suggestion,uncertain;
 
-        private final UriRef uri;
+        private final IRI uri;
         private final String localName;
         
         private CONFIDENCE_LEVEL_ENUM() {
             localName = "cl-"+name();
-            uri = new UriRef(NamespaceEnum.fise+localName);
+            uri = new IRI(NamespaceEnum.fise+localName);
         }
         
         public String getLocalName(){
@@ -145,14 +145,14 @@
             return uri.toString();
         };
         
-        public UriRef getUri(){
+        public IRI getUri(){
             return uri;
         }
         
-        private static final Map<UriRef,CONFIDENCE_LEVEL_ENUM> uriRef2enum;
+        private static final Map<IRI,CONFIDENCE_LEVEL_ENUM> uriRef2enum;
         private static final Map<String,CONFIDENCE_LEVEL_ENUM> uri2enum;
         static {
-            Map<UriRef,CONFIDENCE_LEVEL_ENUM> ur = new HashMap<UriRef,TechnicalClasses.CONFIDENCE_LEVEL_ENUM>();
+            Map<IRI,CONFIDENCE_LEVEL_ENUM> ur = new HashMap<IRI,TechnicalClasses.CONFIDENCE_LEVEL_ENUM>();
             Map<String,CONFIDENCE_LEVEL_ENUM> us = new HashMap<String,TechnicalClasses.CONFIDENCE_LEVEL_ENUM>();
             for(CONFIDENCE_LEVEL_ENUM cl : CONFIDENCE_LEVEL_ENUM.values()){
                 ur.put(cl.getUri(), cl);
@@ -162,17 +162,17 @@
             uri2enum = Collections.unmodifiableMap(us);
         }
         /**
-         * Getter for the fise:ConfidenceLevel instance for the {@link UriRef}
+         * Getter for the fise:ConfidenceLevel instance for the {@link IRI}
          * @param uri the URI
          * @return the fise:ConfidenceLevel instance or <code>null</code> if the
          * parsed URI is not one of the four defined instances
          */
-        public static CONFIDENCE_LEVEL_ENUM getConfidenceLevel(UriRef uri){
+        public static CONFIDENCE_LEVEL_ENUM getConfidenceLevel(IRI uri){
             return uriRef2enum.get(uri);
         }
         
         /**
-         * Getter for the fise:ConfidenceLevel instance for the {@link UriRef}
+         * Getter for the fise:ConfidenceLevel instance for the {@link IRI}
          * @param uri the URI string
          * @return the fise:ConfidenceLevel instance or <code>null</code> if the
          * parsed URI is not one of the four defined instances
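
A short sketch (not part of this patch) of the IRI-to-enum round-trip documented above:

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses.CONFIDENCE_LEVEL_ENUM;

    public class ConfidenceLevelSketch {
        public static void main(String[] args) {
            IRI uri = CONFIDENCE_LEVEL_ENUM.certain.getUri();
            // round-trip: IRI -> enum instance (null for URIs other than the four defined ones)
            CONFIDENCE_LEVEL_ENUM cl = CONFIDENCE_LEVEL_ENUM.getConfidenceLevel(uri);
            System.out.println(cl.getLocalName() + " <-> " + uri.getUnicodeString());
        }
    }
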
diff --git a/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementEngineHelperTest.java b/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementEngineHelperTest.java
index c7f3e6e..5a53cbb 100644
--- a/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementEngineHelperTest.java
+++ b/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementEngineHelperTest.java
@@ -21,11 +21,11 @@
 import java.util.Collection;
 import java.util.HashSet;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.EngineException;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -67,9 +67,9 @@
         Language lang = new Language("en");
         int start = content.indexOf("Stanbol");
         int end = start+"Stanbol Enhancer".length();
-        UriRef ciUri = new UriRef("http://www.example.org/contentItem#1");
-        MGraph metadata = new IndexedMGraph();
-        UriRef ta = EnhancementEngineHelper.createTextEnhancement(metadata, dummyEngine, ciUri);
+        IRI ciUri = new IRI("http://www.example.org/contentItem#1");
+        Graph metadata = new IndexedGraph();
+        IRI ta = EnhancementEngineHelper.createTextEnhancement(metadata, dummyEngine, ciUri);
         EnhancementEngineHelper.setOccurrence(metadata, ta, content, start, end, lang, -1, true);
         Assert.assertEquals("The ", EnhancementEngineHelper.getString(
             metadata, ta,Properties.ENHANCER_SELECTION_PREFIX));
diff --git a/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementPropertyTest.java b/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementPropertyTest.java
index 570687c..f88021f 100644
--- a/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementPropertyTest.java
+++ b/enhancer/generic/servicesapi/src/test/java/org/apache/stanbol/enhancer/serviceapi/helper/EnhancementPropertyTest.java
@@ -40,16 +40,15 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
 import org.apache.stanbol.enhancer.servicesapi.ChainException;
@@ -114,7 +113,7 @@
     private static class TestContentItem extends ContentItemImpl {
 
         protected TestContentItem(String uri, String content) {
-            super(new UriRef(uri), new TestBlob(content), new IndexedMGraph());
+            super(new IRI(uri), new TestBlob(content), new IndexedGraph());
         }
         
     }
@@ -171,7 +170,7 @@
         }
         
         @Override
-        public Graph getExecutionPlan() throws ChainException {
+        public ImmutableGraph getExecutionPlan() throws ChainException {
             return ExecutionPlanHelper.calculateExecutionPlan(name, engines, 
                 Collections.<String>emptySet(), Collections.<String>emptySet(),
                 chainProperties);
@@ -232,8 +231,8 @@
      */
     protected void initExecutionMetadata(Chain chain) throws ChainException {
         //init the ExecutionMetadata ... this is normally done by the EnhancementJobManager
-        MGraph em = ExecutionMetadataHelper.initExecutionMetadataContentPart(contentItem);
-        Graph ep = chain.getExecutionPlan();
+        Graph em = ExecutionMetadataHelper.initExecutionMetadataContentPart(contentItem);
+        ImmutableGraph ep = chain.getExecutionPlan();
         em.addAll(ep);
         ExecutionMetadataHelper.initExecutionMetadata(em, ep, 
             contentItem.getUri(), chain.getName(), false);
@@ -349,8 +348,8 @@
         Collection<String> dereferenceLanguages = Arrays.asList("en","de");
         Integer maxSuggestions = Integer.valueOf(5);
         
-        UriRef maxSuggestionsProperty = new UriRef(NamespaceEnum.ehp + PROPERTY_MAX_SUGGESTIONS);
-        UriRef dereferenceLanguagesProperty = new UriRef(NamespaceEnum.ehp + PROPERTY_DEREFERENCE_LANGUAGES);
+        IRI maxSuggestionsProperty = new IRI(NamespaceEnum.ehp + PROPERTY_MAX_SUGGESTIONS);
+        IRI dereferenceLanguagesProperty = new IRI(NamespaceEnum.ehp + PROPERTY_DEREFERENCE_LANGUAGES);
 
         //set up the map with the enhancement properties we want to set for the
         //Enhancement Chain
@@ -363,25 +362,25 @@
         enhancementProperties.put(linking.getName(), linkingProperties);
         
         //create the ExecutionPlan
-        Graph ep = ExecutionPlanHelper.calculateExecutionPlan("test", engines, 
+        ImmutableGraph ep = ExecutionPlanHelper.calculateExecutionPlan("test", engines, 
             Collections.<String>emptySet(), Collections.<String>emptySet(), 
             enhancementProperties);
         
         //now assert that the enhancement properties were correctly written
         //first the property we set on the chain level
-        NonLiteral epNode = ExecutionPlanHelper.getExecutionPlan(ep, "test");
+        BlankNodeOrIRI epNode = ExecutionPlanHelper.getExecutionPlan(ep, "test");
         assertNotNull(epNode);
         Iterator<Triple> maxSuggestionValues = ep.filter(epNode, maxSuggestionsProperty, null);
         assertTrue(maxSuggestionValues.hasNext());
-        Resource maxSuggestionValue = maxSuggestionValues.next().getObject();
+        RDFTerm maxSuggestionValue = maxSuggestionValues.next().getObject();
         assertFalse(maxSuggestionValues.hasNext());
-        assertTrue(maxSuggestionValue instanceof TypedLiteral);
-        assertEquals(maxSuggestions.toString(), ((TypedLiteral)maxSuggestionValue).getLexicalForm());
+        assertTrue(maxSuggestionValue instanceof Literal);
+        assertEquals(maxSuggestions.toString(), ((Literal)maxSuggestionValue).getLexicalForm());
         assertEquals(maxSuggestions, LiteralFactory.getInstance().createObject(
-            Integer.class, (TypedLiteral)maxSuggestionValue));
+            Integer.class, (Literal)maxSuggestionValue));
         //second the property we set for the linking engine
         boolean found = false;
-        for(NonLiteral ee : ExecutionPlanHelper.getExecutionNodes(ep, epNode)){
+        for(BlankNodeOrIRI ee : ExecutionPlanHelper.getExecutionNodes(ep, epNode)){
             String engineName = ExecutionPlanHelper.getEngine(ep, ee);
             if(linking.getName().equals(engineName)){
                 found = true;
@@ -389,9 +388,9 @@
                 assertTrue(derefLangValues.hasNext());
                 int numValues = 0;
                 while(derefLangValues.hasNext()){
-                    Resource r = derefLangValues.next().getObject();
-                    assertTrue(r instanceof PlainLiteral);
-                    assertTrue(dereferenceLanguages.contains(((PlainLiteral)r).getLexicalForm()));
+                    RDFTerm r = derefLangValues.next().getObject();
+                    assertTrue(r instanceof Literal);
+                    assertTrue(dereferenceLanguages.contains(((Literal)r).getLexicalForm()));
                     numValues++;
                 }
                 assertEquals(dereferenceLanguages.size(), numValues);
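
The pattern exercised above (filter the Graph, downcast the RDFTerm, formerly Resource, to the single Literal type that replaces PlainLiteral and TypedLiteral, then convert via LiteralFactory) can be captured in a small helper; a sketch, not part of this patch:

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.clerezza.rdf.core.LiteralFactory;

    public final class LiteralAccessSketch {
        /** Reads a single Integer value of the given property, or null if absent or non-literal. */
        public static Integer getInteger(Graph g, BlankNodeOrIRI subject, IRI property) {
            Iterator<Triple> it = g.filter(subject, property, null);
            if (!it.hasNext()) {
                return null;
            }
            RDFTerm value = it.next().getObject();
            return value instanceof Literal
                    ? LiteralFactory.getInstance().createObject(Integer.class, (Literal) value)
                    : null;
        }
    }
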
diff --git a/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemFactoryTest.java b/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemFactoryTest.java
index 9c7bd99..6520a9a 100644
--- a/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemFactoryTest.java
+++ b/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemFactoryTest.java
@@ -26,11 +26,11 @@
 import java.io.OutputStream;
 import java.nio.charset.Charset;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.commons.io.IOUtils;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -80,11 +80,11 @@
     /**
      * The {@link ContentItem#getUri() ID} used for testing
      */
-    private static UriRef ID = new UriRef("http://www.example.com/content-items#12345");
+    private static IRI ID = new IRI("http://www.example.com/content-items#12345");
     /**
-     * Graph used to test of parsed metadata are preserved
+     * Graph used to test if parsed metadata are preserved
      */
-    private static MGraph METADATA = new SimpleMGraph();
+    private static Graph METADATA = new SimpleGraph();
     static {
         METADATA.add(new TripleImpl(ID, Properties.RDF_TYPE, Enhancer.CONTENT_ITEM));
         METADATA.add(new TripleImpl(ID, Properties.RDFS_LABEL, new PlainLiteralImpl("Test ContentItem")));
@@ -138,11 +138,11 @@
     }
     @Test(expected=IllegalArgumentException.class)
     public void missingCiContentSource4() throws IOException{
-        contentItemFactory.createContentItem(ID,null,new SimpleMGraph());
+        contentItemFactory.createContentItem(ID,null,new SimpleGraph());
     }
     @Test(expected=IllegalArgumentException.class)
     public void missingCiContentSource5() throws IOException{
-        contentItemFactory.createContentItem(PREFIX,null,new SimpleMGraph());
+        contentItemFactory.createContentItem(PREFIX,null,new SimpleGraph());
     }
     /*
      * Set of tests to test that IllegalArgumentExceptions are
@@ -155,7 +155,7 @@
     }
     @Test(expected=IllegalArgumentException.class)
     public void missingCiContentReference2() throws IOException{
-        contentItemFactory.createContentItem(null,new SimpleMGraph());
+        contentItemFactory.createContentItem(null,new SimpleGraph());
     }
     /*
      * Set of tests to test that IllegalArgumentExceptions are
@@ -179,7 +179,7 @@
     }
     @Test(expected=IllegalArgumentException.class)
     public void missingCiPrefix2() throws IOException{
-        contentItemFactory.createContentItem((String)null,TEST_CS,new SimpleMGraph());
+        contentItemFactory.createContentItem((String)null,TEST_CS,new SimpleGraph());
     }
     /**
      * Test that the generated ID starts with the parsed prefix
@@ -192,7 +192,7 @@
         assertTrue("The ID of the created ContentItem MUST start with the parsed prefix", 
             ci.getUri().getUnicodeString().startsWith(PREFIX));
         
-        ci = contentItemFactory.createContentItem(PREFIX, TEST_CS,new SimpleMGraph());
+        ci = contentItemFactory.createContentItem(PREFIX, TEST_CS,new SimpleGraph());
         assertNotNull(ci);
         assertNotNull(ci.getUri());
         assertTrue("The ID of the created ContentItem MUST start with the parsed prefix", 
@@ -209,7 +209,7 @@
         assertTrue("The ID of the created ContentItem MUST be equals to the parsed ID", 
             ci.getUri().equals(ID));
         
-        ci = contentItemFactory.createContentItem(ID, TEST_CS,new SimpleMGraph());
+        ci = contentItemFactory.createContentItem(ID, TEST_CS,new SimpleGraph());
         assertNotNull(ci);
         assertNotNull(ci.getUri());
         assertTrue("The ID of the created ContentItem MUST be equals to the parsed ID", 
@@ -224,10 +224,10 @@
         ContentItem ci = contentItemFactory.createContentItem(TEST_CS);
         assertNotNull(ci);
         assertNotNull(ci.getUri());
-        ci = contentItemFactory.createContentItem((UriRef)null,TEST_CS);
+        ci = contentItemFactory.createContentItem((IRI)null,TEST_CS);
         assertNotNull(ci);
         assertNotNull(ci.getUri());
-        ci = contentItemFactory.createContentItem((UriRef)null,TEST_CS, new SimpleMGraph());
+        ci = contentItemFactory.createContentItem((IRI)null,TEST_CS, new SimpleGraph());
         assertNotNull(ci);
         assertNotNull(ci.getUri());
     }
@@ -242,7 +242,7 @@
         assertNotNull(ci.getUri());
         assertEquals(TEST_CR.getReference(),ci.getUri().getUnicodeString());
         
-        contentItemFactory.createContentItem(TEST_CR, new SimpleMGraph());
+        contentItemFactory.createContentItem(TEST_CR, new SimpleGraph());
         assertNotNull(ci);
         assertNotNull(ci.getUri());
         assertEquals(TEST_CR.getReference(),ci.getUri().getUnicodeString());
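
For readers unfamiliar with the factory API these tests exercise, a minimal usage sketch under the new type names (not part of this patch; the ID and label are hypothetical):

    import java.io.IOException;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
    import org.apache.stanbol.enhancer.servicesapi.impl.StringSource;
    import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;

    public final class ContentItemSketch {
        /** Creates a ContentItem whose parsed metadata MUST be preserved by the factory. */
        public static ContentItem create(ContentItemFactory factory) throws IOException {
            IRI id = new IRI("urn:example:content-item-1");  // hypothetical ID
            Graph metadata = new SimpleGraph();              // mutable Graph (was SimpleMGraph)
            metadata.add(new TripleImpl(id, Properties.RDFS_LABEL,
                    new PlainLiteralImpl("Example ContentItem")));
            return factory.createContentItem(id, new StringSource("Some text"), metadata);
        }
    }
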
diff --git a/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemTest.java b/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemTest.java
index 9dd3586..7c4de6c 100644
--- a/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemTest.java
+++ b/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/ContentItemTest.java
@@ -24,8 +24,8 @@
 import java.io.IOException;
 import java.util.Date;
 
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.ContentSource;
@@ -38,7 +38,7 @@
 public abstract class ContentItemTest {
     
     private final Logger log = LoggerFactory.getLogger(ContentItemTest.class);
-    //private static final UriRef ciUri = new UriRef("http://example.org/");
+    //private static final IRI ciUri = new IRI("http://example.org/");
     private static final ContentSource contentSource = new StringSource("This is a Test!"); 
     /**
      * Used to create ContentItems used by this Test. Each call MUST return a
@@ -60,21 +60,21 @@
 		ContentItem ci = createContentItem(contentSource);
 		assertNotNull(ci);
 		assertNotNull(ci.getUri());
-		UriRef partUri = new UriRef("http://foo/");
+		IRI partUri = new IRI("http://foo/");
 		Date someObject = new Date();
 		ci.addPart(partUri, someObject);
-		ci.getMetadata().add(new TripleImpl(ci.getUri(), new UriRef("http://example.org/ontology#hasPart"), partUri));
-        ci.getMetadata().add(new TripleImpl(partUri, new UriRef("http://example.org/ontology#isPartOf"),ci.getUri()));
+		ci.getMetadata().add(new TripleImpl(ci.getUri(), new IRI("http://example.org/ontology#hasPart"), partUri));
+        ci.getMetadata().add(new TripleImpl(partUri, new IRI("http://example.org/ontology#isPartOf"),ci.getUri()));
 		assertEquals(someObject, ci.getPart(partUri, Date.class));
 		assertEquals(someObject, ci.getPart(1, Date.class));
 		assertEquals(partUri, ci.getPartUri(1));
-		assertEquals(new UriRef(ci.getUri().getUnicodeString()+"_main"), ci.getPartUri(0));
+		assertEquals(new IRI(ci.getUri().getUnicodeString()+"_main"), ci.getPartUri(0));
 		try {
 		    ci.getPart(2, Object.class);
 		    assertTrue("Requesting non existance part MUST throw an NoSuchPartException", false);
 		} catch (NoSuchPartException e) {/* expected*/}
         try {
-            ci.getPart(new UriRef("http://foo/nonexisting"), Object.class);
+            ci.getPart(new IRI("http://foo/nonexisting"), Object.class);
             assertTrue("Requesting non existance part MUST throw an NoSuchPartException", false);
         } catch (NoSuchPartException e) {/* expected*/}
         try {
@@ -92,7 +92,7 @@
     @Test(expected=IllegalArgumentException.class)
     public void addPartWithoutPartContent() throws IOException{
         ContentItem ci = createContentItem(contentSource);
-        ci.addPart(new UriRef("http://foo/"), null);
+        ci.addPart(new IRI("http://foo/"), null);
     }
     /**
      * The ContentItem MUST NOT allow replacing the main content part (the
      * one at index 0).
@@ -101,7 +101,7 @@
     @Test(expected=IllegalArgumentException.class)
     public void replaceMainPart() throws IOException{
         ContentItem ci = createContentItem(contentSource);
-        UriRef mainPart = ci.getPartUri(0);
+        IRI mainPart = ci.getPartUri(0);
         ci.addPart(mainPart, new Date());
     }
     @Test(expected=IllegalArgumentException.class)
@@ -127,7 +127,7 @@
     @Test(expected=NoSuchPartException.class)
     public void removeNonExistentPartByUri() throws IOException {
         ContentItem ci = createContentItem(contentSource);
-        ci.removePart(new UriRef("urn:does.not.exist:and.can.not.be.removed"));
+        ci.removePart(new IRI("urn:does.not.exist:and.can.not.be.removed"));
     }
     @Test(expected=NoSuchPartException.class)
     public void removeNonExistentPartByIndex() throws IOException {
@@ -137,7 +137,7 @@
     @Test
     public void removeRemoveByUri() throws IOException {
         ContentItem ci = createContentItem(contentSource);
-        UriRef uri = new UriRef("urn:content.part:remove.test");
+        IRI uri = new IRI("urn:content.part:remove.test");
         ci.addPart(uri, new Date());
         try {
             ci.getPart(uri, Date.class);
@@ -157,12 +157,12 @@
     @Test
     public void removeRemoveByIndex() throws IOException {
         ContentItem ci = createContentItem(contentSource);
-        UriRef uri = new UriRef("urn:content.part:remove.test");
+        IRI uri = new IRI("urn:content.part:remove.test");
         ci.addPart(uri, new Date());
         int index = -1;
         try {
             for(int i=0; index < 0; i++){
-                UriRef partUri = ci.getPartUri(i);
+                IRI partUri = ci.getPartUri(i);
                 if(partUri.equals(uri)){
                     index = i;
                 }
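
A minimal sketch (not part of this patch) of the content-part API exercised by these tests; the part IRI is hypothetical:

    import java.util.Date;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;

    public final class ContentPartSketch {
        /** Attaches a Date part under the given IRI and reads it back via the typed getter. */
        public static Date attachAndRead(ContentItem ci) {
            IRI partUri = new IRI("urn:example:content.part:date"); // hypothetical part IRI
            ci.addPart(partUri, new Date());
            return ci.getPart(partUri, Date.class);
        }
    }
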
diff --git a/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/helper/EnhancementStructureHelper.java b/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/helper/EnhancementStructureHelper.java
index 7886bd8..558d70b 100644
--- a/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/helper/EnhancementStructureHelper.java
+++ b/enhancer/generic/test/src/main/java/org/apache/stanbol/enhancer/test/helper/EnhancementStructureHelper.java
@@ -47,16 +47,14 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
 import org.apache.clerezza.rdf.ontologies.DCTERMS;
 import org.apache.clerezza.rdf.ontologies.XSD;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -86,7 +84,7 @@
      * {@link Class#getName()} as value.
      * @return the number of found TextAnnotations
      */
-    public static int validateAllTextAnnotations(TripleCollection enhancements, String content, Map<UriRef,Resource> expectedValues) {
+    public static int validateAllTextAnnotations(Graph enhancements, String content, Map<IRI,RDFTerm> expectedValues) {
         return validateAllTextAnnotations(enhancements,content,expectedValues,false);
     }
     /**
@@ -107,7 +105,7 @@
      * @return the number of found TextAnnotations
      */
     @SuppressWarnings("unchecked")
-    public static int validateAllTextAnnotations(TripleCollection enhancements, String content, Map<UriRef,Resource> expectedValues, boolean validatePrefixSuffix) {
+    public static int validateAllTextAnnotations(Graph enhancements, String content, Map<IRI,RDFTerm> expectedValues, boolean validatePrefixSuffix) {
         expectedValues = expectedValues == null ? Collections.EMPTY_MAP : expectedValues;
         Iterator<Triple> textAnnotationIterator = enhancements.filter(null,
                 RDF_TYPE, ENHANCER_TEXTANNOTATION);
@@ -116,7 +114,7 @@
         //  -> this might be used to test that there are no TextAnnotations
         int textAnnotationCount = 0;
         while (textAnnotationIterator.hasNext()) {
-            UriRef textAnnotation = (UriRef) textAnnotationIterator.next().getSubject();
+            IRI textAnnotation = (IRI) textAnnotationIterator.next().getSubject();
             // test if selected Text is added
             validateTextAnnotation(enhancements, textAnnotation,content,expectedValues, validatePrefixSuffix);
             textAnnotationCount++;
@@ -127,7 +125,7 @@
     /**
      * Validates the parsed TextAnnotation with a fise:selected-text. This
      * method also validates rules defined by fise:Enhancement by calling
-     * {@link #validateEnhancement(TripleCollection, UriRef, Map)}<p>
+     * {@link #validateEnhancement(Graph, IRI, Map)}<p>
      * NOTE: this method MUST NOT be used to validate fise:TextAnnotations that
      * do NOT select a part of the text - meaning TextAnnotations about the
      * whole parsed content.
@@ -136,7 +134,7 @@
      * @param content the enhanced content
      * @param expectedValues expected values (properties for the values are used as keys)
      */
-    public static void validateTextAnnotation(TripleCollection enhancements, UriRef textAnnotation, String content, Map<UriRef,Resource> expectedValues) {
+    public static void validateTextAnnotation(Graph enhancements, IRI textAnnotation, String content, Map<IRI,RDFTerm> expectedValues) {
         validateTextAnnotation(enhancements,textAnnotation,content,expectedValues,false);
     }
     /**
@@ -152,7 +150,7 @@
      * @param validatePrefixSuffix enforce the presence of fise:selection-prefix and 
      * fise:selection-suffix if fise:start and fise:end are set.
      */
-    public static void validateTextAnnotation(TripleCollection enhancements, UriRef textAnnotation, String content, Map<UriRef,Resource> expectedValues, boolean validatePrefixSuffix) {
+    public static void validateTextAnnotation(Graph enhancements, IRI textAnnotation, String content, Map<IRI,RDFTerm> expectedValues, boolean validatePrefixSuffix) {
         //validate the rdf:type
         Iterator<Triple> rdfTypeIterator = enhancements.filter(textAnnotation, RDF_TYPE, ENHANCER_TEXTANNOTATION);
         assertTrue("Parsed Enhancement "+textAnnotation +" is missing the fise:TextAnnotation type ",
@@ -160,12 +158,12 @@
         Iterator<Triple> selectedTextIterator = enhancements.filter(textAnnotation,
                 ENHANCER_SELECTED_TEXT, null);
         // check if the selected text is added (or not)
-        Resource selectedTextResource;
+        RDFTerm selectedTextResource;
         if(selectedTextIterator.hasNext()){
             // test if the selected text is part of the TEXT_TO_TEST
             selectedTextResource = selectedTextIterator.next().getObject();
             assertTrue("fise:selected-text MUST BE of type PlainLiteral (uri: "+textAnnotation+")",
-                selectedTextResource instanceof PlainLiteral);
+                selectedTextResource instanceof Literal);
             Literal selectedText = (Literal)selectedTextResource;
             assertTrue("The parsed content MUST contain the fise:selected-text value '"
                 +selectedText.getLexicalForm()+"' (uri: "+textAnnotation+")!",content.contains(selectedText.getLexicalForm()));
@@ -174,7 +172,7 @@
             selectedTextResource = null; //no selected text
         }
         //check against an expected value
-        Resource expectedSelectedText = expectedValues.get(ENHANCER_SELECTED_TEXT);
+        RDFTerm expectedSelectedText = expectedValues.get(ENHANCER_SELECTED_TEXT);
         if(expectedSelectedText != null){
             assertEquals("The fise:selected-text is not the expected value "+expectedSelectedText+" (uri: "+textAnnotation+")!",
                 expectedSelectedText, selectedTextResource);
@@ -184,12 +182,12 @@
         if(selectedTextResource != null){
             Assert.assertFalse("If fise:selected-text is present fise:selection-head MUST NOT be present",selectionHeadIterator.hasNext());
         }
-        Resource selectionHeadResource;
+        RDFTerm selectionHeadResource;
         if(selectionHeadIterator.hasNext()){
             // test if the selected text is part of the TEXT_TO_TEST
             selectionHeadResource = selectionHeadIterator.next().getObject();
             assertTrue("fise:selection-head MUST BE of type PlainLiteral (uri: "+textAnnotation+")",
-                selectionHeadResource instanceof PlainLiteral);
+                selectionHeadResource instanceof Literal);
             Literal selectionHeadText = (Literal)selectionHeadResource;
             assertTrue("The parsed content MUST contain the fise:selected-head value '"
                 +selectionHeadText.getLexicalForm()+"' (uri: "+textAnnotation+")!",content.contains(selectionHeadText.getLexicalForm()));
@@ -202,12 +200,12 @@
         if(selectedTextResource != null){
             Assert.assertFalse("If fise:selected-text is present fise:selection-tail MUST NOT be present",selectionTailIterator.hasNext());
         }
-        Resource selectionTailResource;
+        RDFTerm selectionTailResource;
         if(selectionTailIterator.hasNext()){
             // test if the selected text is part of the TEXT_TO_TEST
             selectionTailResource = selectionTailIterator.next().getObject();
             assertTrue("fise:selection-head MUST BE of type PlainLiteral (uri: "+textAnnotation+")",
-                selectionTailResource instanceof PlainLiteral);
+                selectionTailResource instanceof Literal);
             Literal selectionTailText = (Literal)selectionTailResource;
             assertTrue("The parsed content MUST contain the fise:selected-tail value '"
                 +selectionTailText.getLexicalForm()+"' (uri: "+textAnnotation+")!",content.contains(selectionTailText.getLexicalForm()));
@@ -220,7 +218,7 @@
             (selectionHeadResource != null && selectionTailResource != null) ||
             (selectionHeadResource == null && selectionTailResource == null));
         
-        Resource selectionContextResource;
+        RDFTerm selectionContextResource;
         // test if context is added
         Iterator<Triple> selectionContextIterator = enhancements.filter(textAnnotation,
                 ENHANCER_SELECTION_CONTEXT, null);
@@ -231,7 +229,7 @@
             // test if the selected text is part of the TEXT_TO_TEST
             selectionContextResource = selectionContextIterator.next().getObject();
             assertTrue("The fise:selection-context MUST BE of type PlainLiteral (uri: "+textAnnotation+")",
-                selectionContextResource instanceof PlainLiteral);
+                selectionContextResource instanceof Literal);
             //check that the content contains the context
             assertTrue("The fise:selection-context MUST BE contained in the Content | context= "+ selectionContextResource,
             content.contains(((Literal)selectionContextResource).getLexicalForm()));
@@ -255,7 +253,7 @@
             assertNull("If no fise:selection-context is present also fise:selected-text MUST BE NOT present!", selectedTextResource);
             selectionContextResource = null;
         }
-        Resource expectedSelectionContext = expectedValues.get(ENHANCER_SELECTION_CONTEXT);
+        RDFTerm expectedSelectionContext = expectedValues.get(ENHANCER_SELECTION_CONTEXT);
         if(expectedSelectionContext != null){
             assertEquals("The value of fise:selection-context has not the expected value "+expectedSelectionContext,
                 expectedSelectionContext, selectionContextResource);
@@ -266,19 +264,19 @@
         Iterator<Triple> endPosIterator = enhancements.filter(textAnnotation,
                 ENHANCER_END, null);
-        //start end is optional, but if start is present, that also end needs to be set
+        //fise:start and fise:end are optional, but if fise:start is present fise:end MUST also be set
-        TypedLiteral startPosLiteral;
-        TypedLiteral endPosLiteral;
+        Literal startPosLiteral;
+        Literal endPosLiteral;
         if(startPosIterator.hasNext()){
-            //NOTE: TextAnnotations might be use to select whole sections of a text
+            //NOTE: TextAnnotations might be used to select whole sections of a text
             //      (e.g. see STANBOL-617) in those cases adding the text of the
             //      whole section is not feasible.
             //assertNotNull("If fise:start is present the fise:selection-context MUST also be present (uri: "+textAnnotation+")!",
             //    selectionContextResource);
-            Resource resource = startPosIterator.next().getObject();
+            RDFTerm resource = startPosIterator.next().getObject();
             //only a single start position is supported
             assertFalse("fise:start MUST HAVE only a single value (uri: "+textAnnotation+")!",startPosIterator.hasNext());
-            assertTrue("fise:start MUST be a typed Literal (uri: "+textAnnotation+")!",resource instanceof TypedLiteral);
-            startPosLiteral = (TypedLiteral) resource;
+            assertTrue("fise:start MUST be a typed Literal (uri: "+textAnnotation+")!",resource instanceof Literal);
+            startPosLiteral = (Literal) resource;
             assertEquals("fise:start MUST use xsd:int as data type (uri: "+textAnnotation+")",XSD.int_, startPosLiteral.getDataType());
             resource = null;
             Integer start = LiteralFactory.getInstance().createObject(Integer.class, startPosLiteral);
@@ -289,8 +287,8 @@
             resource = endPosIterator.next().getObject();
             //only a single end position is supported
             assertFalse("fise:end MUST HAVE only a single value (uri: "+textAnnotation+")!",endPosIterator.hasNext());
-            assertTrue("fise:end values MUST BE TypedLiterals (uri: "+textAnnotation+")",resource instanceof TypedLiteral);
-            endPosLiteral = (TypedLiteral) resource;
+            assertTrue("fise:end values MUST BE TypedLiterals (uri: "+textAnnotation+")",resource instanceof Literal);
+            endPosLiteral = (Literal) resource;
             assertEquals("fise:end MUST use xsd:int as data type (uri: "+textAnnotation+")",XSD.int_, endPosLiteral.getDataType());
             resource = null;
             Integer end = LiteralFactory.getInstance().createObject(Integer.class, endPosLiteral);
@@ -309,12 +307,12 @@
             startPosLiteral = null;
             endPosLiteral = null;
         }
-        Resource expectedStartPos = expectedValues.get(ENHANCER_START);
+        RDFTerm expectedStartPos = expectedValues.get(ENHANCER_START);
         if(expectedStartPos != null){
             assertEquals("The fise:start value is not the expected "+expectedStartPos,
                 expectedStartPos, startPosLiteral);
         }
-        Resource expectedEndPos = expectedValues.get(ENHANCER_END);
+        RDFTerm expectedEndPos = expectedValues.get(ENHANCER_END);
         if(expectedEndPos != null){
             assertEquals("The fise:end value is not the expected "+expectedEndPos,
                 expectedEndPos, endPosLiteral);
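
Aside, and not part of this patch: the fise:start/fise:end checks above round-trip integer offsets through LiteralFactory, which survives the migration and now operates on the single Literal type. A minimal sketch of that round-trip, assuming createTypedLiteral maps an Integer to an xsd:int Literal:

    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.rdf.core.LiteralFactory;

    public class LiteralRoundTripSketch {
        public static void main(String[] args) {
            LiteralFactory lf = LiteralFactory.getInstance();
            // create a typed Literal for a start offset, as an engine would
            Literal startPos = lf.createTypedLiteral(42);
            // convert it back to a Java Integer, as the asserts above do
            Integer start = lf.createObject(Integer.class, startPos);
            System.out.println(start); // 42
        }
    }
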
@@ -331,9 +329,9 @@
                 !validatePrefixSuffix); //to support old and new fise:TextAnnotation model
             // test if the selected text is part of the TEXT_TO_TEST
             if(selectionPrefixIterator.hasNext()){
-                Resource selectionPrefixResource = selectionPrefixIterator.next().getObject();
+                RDFTerm selectionPrefixResource = selectionPrefixIterator.next().getObject();
                 assertTrue("fise:selection-prefix MUST BE of type PlainLiteral (uri: "+textAnnotation+")",
-                    selectionPrefixResource instanceof PlainLiteral);
+                    selectionPrefixResource instanceof Literal);
                 prefixLiteral = (Literal)selectionPrefixResource;
                 assertTrue("The parsed content MUST contain the fise:selection-prefix value '"
                         +prefixLiteral.getLexicalForm()+"' (uri: "+textAnnotation+")!",content.contains(prefixLiteral.getLexicalForm()));
@@ -355,9 +353,9 @@
                 !validatePrefixSuffix); //to support old and new fise:TextAnnotation model
             if(selectionSuffixIterator.hasNext()){
                 // test if the selected text is part of the TEXT_TO_TEST
-                Resource selectionSuffixResource = selectionSuffixIterator.next().getObject();
+                RDFTerm selectionSuffixResource = selectionSuffixIterator.next().getObject();
                 assertTrue("fise:selection-suffix MUST BE of type PlainLiteral (uri: "+textAnnotation+")",
-                    selectionSuffixResource instanceof PlainLiteral);
+                    selectionSuffixResource instanceof Literal);
                 suffixLiteral = (Literal)selectionSuffixResource;
                 assertTrue("The parsed content MUST contain the fise:selection-suffix value '"
                         +suffixLiteral.getLexicalForm()+"' (uri: "+textAnnotation+")!",content.contains(suffixLiteral.getLexicalForm()));
@@ -405,15 +403,15 @@
      * Validates the correctness of fise:TextAnnotations that annotate the language 
      * of the text as defined by 
      * <a href="https://issues.apache.org/jira/browse/STANBOL-613">STANBOL-613</a><p>
-     * Called by {@link #validateTextAnnotation(TripleCollection, UriRef, String, Map)}
+     * Called by {@link #validateTextAnnotation(Graph, IRI, String, Map)}
      * @param enhancements
      * @param textAnnotation
      */
-    private static void validateLanguageAnnotations(TripleCollection enhancements, UriRef textAnnotation) {
+    private static void validateLanguageAnnotations(Graph enhancements, IRI textAnnotation) {
         Iterator<Triple> dcLanguageIterator = enhancements.filter(textAnnotation, DC_LANGUAGE, null);
         if(dcLanguageIterator.hasNext()){ //a language annotation
-            Resource dcLanguageResource = dcLanguageIterator.next().getObject();
-            assertTrue("The dc:language value MUST BE a PlainLiteral", dcLanguageResource instanceof PlainLiteral);
+            RDFTerm dcLanguageResource = dcLanguageIterator.next().getObject();
+            assertTrue("The dc:language value MUST BE a PlainLiteral", dcLanguageResource instanceof Literal);
             assertTrue("The dc:language value '"+dcLanguageResource+"'MUST BE at least two chars long", 
                 ((Literal)dcLanguageResource).getLexicalForm().length() >=2);
             assertFalse("TextAnnotations with the dc:language property MUST only have a single dc:language value (uri "
@@ -428,7 +426,7 @@
                 +textAnnotation+")",dcTypeIterator.hasNext());
             //assert that the created TextAnnotation is correctly returned by the
             //EnhancementEngineHelper methods
-            List<NonLiteral> languageAnnotation = EnhancementEngineHelper.getLanguageAnnotations(enhancements);
+            List<BlankNodeOrIRI> languageAnnotation = EnhancementEngineHelper.getLanguageAnnotations(enhancements);
             assertTrue("Language annotation "+textAnnotation+" was not returned by "
                 +"EnhancementEngineHelper.getLanguageAnnotations(..)!",languageAnnotation.contains(textAnnotation));
         } else { //no language annotation
@@ -446,16 +444,16 @@
      * dbp-ont:Organisation and dbp-ont:Place do have a
      * fise:selected-text value (this implicitly also checks that
-     * fise:selection-context, fise:start and fise:end are defined!<p>
+     * fise:selection-context, fise:start and fise:end are defined)!<p>
-     * Called by {@link #validateTextAnnotation(TripleCollection, UriRef, String, Map)}
+     * Called by {@link #validateTextAnnotation(Graph, IRI, String, Map)}
      * @param enhancements
      * @param textAnnotation
      * @param selectedTextResource the fise:selected-text value
      */
-    private static void validateNERAnnotations(TripleCollection enhancements, UriRef textAnnotation, Resource selectedTextResource) {
+    private static void validateNERAnnotations(Graph enhancements, IRI textAnnotation, RDFTerm selectedTextResource) {
         Iterator<Triple> dcTypeIterator = enhancements.filter(textAnnotation, DC_TYPE, null);
         boolean isNERAnnotation = false;
         while(dcTypeIterator.hasNext() && !isNERAnnotation){
-            Resource dcTypeValue = dcTypeIterator.next().getObject();
+            RDFTerm dcTypeValue = dcTypeIterator.next().getObject();
             isNERAnnotation = DBPEDIA_PERSON.equals(dcTypeValue) ||
                     DBPEDIA_ORGANISATION.equals(dcTypeValue) ||
                     DBPEDIA_PLACE.equals(dcTypeValue);
@@ -477,13 +475,13 @@
      * @return the number of found and validated EntityAnnotations.
      */
     @SuppressWarnings("unchecked")
-    public static int validateAllEntityAnnotations(TripleCollection enhancements,Map<UriRef,Resource> expectedValues) {
+    public static int validateAllEntityAnnotations(Graph enhancements,Map<IRI,RDFTerm> expectedValues) {
         expectedValues = expectedValues == null ? Collections.EMPTY_MAP : expectedValues;
         Iterator<Triple> entityAnnotationIterator = enhancements.filter(null,
                 RDF_TYPE, ENHANCER_ENTITYANNOTATION);
         int entityAnnotationCount = 0;
         while (entityAnnotationIterator.hasNext()) {
-            UriRef entityAnnotation = (UriRef) entityAnnotationIterator.next().getSubject();
+            IRI entityAnnotation = (IRI) entityAnnotationIterator.next().getSubject();
             // test if selected Text is added
             validateEntityAnnotation(enhancements, entityAnnotation, 
                 expectedValues);
@@ -495,12 +493,12 @@
     /**
      * Checks if a fise:EntityAnnotation is valid. NOTE that this also validates
      * all fise:Enhancement related requirements by calling
-     * {@link #validateEnhancement(TripleCollection, UriRef, Map)}
+     * {@link #validateEnhancement(Graph, IRI, Map)}
      * @param enhancements the enhancements graph
      * @param entityAnnotation the entity annotation to validate
      * @param expectedValues expected values (properties for the values are used as keys)
      */
-    public static void validateEntityAnnotation(TripleCollection enhancements, UriRef entityAnnotation,Map<UriRef,Resource> expectedValues) {
+    public static void validateEntityAnnotation(Graph enhancements, IRI entityAnnotation,Map<IRI,RDFTerm> expectedValues) {
         Iterator<Triple> relationToTextAnnotationIterator = enhancements.filter(
                 entityAnnotation, DC_RELATION, null);
         // check if the relation to the text annotation is set
@@ -509,7 +507,7 @@
             // test if the referred annotations are text annotations or
             // the referenced annotations is a fise:EntityAnnotation AND also a
             // dc:requires link is defined (STANBOL-766)
-            UriRef referredTextAnnotation = (UriRef) relationToTextAnnotationIterator.next().getObject();
+            IRI referredTextAnnotation = (IRI) relationToTextAnnotationIterator.next().getObject();
             assertTrue("fise:EntityAnnotations MUST BE dc:related to a fise:TextAnnotation OR dc:requires and dc:related to the same fise:EntityAnnotation",
                 enhancements.filter(referredTextAnnotation, RDF_TYPE,
                     ENHANCER_TEXTANNOTATION).hasNext() || (
@@ -523,11 +521,11 @@
                 ENHANCER_ENTITY_REFERENCE, null);
         assertTrue("fise:entity-reference MUST BE present! (EntityAnnotation: '"
                 +entityAnnotation+"')'",entityReferenceIterator.hasNext());
-        Resource expectedReferencedEntity = expectedValues.get(ENHANCER_ENTITY_REFERENCE);
+        RDFTerm expectedReferencedEntity = expectedValues.get(ENHANCER_ENTITY_REFERENCE);
         while(entityReferenceIterator.hasNext()){ //check possible multiple references
-            Resource entityReferenceResource = entityReferenceIterator.next().getObject();
+            RDFTerm entityReferenceResource = entityReferenceIterator.next().getObject();
-            // test if the reference is an URI
-            assertTrue("fise:entity-reference value MUST BE of URIs",entityReferenceResource instanceof UriRef);
+            // test if the reference is a URI
+            assertTrue("fise:entity-reference values MUST BE URIs",entityReferenceResource instanceof IRI);
             if(expectedReferencedEntity != null && expectedReferencedEntity.equals(entityReferenceResource)){
                 expectedReferencedEntity = null; //found
             }
@@ -538,11 +536,11 @@
         //test if the entity label is set
         Iterator<Triple> entityLabelIterator = enhancements.filter(entityAnnotation, ENHANCER_ENTITY_LABEL, null);
         assertTrue(entityLabelIterator.hasNext());
-        Resource expectedEntityLabel = expectedValues.get(ENHANCER_ENTITY_LABEL);
+        RDFTerm expectedEntityLabel = expectedValues.get(ENHANCER_ENTITY_LABEL);
         while(entityLabelIterator.hasNext()){
-            Resource entityLabelResource =  entityLabelIterator.next().getObject();
+            RDFTerm entityLabelResource =  entityLabelIterator.next().getObject();
             assertTrue("fise:entity-label values MUST BE PlainLiterals (EntityAnnotation: "+entityAnnotation+")!",
-                entityLabelResource instanceof PlainLiteral);
+                entityLabelResource instanceof Literal);
             if(expectedEntityLabel != null && expectedEntityLabel.equals(entityLabelResource)){
                 expectedEntityLabel = null;
             }
@@ -552,10 +550,10 @@
         
         //test the optional entity types
         Iterator<Triple> entityTypeIterator = enhancements.filter(entityAnnotation, Properties.ENHANCER_ENTITY_TYPE, null);
-        Resource expectedEntityType = expectedValues.get(Properties.ENHANCER_ENTITY_TYPE);
+        RDFTerm expectedEntityType = expectedValues.get(Properties.ENHANCER_ENTITY_TYPE);
         if(entityTypeIterator.hasNext()){
-            Resource entityTypeResource = entityTypeIterator.next().getObject();
-            assertTrue("fise:entity-type values MUST BE URIs",entityTypeResource instanceof UriRef);
+            RDFTerm entityTypeResource = entityTypeIterator.next().getObject();
+            assertTrue("fise:entity-type values MUST BE URIs",entityTypeResource instanceof IRI);
             if(expectedEntityType != null && expectedEntityType.equals(entityTypeResource)){
                 expectedEntityType = null; //found
             }
@@ -567,13 +565,13 @@
     }
     /**
      * Validates all fise:Enhancement related properties and values. NOTE that
-     * this method is called by {@link #validateEntityAnnotation(TripleCollection, UriRef, Map)}
-     * and {@link #validateTextAnnotation(TripleCollection, UriRef, String)}.
+     * this method is called by {@link #validateEntityAnnotation(Graph, IRI, Map)}
+     * and {@link #validateTextAnnotation(Graph, IRI, String)}.
      * @param enhancements the enhancements graph
      * @param enhancement the fise:Enhancement to validate
      * @param expectedValues expected values (properties for the values are used as keys)
      */
-    public static void validateEnhancement(TripleCollection enhancements, UriRef enhancement, Map<UriRef,Resource> expectedValues){
+    public static void validateEnhancement(Graph enhancements, IRI enhancement, Map<IRI,RDFTerm> expectedValues){
         //validate the rdf:type
         Iterator<Triple> rdfTypeIterator = enhancements.filter(enhancement, RDF_TYPE, ENHANCER_ENHANCEMENT);
         assertTrue("Parsed Enhancement "+enhancement +" is missing the fise:Enhancement type ",
@@ -581,28 +579,28 @@
         //validate the creator
         Iterator<Triple> creatorIterator = enhancements.filter(enhancement, Properties.DC_CREATOR, null);
         assertTrue("Enhancements MUST HAVE a creator",creatorIterator.hasNext());
-        Resource creatorResource = creatorIterator.next().getObject();
+        RDFTerm creatorResource = creatorIterator.next().getObject();
         assertTrue("Creator MUST BE an TypedLiteral (found '"+creatorResource.getClass().getSimpleName()+"')!",
-            creatorResource instanceof TypedLiteral || creatorResource instanceof UriRef);
-        if(creatorResource instanceof TypedLiteral){
+            creatorResource instanceof Literal || creatorResource instanceof IRI);
+        if(creatorResource instanceof Literal){
             assertEquals("The dc:creator value MUST be of dataType xsd:string",
-                XSD.string,((TypedLiteral)creatorResource).getDataType());
+                XSD.string,((Literal)creatorResource).getDataType());
         }
-        Resource expectedCreator = expectedValues.get(Properties.DC_CREATOR);
+        RDFTerm expectedCreator = expectedValues.get(Properties.DC_CREATOR);
         if(expectedCreator != null){
             assertEquals("Creator is not the expected value!",expectedCreator, creatorResource);
         }
         assertFalse("only a single creater MUST BE present for an Enhancement", creatorIterator.hasNext());
         //validate the optional contributor
-        Resource expectedContributor = expectedValues.get(DCTERMS.contributor);
+        RDFTerm expectedContributor = expectedValues.get(DCTERMS.contributor);
         Iterator<Triple> contributorIterator = enhancements.filter(enhancement, DCTERMS.contributor, null);
         while(contributorIterator.hasNext()){
-            Resource contributorResource = contributorIterator.next().getObject();
-            assertTrue("Creator MUST BE an TypedLiteral or an UriRef (found '"+contributorResource.getClass().getSimpleName()+"')!",
-                contributorResource instanceof TypedLiteral || contributorResource instanceof UriRef);
-            if(contributorResource instanceof TypedLiteral){
+            RDFTerm contributorResource = contributorIterator.next().getObject();
+            assertTrue("Creator MUST BE an TypedLiteral or an IRI (found '"+contributorResource.getClass().getSimpleName()+"')!",
+                contributorResource instanceof Literal || contributorResource instanceof IRI);
+            if(contributorResource instanceof Literal){
                 assertEquals("The dc:contributor value MUST be of dataType xsd:string",
-                    XSD.string,((TypedLiteral)contributorResource).getDataType());
+                    XSD.string,((Literal)contributorResource).getDataType());
             }
-            if(expectedContributor != null && expectedContributor.equals(expectedContributor)){
+            if(expectedContributor != null && expectedContributor.equals(contributorResource)){
                 expectedContributor = null; //found
@@ -613,11 +611,11 @@
         //validate creation date
         Iterator<Triple> createdIterator = enhancements.filter(enhancement, Properties.DC_CREATED, null);
         assertTrue("The creation date MUST BE present for an Enhancement", createdIterator.hasNext());
-        Resource createdResource = createdIterator.next().getObject();
-        assertTrue("Creation date MUST be a typed Literal", createdResource instanceof TypedLiteral);
+        RDFTerm createdResource = createdIterator.next().getObject();
+        assertTrue("Creation date MUST be a typed Literal", createdResource instanceof Literal);
         assertTrue("Creation date MUST have the dataTyoe xsd:dateTime",
-            XSD.dateTime.equals(((TypedLiteral)createdResource).getDataType()));
-        Date creationDate = LiteralFactory.getInstance().createObject(Date.class, (TypedLiteral)createdResource);
+            XSD.dateTime.equals(((Literal)createdResource).getDataType()));
+        Date creationDate = LiteralFactory.getInstance().createObject(Date.class, (Literal)createdResource);
         assertNotNull("Unable to convert "+createdResource+" to a Java Date object",creationDate);
         Date now = new Date();
         assertTrue("CreationDate MUST NOT be in the Future",now.after(creationDate) || now.equals(creationDate));
@@ -625,21 +623,21 @@
         //validate optional modification date if present
         Iterator<Triple> modDateIterator = enhancements.filter(enhancement, DCTERMS.modified, null);
         while(modDateIterator.hasNext()){
-            Resource modDateResurce = modDateIterator.next().getObject();
-            assertTrue("Creation date MUST be a typed Literal", modDateResurce instanceof TypedLiteral);
+            RDFTerm modDateResurce = modDateIterator.next().getObject();
+            assertTrue("Creation date MUST be a typed Literal", modDateResurce instanceof Literal);
             assertTrue("Creation date MUST have the dataTyoe xsd:dateTime",
-                XSD.dateTime.equals(((TypedLiteral)modDateResurce).getDataType()));
-            Date modDate = LiteralFactory.getInstance().createObject(Date.class, (TypedLiteral)modDateResurce);
+                XSD.dateTime.equals(((Literal)modDateResurce).getDataType()));
+            Date modDate = LiteralFactory.getInstance().createObject(Date.class, (Literal)modDateResurce);
             assertNotNull("Unable to convert "+modDateResurce+" to a Java Date object",modDate);
             assertTrue("CreationDate MUST NOT be in the Future",new Date().after(modDate));
         }
         //validate the fise:extracted-from
         Iterator<Triple> extractedIterator = enhancements.filter(enhancement, Properties.ENHANCER_EXTRACTED_FROM, null);
         assertTrue("The fise:extracted-from property MUST BE present for an Enhancement", extractedIterator.hasNext());
-        Resource extractedResource = extractedIterator.next().getObject();
-        assertTrue("Creator MUST BE an UriRef (found '"+extractedResource.getClass().getSimpleName()+"')!",
-            extractedResource instanceof UriRef);
-        Resource expectedExtractedFrom = expectedValues.get(Properties.ENHANCER_EXTRACTED_FROM);
+        RDFTerm extractedResource = extractedIterator.next().getObject();
+        assertTrue("Creator MUST BE an IRI (found '"+extractedResource.getClass().getSimpleName()+"')!",
+            extractedResource instanceof IRI);
+        RDFTerm expectedExtractedFrom = expectedValues.get(Properties.ENHANCER_EXTRACTED_FROM);
         if(expectedExtractedFrom != null){
             assertEquals("fise:extracted-from has not the expected value!",expectedExtractedFrom, extractedResource);
         }
@@ -647,27 +645,27 @@
         //validate that all dc:requires and dc:relation link to resources of type fise:Enhancement
         Iterator<Triple> relatedIterator = enhancements.filter(enhancement, Properties.DC_RELATION, null);
         while(relatedIterator.hasNext()){
-            Resource relatedResource = relatedIterator.next().getObject();
-            assertTrue("dc:relation values MUST BE URIs", relatedResource instanceof UriRef);
-            Iterator<Triple> relatedTypes = enhancements.filter((UriRef)relatedResource, RDF_TYPE, TechnicalClasses.ENHANCER_ENHANCEMENT);
+            RDFTerm relatedResource = relatedIterator.next().getObject();
+            assertTrue("dc:relation values MUST BE URIs", relatedResource instanceof IRI);
+            Iterator<Triple> relatedTypes = enhancements.filter((IRI)relatedResource, RDF_TYPE, TechnicalClasses.ENHANCER_ENHANCEMENT);
             assertTrue("dc:relation Resources MUST BE of rdf:type fise:Enhancement",relatedTypes.hasNext());
         }
         Iterator<Triple> requiresIterator = enhancements.filter(enhancement, Properties.DC_REQUIRES, null);
         while(requiresIterator.hasNext()){
-            Resource requiredResource = requiresIterator.next().getObject();
-            assertTrue("dc:requires values MUST BE URIs", requiredResource instanceof UriRef);
-            Iterator<Triple> relatedTypes = enhancements.filter((UriRef)requiredResource, RDF_TYPE, TechnicalClasses.ENHANCER_ENHANCEMENT);
+            RDFTerm requiredResource = requiresIterator.next().getObject();
+            assertTrue("dc:requires values MUST BE URIs", requiredResource instanceof IRI);
+            Iterator<Triple> relatedTypes = enhancements.filter((IRI)requiredResource, RDF_TYPE, TechnicalClasses.ENHANCER_ENHANCEMENT);
             assertTrue("dc:requires Resources MUST BE of rdf:type fise:Enhancement",relatedTypes.hasNext());
         }
-        //validate that fise:confidence has [0..1] values and are of type xsd:float
+        //validate that fise:confidence has [0..1] values of type xsd:double
         Iterator<Triple> confidenceIterator = enhancements.filter(enhancement,Properties.ENHANCER_CONFIDENCE,null);
         boolean confidenceRequired = expectedValues.containsKey(Properties.ENHANCER_CONFIDENCE);
         if(confidenceIterator.hasNext()){ //confidence is optional
-            Resource confidenceResource = confidenceIterator.next().getObject();
-            assertTrue("fise:confidence value MUST BE a TypedLiteral", confidenceResource instanceof TypedLiteral);
+            RDFTerm confidenceResource = confidenceIterator.next().getObject();
+            assertTrue("fise:confidence value MUST BE a TypedLiteral", confidenceResource instanceof Literal);
             assertTrue("fise:confidence MUST BE xsd:double",
-                XSD.double_.equals(((TypedLiteral)confidenceResource).getDataType()));
-            Double confidence = LiteralFactory.getInstance().createObject(Double.class, (TypedLiteral)confidenceResource);
+                XSD.double_.equals(((Literal)confidenceResource).getDataType()));
+            Double confidence = LiteralFactory.getInstance().createObject(Double.class, (Literal)confidenceResource);
             assertNotNull("Unable to convert TypedLiteral '"+confidenceResource+"' to a Java Double value",confidence);
             assertFalse("fise:confidence MUST HAVE [0..1] values",confidenceIterator.hasNext());
             //STANBOL-630: confidence [0..1]
@@ -677,7 +675,7 @@
             assertTrue("fise:confidence MUST BE >= 0 (value= '"+confidence
                     +"',enhancement "+enhancement+")",
                     0.0 <= confidence.doubleValue());
-            Resource expectedConfidence = expectedValues.get(Properties.ENHANCER_CONFIDENCE);
+            RDFTerm expectedConfidence = expectedValues.get(Properties.ENHANCER_CONFIDENCE);
             if(expectedConfidence != null){
                 assertEquals("The fise:confidence for enhancement "
                     +enhancement+" does not have the expected value", expectedConfidence,confidenceResource);
@@ -688,10 +686,10 @@
         }
-        //validate that the (optional) dc:type is an URI and that there are not multiple values
+        //validate that the (optional) dc:type is a URI and that it has at most a single value
         Iterator<Triple> dcTypeIterator = enhancements.filter(enhancement, Properties.DC_TYPE, null);
-        Resource expectedDcType = expectedValues.get(Properties.DC_TYPE);
+        RDFTerm expectedDcType = expectedValues.get(Properties.DC_TYPE);
         if(dcTypeIterator.hasNext()){ //dc:type is optional
-            Resource dcTypeResource = dcTypeIterator.next().getObject();
-            assertTrue("dc:type values MUST BE URIs",dcTypeResource instanceof UriRef);
+            RDFTerm dcTypeResource = dcTypeIterator.next().getObject();
+            assertTrue("dc:type values MUST BE URIs",dcTypeResource instanceof IRI);
             if(expectedDcType != null) {
                 assertEquals("The dc:type value is not the expected "+expectedDcType+"!",
                     expectedDcType,dcTypeResource);
@@ -701,15 +699,15 @@
-        //validate the fise:confidence-value introduced by STANBOL-631
+        //validate the fise:confidence-level introduced by STANBOL-631
         Iterator<Triple> confidenceLevelIterator = enhancements.filter(
             enhancement, Properties.ENHANCER_CONFIDENCE_LEVEL, null);
-        Resource expectedConfidenceValue = expectedValues.get(Properties.ENHANCER_CONFIDENCE_LEVEL);
+        RDFTerm expectedConfidenceValue = expectedValues.get(Properties.ENHANCER_CONFIDENCE_LEVEL);
         if(confidenceLevelIterator.hasNext()){
-            Resource confidenceLevelResource = confidenceLevelIterator.next().getObject();
+            RDFTerm confidenceLevelResource = confidenceLevelIterator.next().getObject();
             assertTrue("fise:confidence-level values MUST BE URIs but found "+confidenceLevelResource,
-                confidenceLevelResource instanceof UriRef);
+                confidenceLevelResource instanceof IRI);
             assertNotNull("The fise:confidence-level value MUST BE one of the four "
                 + "values defined in the ontology! (found: "+ confidenceLevelResource
                 + " | enhancement " + enhancement+")",
-                CONFIDENCE_LEVEL_ENUM.getConfidenceLevel((UriRef)confidenceLevelResource));
+                CONFIDENCE_LEVEL_ENUM.getConfidenceLevel((IRI)confidenceLevelResource));
             assertFalse("The fise:confidence-level property is functional and MUST "
                 + "HAVE only a single value (enhancement " +
                     enhancement+")!",confidenceLevelIterator.hasNext());
@@ -731,13 +729,13 @@
      * @return the number of found and validated TopicAnnotations.
      */
     @SuppressWarnings("unchecked")
-    public static int validateAllTopicAnnotations(TripleCollection enhancements,Map<UriRef,Resource> expectedValues) {
+    public static int validateAllTopicAnnotations(Graph enhancements,Map<IRI,RDFTerm> expectedValues) {
         expectedValues = expectedValues == null ? Collections.EMPTY_MAP : expectedValues;
         Iterator<Triple> topicAnnotationIterator = enhancements.filter(null,
                 RDF_TYPE, ENHANCER_TOPICANNOTATION);
         int topicAnnotationCount = 0;
         while (topicAnnotationIterator.hasNext()) {
-            UriRef topicAnnotation = (UriRef) topicAnnotationIterator.next().getSubject();
+            IRI topicAnnotation = (IRI) topicAnnotationIterator.next().getSubject();
             // test if selected Text is added
             validateTopicAnnotation(enhancements, topicAnnotation, 
                 expectedValues);
@@ -750,12 +748,12 @@
      * Checks if a fise:TopicAnnotation is valid as defined by 
-     * <a herf="https://issues.apache.org/jira/browse/STANBOL-617">STANBOL-617</a>. 
+     * <a href="https://issues.apache.org/jira/browse/STANBOL-617">STANBOL-617</a>. 
      * NOTE that this also validates all fise:Enhancement related requirements by 
-     * calling {@link #validateEnhancement(TripleCollection, UriRef, Map)}
+     * calling {@link #validateEnhancement(Graph, IRI, Map)}
      * @param enhancements the enhancements graph
      * @param topicAnnotation the topic annotation to validate
      * @param expectedValues expected values (properties for the values are used as keys)
      */
-    public static void validateTopicAnnotation(TripleCollection enhancements, UriRef topicAnnotation, Map<UriRef,Resource> expectedValues){
+    public static void validateTopicAnnotation(Graph enhancements, IRI topicAnnotation, Map<IRI,RDFTerm> expectedValues){
         //validate the rdf:type
         Iterator<Triple> rdfTypeIterator = enhancements.filter(topicAnnotation, RDF_TYPE, ENHANCER_TOPICANNOTATION);
         assertTrue("Parsed Enhancement "+topicAnnotation +" is missing the fise:TopicAnnotation type ",
@@ -771,7 +769,7 @@
         assertTrue(relationToTextAnnotationIterator.hasNext());
         while (relationToTextAnnotationIterator.hasNext()) {
             // test if the referred annotations are text annotations
-            UriRef referredTextAnnotation = (UriRef) relationToTextAnnotationIterator.next().getObject();
+            IRI referredTextAnnotation = (IRI) relationToTextAnnotationIterator.next().getObject();
             assertTrue(enhancements.filter(referredTextAnnotation, RDF_TYPE,
                     ENHANCER_TEXTANNOTATION).hasNext());
         }
@@ -780,11 +778,11 @@
         // fise:EntityAnnotations this property is NOT required - cardinality [0..*]
         Iterator<Triple> entityReferenceIterator = enhancements.filter(topicAnnotation,
                 ENHANCER_ENTITY_REFERENCE, null);
-        Resource expectedReferencedEntity = expectedValues.get(ENHANCER_ENTITY_REFERENCE);
+        RDFTerm expectedReferencedEntity = expectedValues.get(ENHANCER_ENTITY_REFERENCE);
         while(entityReferenceIterator.hasNext()){ //check possible multiple references
-            Resource entityReferenceResource = entityReferenceIterator.next().getObject();
+            RDFTerm entityReferenceResource = entityReferenceIterator.next().getObject();
-            // test if the reference is an URI
-            assertTrue("fise:entity-reference value MUST BE of URIs",entityReferenceResource instanceof UriRef);
+            // test if the reference is a URI
+            assertTrue("fise:entity-reference values MUST BE URIs",entityReferenceResource instanceof IRI);
             if(expectedReferencedEntity != null && expectedReferencedEntity.equals(entityReferenceResource)){
                 expectedReferencedEntity = null; //found
             }
@@ -795,11 +793,11 @@
         //test if the entity label is set (required)
         Iterator<Triple> entityLabelIterator = enhancements.filter(topicAnnotation, ENHANCER_ENTITY_LABEL, null);
         assertTrue(entityLabelIterator.hasNext());
-        Resource expectedEntityLabel = expectedValues.get(ENHANCER_ENTITY_LABEL);
+        RDFTerm expectedEntityLabel = expectedValues.get(ENHANCER_ENTITY_LABEL);
         while(entityLabelIterator.hasNext()){
-            Resource entityLabelResource =  entityLabelIterator.next().getObject();
+            RDFTerm entityLabelResource =  entityLabelIterator.next().getObject();
             assertTrue("fise:entity-label values MUST BE PlainLiterals (EntityAnnotation: "+topicAnnotation+")!",
-                entityLabelResource instanceof PlainLiteral);
+                entityLabelResource instanceof Literal);
             if(expectedEntityLabel != null && expectedEntityLabel.equals(entityLabelResource)){
                 expectedEntityLabel = null;
             }
@@ -809,10 +807,10 @@
         
         // test fise:entity-type(s). NOTE: this is not required - cardinality [0..*]
         Iterator<Triple> entityTypeIterator = enhancements.filter(topicAnnotation, Properties.ENHANCER_ENTITY_TYPE, null);
-        Resource expectedEntityType = expectedValues.get(Properties.ENHANCER_ENTITY_TYPE);
+        RDFTerm expectedEntityType = expectedValues.get(Properties.ENHANCER_ENTITY_TYPE);
         if(entityTypeIterator.hasNext()){
-            Resource entityTypeResource = entityTypeIterator.next().getObject();
-            assertTrue("fise:entity-type values MUST BE URIs",entityTypeResource instanceof UriRef);
+            RDFTerm entityTypeResource = entityTypeIterator.next().getObject();
+            assertTrue("fise:entity-type values MUST BE URIs",entityTypeResource instanceof IRI);
             if(expectedEntityType != null && expectedEntityType.equals(entityTypeResource)){
                 expectedEntityType = null; //found
             }
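
For orientation, and not part of this patch: every change in the validator above follows the same type mapping. The old Resource base type becomes RDFTerm, NonLiteral becomes BlankNodeOrIRI, UriRef becomes IRI, and the PlainLiteral/TypedLiteral split collapses into a single Literal interface that always exposes getLexicalForm() and getDataType(). A minimal sketch of that mapping (the describe helper is hypothetical):

    import org.apache.clerezza.commons.rdf.BlankNode;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;

    public final class RdfTermMappingSketch {
        /** Hypothetical helper: classifies a term the way the asserts above do. */
        static String describe(RDFTerm term) {
            if (term instanceof IRI) {           // was: term instanceof UriRef
                return "IRI <" + ((IRI) term).getUnicodeString() + ">";
            }
            if (term instanceof Literal) {       // was: PlainLiteral or TypedLiteral
                Literal lit = (Literal) term;
                return "Literal '" + lit.getLexicalForm() + "'^^" + lit.getDataType();
            }
            if (term instanceof BlankNode) {     // was: term instanceof BNode
                return "BlankNode";
            }
            return "unknown RDFTerm";
        }
    }

Because instanceof Literal is weaker than the old instanceof TypedLiteral, the asserts above additionally compare getDataType() against the expected XSD type where the data type matters (fise:start, fise:end, dc:created, fise:confidence).
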
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/reader/ContentItemReader.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/reader/ContentItemReader.java
index e9fa7cf..cbe0848 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/reader/ContentItemReader.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/reader/ContentItemReader.java
@@ -63,11 +63,11 @@
 import javax.ws.rs.ext.MessageBodyReader;
 import javax.ws.rs.ext.Provider;
 
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.commons.fileupload.FileItemIterator;
 import org.apache.commons.fileupload.FileItemStream;
@@ -80,7 +80,7 @@
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.ContentItemFactory;
@@ -147,7 +147,7 @@
                                 InputStream entityStream) throws IOException, WebApplicationException {
         //boolean withMetadata = withMetadata(httpHeaders);
         ContentItem contentItem = null;
-        UriRef contentItemId = getContentItemId();
+        IRI contentItemId = getContentItemId();
         if(log.isTraceEnabled()){
             //NOTE: enabling TRACE level logging will copy the parsed content
             //      into a BYTE array
@@ -166,7 +166,7 @@
         if(mediaType.isCompatible(MULTIPART)){
             log.debug(" - parse Multipart MIME ContentItem");
             //try to read ContentItem from "multipart/from-data"
-            MGraph metadata = null;
+            Graph metadata = null;
             FileItemIterator fileItemIterator;
             try {
                 fileItemIterator = fu.getItemIterator(new MessageBodyReaderContext(entityStream, mediaType));
@@ -183,9 +183,9 @@
                         //the metadata may define the ID for the contentItem
                         //only used if not parsed as query param
                         if(contentItemId == null && fis.getName() != null && !fis.getName().isEmpty()){
-                            contentItemId = new UriRef(fis.getName());
+                            contentItemId = new IRI(fis.getName());
                         }
-                        metadata = new IndexedMGraph();
+                        metadata = new IndexedGraph();
                         try {
                             getParser().parse(metadata, fis.openStream(), fis.getContentType());
                         } catch (Exception e) {
@@ -254,7 +254,7 @@
                                 		"MUST define the contentParts URI as" +
                                 		"'name' of the MIME part!").build());
                         }
-                        MGraph graph = new IndexedMGraph();
+                        Graph graph = new IndexedGraph();
                         try {
                             getParser().parse(graph, fis.openStream(), fis.getContentType());
                         } catch (Exception e) {
@@ -265,7 +265,7 @@
                                         fis.getName(),fis.getContentType()))
                                 .build());
                         }
-                        UriRef contentPartId = new UriRef(fis.getFieldName());
+                        IRI contentPartId = new IRI(fis.getFieldName());
                         contentItem.addPart(contentPartId, graph);
                     }
                 }
@@ -316,8 +316,8 @@
      * @param lang the parsed language
      */
     private void createParsedLanguageAnnotation(ContentItem ci, String lang){
-        MGraph m = ci.getMetadata();
-        UriRef la = new UriRef("urn:enhancement-"+ EnhancementEngineHelper.randomUUID());
+        Graph m = ci.getMetadata();
+        IRI la = new IRI("urn:enhancement-"+ EnhancementEngineHelper.randomUUID());
         //add the fise:Enhancement information
         m.add(new TripleImpl(la, RDF_TYPE, ENHANCER_ENHANCEMENT));
         m.add(new TripleImpl(la, RDF_TYPE, ENHANCER_TEXTANNOTATION));
@@ -335,7 +335,7 @@
      * {@link #request}.
      * @return the parsed URI or <code>null</code> if none
      */
-    private UriRef getContentItemId() {
+    private IRI getContentItemId() {
         //NOTE: check for request NULL is needed because of unit tests
         if (uriInfo == null) return null;
         URI uri = uriInfo.getRequestUri();
@@ -377,7 +377,7 @@
                        + "of the " + source, Response.Status.BAD_REQUEST);
             }
         }
-        return ciUri == null ? null : new UriRef(ciUri);
+        return ciUri == null ? null : new IRI(ciUri);
     }
     /**
      * Getter for the <code>Content-Language</code> header
@@ -458,7 +458,7 @@
      * @throws FileUploadException if the parsed contents are not correctly
      * encoded Multipart MIME
      */
-    private ContentItem createContentItem(UriRef id, MGraph metadata, FileItemStream content,Set<String> parsedContentParts) throws IOException, FileUploadException {
+    private ContentItem createContentItem(IRI id, Graph metadata, FileItemStream content,Set<String> parsedContentParts) throws IOException, FileUploadException {
         MediaType partContentType = MediaType.valueOf(content.getContentType());
         ContentItem contentItem = null;
         ContentItemFactory ciFactory = getContentItemFactory();
@@ -479,13 +479,13 @@
                 } else {
                     log.debug("  - create Blob for content (type:{})", fis.getContentType());
                     Blob blob = ciFactory.createBlob(new StreamSource(fis.openStream(), fis.getContentType()));
-                    UriRef contentPartId = null;
+                    IRI contentPartId = null;
                     if(fis.getFieldName() != null && !fis.getFieldName().isEmpty()){
-                        contentPartId = new UriRef(fis.getFieldName());
+                        contentPartId = new IRI(fis.getFieldName());
                     } else {
                         //generating a random ID might break metadata 
                         //TODO maybe we should throw an exception instead
-                        contentPartId = new UriRef("urn:contentpart:"+ randomUUID());
+                        contentPartId = new IRI("urn:contentpart:"+ randomUUID());
                     }
                     log.debug("    ... add Blob {} to ContentItem {} with content (type:{})",
                         new Object[]{contentPartId, id, fis.getContentType()});
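
For orientation, and not part of this patch: the reader now parses RDF payloads directly into the mutable Graph returned by new IndexedGraph() and identifies content parts with IRI instead of UriRef. A self-contained sketch of that pattern, assuming a Turtle parsing provider is on the classpath (the URN and the Turtle content are made up):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.core.serializedform.Parser;
    import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    public class MetadataParsingSketch {
        public static void main(String[] args) {
            String turtle = "<urn:content-item-1> "
                + "<http://fise.iks-project.eu/ontology/extracted-from> "
                + "<urn:content-item-1> .";
            InputStream in = new ByteArrayInputStream(turtle.getBytes(StandardCharsets.UTF_8));
            Graph metadata = new IndexedGraph();               // was: new IndexedMGraph()
            Parser.getInstance().parse(metadata, in, SupportedFormat.TURTLE);
            IRI contentItemId = new IRI("urn:content-item-1"); // was: new UriRef(...)
            System.out.println(metadata.filter(contentItemId, null, null).hasNext()); // true
        }
    }
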
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/AbstractEnhancerResource.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/AbstractEnhancerResource.java
index 190ce23..53f4f00 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/AbstractEnhancerResource.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/AbstractEnhancerResource.java
@@ -51,9 +51,8 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.UriInfo;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
 import org.apache.stanbol.commons.web.base.resource.LayoutConfiguration;
 import org.apache.stanbol.commons.web.base.resource.TemplateLayoutConfiguration;
@@ -186,13 +186,13 @@
         }
         reqProp.put(OMIT_PARSED_CONTENT, omitParsed);
         if(contentParts != null && !contentParts.isEmpty()){
-            Set<UriRef> outputContentParts = new HashSet<UriRef>();
+            Set<IRI> outputContentParts = new HashSet<IRI>();
             for(String contentPartUri : contentParts){
                 if(contentPartUri != null && !contentPartUri.isEmpty()){
                     if("*".equals(contentPartUri)){
-                        outputContentParts.add(null); //indicated wildcard
+                        outputContentParts.add(null); //indicates the wildcard
                     } else {
-                        outputContentParts.add(new UriRef(contentPartUri));
+                        outputContentParts.add(new IRI(contentPartUri));
                     }
                 }
             }
@@ -230,11 +230,11 @@
         if (jobManager != null) {
             jobManager.enhanceContent(ci, getChain());
         }
-        MGraph graph = ci.getMetadata();
+        Graph graph = ci.getMetadata();
         Boolean includeExecutionMetadata = RequestPropertiesHelper.isIncludeExecutionMetadata(reqProp);
         if (includeExecutionMetadata != null && includeExecutionMetadata.booleanValue()) {
             try {
-                graph.addAll(ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, TripleCollection.class));
+                graph.addAll(ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class));
             } catch (NoSuchPartException e) {
                 // no executionMetadata available
             }
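
A sketch of the content-part contract after this change, not taken from the patch: RDF parts such as the execution metadata are now requested as Graph.class, the mutable graph type of the 1.0 API, instead of TripleCollection.class. The mergePart helper and its partId parameter are illustrative only:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;
    import org.apache.stanbol.enhancer.servicesapi.NoSuchPartException;

    public final class PartMergeSketch {
        /** Illustrative helper: copy an RDF content part into the metadata graph. */
        static void mergePart(ContentItem ci, IRI partId) {
            Graph metadata = ci.getMetadata();                    // was: MGraph
            try {
                metadata.addAll(ci.getPart(partId, Graph.class)); // was: TripleCollection.class
            } catch (NoSuchPartException e) {
                // nothing to merge: the ContentItem has no such part
            }
        }
    }
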
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ChainsRootResource.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ChainsRootResource.java
index f759693..4db86d9 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ChainsRootResource.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ChainsRootResource.java
@@ -45,8 +45,8 @@
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.ResponseBuilder;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -180,7 +180,7 @@
     @Produces(value={JSON_LD, APPLICATION_JSON,N3,N_TRIPLE,RDF_JSON,RDF_XML,TURTLE,X_TURTLE})
     public Response getEngines(@Context HttpHeaders headers){
         String rootUrl = uriInfo.getBaseUriBuilder().path(getRootUrl()).build().toString();
-        MGraph graph = new SimpleMGraph();
+        Graph graph = new SimpleGraph();
         addActiveChains(getActiveChains(), chainTracker.getDefault(),graph,rootUrl);
         ResponseBuilder res = Response.ok(graph);
         //addCORSOrigin(servletContext,res, headers);
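
For completeness, a runnable sketch of the graph-building pattern getEngines now uses, not part of the patch (the chain IRI and the rdfs:label property are examples only, not what addActiveChains actually writes):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class ResponseGraphSketch {
        public static void main(String[] args) {
            Graph graph = new SimpleGraph();                     // was: new SimpleMGraph()
            IRI chain = new IRI("http://localhost:8080/enhancer/chain/default");
            graph.add(new TripleImpl(chain,
                new IRI("http://www.w3.org/2000/01/rdf-schema#label"),
                new PlainLiteralImpl("default")));
            System.out.println(graph.size());                   // -> 1
        }
    }
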
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java
index 3fa5a8f..863710d 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/ContentItemResource.java
@@ -67,22 +67,21 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.UriInfo;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.commons.lang.StringUtils;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.viewable.Viewable;
 import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
 import org.apache.stanbol.commons.web.base.resource.LayoutConfiguration;
@@ -107,13 +106,13 @@
     private final Logger log = LoggerFactory.getLogger(getClass());
 
-    // TODO make this configurable trough a property
+    // TODO make this configurable through a property
-    public static final UriRef SUMMARY = new UriRef("http://www.w3.org/2000/01/rdf-schema#comment");
+    public static final IRI SUMMARY = new IRI("http://www.w3.org/2000/01/rdf-schema#comment");
 
-    // TODO make this configurable trough a property
+    // TODO make this configurable through a property
-    public static final UriRef THUMBNAIL = new UriRef("http://dbpedia.org/ontology/thumbnail");
-    public static final UriRef DEPICTION = new UriRef("http://xmlns.com/foaf/0.1/depiction");
+    public static final IRI THUMBNAIL = new IRI("http://dbpedia.org/ontology/thumbnail");
+    public static final IRI DEPICTION = new IRI("http://xmlns.com/foaf/0.1/depiction");
 
-    public final Map<UriRef,String> defaultThumbnails = new HashMap<UriRef,String>();
+    public final Map<IRI,String> defaultThumbnails = new HashMap<IRI,String>();
 
     protected ContentItem contentItem;
 
@@ -140,10 +139,10 @@
      * {@link Properties#ENHANCER_SELECTED_TEXT}.
      * This map is initialised by {@link #initOccurrences()}.
      */
-    protected Map<UriRef,Map<EntityExtractionSummary,EntityExtractionSummary>> extractionsByTypeMap = 
-        new HashMap<UriRef,Map<EntityExtractionSummary,EntityExtractionSummary>>();
+    protected Map<IRI,Map<EntityExtractionSummary,EntityExtractionSummary>> extractionsByTypeMap = 
+        new HashMap<IRI,Map<EntityExtractionSummary,EntityExtractionSummary>>();
 
-    private MGraph executionMetadata;
+    private Graph executionMetadata;
 
     private ChainExecution chainExecution;
 
@@ -169,7 +168,7 @@
         this.enhancementException = enhancementException;
         if (localId != null) {
             URI rawURI = uriInfo.getBaseUriBuilder().path(storePath).path("raw").path(localId).build();
-            Entry<UriRef,Blob> plainTextContentPart = ContentItemHelper.getBlob(contentItem, Collections.singleton("text/plain"));
+            Entry<IRI,Blob> plainTextContentPart = ContentItemHelper.getBlob(contentItem, Collections.singleton("text/plain"));
             if (plainTextContentPart != null) {
                 this.textContent = ContentItemHelper.getText(plainTextContentPart.getValue());
             } 
@@ -191,16 +190,16 @@
         }
         //init ExecutionMetadata
         try {
-            executionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
+            executionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
         } catch(NoSuchPartException e){
             executionMetadata = null;
         }
         if(executionMetadata != null){
-            NonLiteral ce = ExecutionMetadataHelper.getChainExecution(executionMetadata, ci.getUri());
+            BlankNodeOrIRI ce = ExecutionMetadataHelper.getChainExecution(executionMetadata, ci.getUri());
             if(ce != null){
                 chainExecution = new ChainExecution(executionMetadata, ce);
                 engineExecutions = new ArrayList<Execution>();
-                for(NonLiteral ex : ExecutionMetadataHelper.getExecutions(executionMetadata, ce)){
+                for(BlankNodeOrIRI ex : ExecutionMetadataHelper.getExecutions(executionMetadata, ce)){
                     engineExecutions.add(new Execution(chainExecution,executionMetadata, ex));
                 }
                 Collections.sort(engineExecutions);
@@ -275,8 +274,8 @@
     /**
-     * Used to print occurrences with other types than the natively supported
+     * Used to print occurrences with types other than the natively supported ones
      */
-    public Collection<UriRef> getOtherOccurrencyTypes(){
-        Set<UriRef>  types = new HashSet<UriRef>(extractionsByTypeMap.keySet());
+    public Collection<IRI> getOtherOccurrencyTypes(){
+        Set<IRI>  types = new HashSet<IRI>(extractionsByTypeMap.keySet());
         types.remove(DBPEDIA_PERSON);
         types.remove(DBPEDIA_ORGANISATION);
         types.remove(DBPEDIA_PLACE);
@@ -285,7 +284,7 @@
         types.remove(null); //other
         return types;
     }
-    public static String extractLabel(UriRef uri){
+    public static String extractLabel(IRI uri){
         String fullUri = uri.getUnicodeString();
         int index = Math.max(fullUri.lastIndexOf('#'),fullUri.lastIndexOf('/'));
         index = Math.max(index, fullUri.lastIndexOf(':'));
@@ -296,7 +295,7 @@
             return uri.getUnicodeString();
         }
     }
-    public Collection<EntityExtractionSummary> getOccurrences(UriRef type){
+    public Collection<EntityExtractionSummary> getOccurrences(IRI type){
         Map<EntityExtractionSummary,EntityExtractionSummary> typeMap = extractionsByTypeMap.get(type);
         Collection<EntityExtractionSummary> typeOccurrences;
         if(typeMap != null){
@@ -340,14 +339,14 @@
     }
 
     private void initOccurrences() {
-        MGraph graph = contentItem.getMetadata();
+        Graph graph = contentItem.getMetadata();
         LiteralFactory lf = LiteralFactory.getInstance();
-        Map<UriRef,Collection<NonLiteral>> suggestionMap = new HashMap<UriRef,Collection<NonLiteral>>();
+        Map<IRI,Collection<BlankNodeOrIRI>> suggestionMap = new HashMap<IRI,Collection<BlankNodeOrIRI>>();
         // 1) get Entity Annotations
-        Map<NonLiteral,Map<EAProps,Object>> entitySuggestionMap = new HashMap<NonLiteral,Map<EAProps,Object>>();
+        Map<BlankNodeOrIRI,Map<EAProps,Object>> entitySuggestionMap = new HashMap<BlankNodeOrIRI,Map<EAProps,Object>>();
         Iterator<Triple> entityAnnotations = graph.filter(null, RDF.type, ENHANCER_ENTITYANNOTATION);
         while(entityAnnotations.hasNext()){
-            NonLiteral entityAnnotation = entityAnnotations.next().getSubject();
+            BlankNodeOrIRI entityAnnotation = entityAnnotations.next().getSubject();
-            //to avoid multiple lookups (e.g. if one entityAnnotation links to+
+            //to avoid multiple lookups (e.g. if one entityAnnotation links to
             //several TextAnnotations) we cache the data in an intermediate Map
             Map<EAProps,Object> eaData = new EnumMap<EAProps,Object>(EAProps.class);
@@ -356,12 +355,12 @@
             eaData.put(EAProps.confidence, EnhancementEngineHelper.get(
                 graph, entityAnnotation, ENHANCER_CONFIDENCE, Double.class, lf));
             entitySuggestionMap.put(entityAnnotation, eaData);
-            Iterator<UriRef> textAnnotations = getReferences(graph, entityAnnotation, DC_RELATION);
+            Iterator<IRI> textAnnotations = getReferences(graph, entityAnnotation, DC_RELATION);
             while(textAnnotations.hasNext()){
-                UriRef textAnnotation = textAnnotations.next();
-                Collection<NonLiteral> suggestions = suggestionMap.get(textAnnotation);
+                IRI textAnnotation = textAnnotations.next();
+                Collection<BlankNodeOrIRI> suggestions = suggestionMap.get(textAnnotation);
                 if(suggestions == null){
-                    suggestions = new ArrayList<NonLiteral>();
+                    suggestions = new ArrayList<BlankNodeOrIRI>();
                     suggestionMap.put(textAnnotation, suggestions);
                 }
                 suggestions.add(entityAnnotation);
@@ -370,7 +369,7 @@
         // 2) get the TextAnnotations
         Iterator<Triple> textAnnotations = graph.filter(null, RDF.type, ENHANCER_TEXTANNOTATION);
         while(textAnnotations.hasNext()){
-            NonLiteral textAnnotation = textAnnotations.next().getSubject();
+            BlankNodeOrIRI textAnnotation = textAnnotations.next().getSubject();
             //we need to process those to show multiple mentions
 //            if (graph.filter(textAnnotation, DC_RELATION, null).hasNext()) {
 //                // this is not the most specific occurrence of this name: skip
@@ -388,12 +387,12 @@
                 ENHANCER_END,Integer.class,lf);
             Double confidence = EnhancementEngineHelper.get(graph, textAnnotation, 
                 ENHANCER_CONFIDENCE, Double.class, lf);
-            Iterator<UriRef> types = getReferences(graph, textAnnotation, DC_TYPE);
+            Iterator<IRI> types = getReferences(graph, textAnnotation, DC_TYPE);
             if(!types.hasNext()){ //create an iterator over null in case no types are present
-                types = Collections.singleton((UriRef)null).iterator();
+                types = Collections.singleton((IRI)null).iterator();
             }
             while(types.hasNext()){
-                UriRef type = types.next();
+                IRI type = types.next();
                 Map<EntityExtractionSummary,EntityExtractionSummary> occurrenceMap = extractionsByTypeMap.get(type);
                 if(occurrenceMap == null){
                     occurrenceMap = new TreeMap<EntityExtractionSummary,EntityExtractionSummary>();
@@ -405,12 +404,12 @@
                         DC_LANGUAGE);
                 }
                 EntityExtractionSummary entity = new EntityExtractionSummary(text, type, start,end,confidence,defaultThumbnails);
-                Collection<NonLiteral> suggestions = suggestionMap.get(textAnnotation);
+                Collection<BlankNodeOrIRI> suggestions = suggestionMap.get(textAnnotation);
                 if(suggestions != null){
-                    for(NonLiteral entityAnnotation : suggestions){
+                    for(BlankNodeOrIRI entityAnnotation : suggestions){
                         Map<EAProps,Object> eaData = entitySuggestionMap.get(entityAnnotation);
                         entity.addSuggestion(
-                            (UriRef)eaData.get(EAProps.entity),
+                            (IRI)eaData.get(EAProps.entity),
                             (String)eaData.get(EAProps.label), 
                             (Double)eaData.get(EAProps.confidence), 
                             graph);
@@ -577,14 +576,14 @@
         protected final String name;
 
         
-        protected final UriRef type;
+        protected final IRI type;
 
         protected List<EntitySuggestion> suggestions = new ArrayList<EntitySuggestion>();
-        protected Set<UriRef> suggestionSet = new HashSet<UriRef>();
+        protected Set<IRI> suggestionSet = new HashSet<IRI>();
 
         protected List<Mention> mentions = new ArrayList<Mention>();
 
-        public final Map<UriRef,String> defaultThumbnails;
+        public final Map<IRI,String> defaultThumbnails;
 
 
         private Integer start;
@@ -594,7 +593,7 @@
 
         private Double confidence;
 
-        public EntityExtractionSummary(String name, UriRef type, Integer start, Integer end, Double confidence, Map<UriRef,String> defaultThumbnails) {
+        public EntityExtractionSummary(String name, IRI type, Integer start, Integer end, Double confidence, Map<IRI,String> defaultThumbnails) {
             if(name == null){
                 this.name = extractLabel(type);
             } else {
@@ -608,7 +607,7 @@
             this.confidence = confidence;
         }
 
-        public void addSuggestion(UriRef uri, String label, Double confidence, TripleCollection properties) {
+        public void addSuggestion(IRI uri, String label, Double confidence, Graph properties) {
             EntitySuggestion suggestion = new EntitySuggestion(uri, type, label, confidence, properties,
                     defaultThumbnails);
             suggestionSet.add(uri);
@@ -748,24 +747,24 @@
 
     public static class EntitySuggestion implements Comparable<EntitySuggestion> {
 
-        protected final UriRef uri;
+        protected final IRI uri;
 
-        protected final UriRef type;
+        protected final IRI type;
 
         protected final String label;
 
         protected final Double confidence;
 
-        protected TripleCollection entityProperties;
+        protected Graph entityProperties;
 
-        protected final Map<UriRef,String> defaultThumbnails;
+        protected final Map<IRI,String> defaultThumbnails;
 
-        public EntitySuggestion(UriRef uri,
-                                UriRef type,
+        public EntitySuggestion(IRI uri,
+                                IRI type,
                                 String label,
                                 Double confidence,
-                                TripleCollection entityProperties,
-                                Map<UriRef,String> defaultThumbnails) {
+                                Graph entityProperties,
+                                Map<IRI,String> defaultThumbnails) {
             this.uri = uri;
             if(label == null){
                 this.label = extractLabel(uri);
@@ -799,17 +798,17 @@
         public String getThumbnailSrc() {
             Iterator<Triple> thumbnails = entityProperties.filter(uri, THUMBNAIL, null);
             while (thumbnails.hasNext()) {
-                Resource object = thumbnails.next().getObject();
-                if (object instanceof UriRef) {
-                    return ((UriRef) object).getUnicodeString();
+                RDFTerm object = thumbnails.next().getObject();
+                if (object instanceof IRI) {
+                    return ((IRI) object).getUnicodeString();
                 }
             }
             //if no dbpedia ontology thumbnail was found, try the same with foaf:depiction
             thumbnails = entityProperties.filter(uri, DEPICTION, null);
             while (thumbnails.hasNext()) {
-                Resource object = thumbnails.next().getObject();
-                if (object instanceof UriRef) {
-                    return ((UriRef) object).getUnicodeString();
+                RDFTerm object = thumbnails.next().getObject();
+                if (object instanceof IRI) {
+                    return ((IRI) object).getUnicodeString();
                 }
             }
             return getMissingThumbnailSrc();
@@ -826,9 +825,9 @@
         public String getSummary() {
             Iterator<Triple> abstracts = entityProperties.filter(uri, SUMMARY, null);
             while (abstracts.hasNext()) {
-                Resource object = abstracts.next().getObject();
-                if (object instanceof PlainLiteral) {
-                    PlainLiteral abstract_ = (PlainLiteral) object;
+                RDFTerm object = abstracts.next().getObject();
+                if (object instanceof Literal) {
+                    Literal abstract_ = (Literal) object;
                     if (new Language("en").equals(abstract_.getLanguage())) {
                         return abstract_.getLexicalForm();
                     }
@@ -869,15 +868,15 @@
      * @return an RDF/JSON description of places for the word map widget
      */
     public String getPlacesAsJSON() throws ParseException, UnsupportedEncodingException {
-        MGraph g = new IndexedMGraph();
+        Graph g = new IndexedGraph();
         LiteralFactory lf = LiteralFactory.getInstance();
-        MGraph metadata = contentItem.getMetadata();
+        Graph metadata = contentItem.getMetadata();
         for (EntityExtractionSummary p : getPlaceOccurrences()) {
             EntitySuggestion bestGuess = p.getBestGuess();
             if (bestGuess == null) {
                 continue;
             }
-            UriRef uri = new UriRef(bestGuess.getUri());
+            IRI uri = new IRI(bestGuess.getUri());
             Iterator<Triple> latitudes = metadata.filter(uri, GEO_LAT, null);
             if (latitudes.hasNext()) {
                 g.add(latitudes.next());
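
The hunks above follow the type mapping applied throughout this patch: UriRef -> IRI, NonLiteral -> BlankNodeOrIRI, Resource -> RDFTerm, MGraph/TripleCollection -> Graph (mutable), and the old read-only Graph -> ImmutableGraph. A minimal sketch of the same operations on the new API, not part of the patch; the URIs are illustrative:

```java
import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.ImmutableGraph;
import org.apache.clerezza.commons.rdf.RDFTerm;
import org.apache.clerezza.commons.rdf.Triple;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class ClerezzaMigrationSketch {
    public static void main(String[] args) {
        Graph g = new SimpleGraph();          // was: MGraph g = new SimpleMGraph();
        IRI s = new IRI("urn:test");          // was: UriRef
        g.add(new TripleImpl(s, new IRI("urn:test:prop"), new IRI("urn:test:value")));
        for (Triple t : g) {                  // Graph extends java.util.Collection<Triple>
            BlankNodeOrIRI subject = t.getSubject();  // was: NonLiteral
            RDFTerm object = t.getObject();           // was: Resource
            if (object instanceof IRI) {
                System.out.println(subject + " -> " + ((IRI) object).getUnicodeString());
            }
        }
        // was: TripleCollection#getGraph(); returns a read-only snapshot
        ImmutableGraph snapshot = g.getImmutableGraph();
        System.out.println(snapshot.size() + " triple(s)");
    }
}
```
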
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java
index 85d176a..b55835d 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancementEnginesRootResource.java
@@ -50,8 +50,8 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.UriInfo;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
@@ -191,7 +191,7 @@
     @Produces(value={JSON_LD, APPLICATION_JSON,N3,N_TRIPLE,RDF_JSON,RDF_XML,TURTLE,X_TURTLE})
     public Response getEngines(@Context HttpHeaders headers){
         String rootUrl = uriInfo.getBaseUriBuilder().path(getRootUrl()).build().toString();
-        MGraph graph = new SimpleMGraph();
+        Graph graph = new SimpleGraph();
         addActiveEngines(getActiveEngines(), graph, rootUrl);
         ResponseBuilder res = Response.ok(graph);
    //     addCORSOrigin(servletContext,res, headers);
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java
index 90dee1e..5f83662 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/EnhancerRootResource.java
@@ -47,10 +47,10 @@
 import javax.ws.rs.core.Response.Status;
 import javax.ws.rs.core.UriInfo;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
 import org.apache.clerezza.rdf.core.sparql.QueryEngine;
@@ -62,6 +62,7 @@
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
+import org.apache.felix.scr.annotations.ReferenceCardinality;
 import org.apache.felix.scr.annotations.Service;
 import org.apache.stanbol.enhancer.servicesapi.rdf.Enhancer;
 import org.apache.stanbol.commons.viewable.Viewable;
@@ -95,7 +96,7 @@
     private ContentItemFactory ciFactory;
     @Reference
     private Serializer serializer;
-    @Reference
+    @Reference(cardinality = ReferenceCardinality.OPTIONAL_UNARY)
     private QueryEngine queryEngine;
     
     @Path("")
@@ -123,7 +124,7 @@
         @GET
         @Produces(value = {JSON_LD, APPLICATION_JSON, N3, N_TRIPLE, RDF_JSON, RDF_XML, TURTLE, X_TURTLE})
         public Response getEngines(@Context HttpHeaders headers) {
-            MGraph graph = getEnhancerConfigGraph();
+            Graph graph = getEnhancerConfigGraph();
             ResponseBuilder res = Response.ok(graph);
             //addCORSOrigin(servletContext,res, headers);
             return res.build();
@@ -134,10 +135,10 @@
          *
          * @return the graph with the configuration
          */
-        private MGraph getEnhancerConfigGraph() {
+        private Graph getEnhancerConfigGraph() {
             String rootUrl = getUriInfo().getBaseUriBuilder().path(getRootUrl()).build().toString();
-            UriRef enhancerResource = new UriRef(rootUrl + "enhancer");
-            MGraph graph = new SimpleMGraph();
+            IRI enhancerResource = new IRI(rootUrl + "enhancer");
+            Graph graph = new SimpleGraph();
             graph.add(new TripleImpl(enhancerResource, RDF.type, Enhancer.ENHANCER));
             addActiveEngines(engineManager, graph, rootUrl);
             addActiveChains(chainManager, graph, rootUrl);
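
Note the cardinality change above: the QueryEngine reference is now OPTIONAL_UNARY, so the injected field may be null at runtime and SPARQL support has to be guarded. A sketch of such a guard, not part of the patch; the response wording is made up:

```java
// Sketch only: with ReferenceCardinality.OPTIONAL_UNARY the queryEngine
// field may never be bound, so a SPARQL request must fail gracefully.
if (queryEngine == null) {
    return Response.status(Status.SERVICE_UNAVAILABLE)
            .entity("No SPARQL QueryEngine service is available")
            .build();
}
```
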
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java
index bc1a875..67046ff 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/resource/GenericEnhancerUiResource.java
@@ -36,9 +36,9 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.UriInfo;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.sparql.QueryEngine;
 import org.apache.felix.scr.annotations.Component;
@@ -181,7 +181,7 @@
      */
     public Set<ExecutionNode> getExecutionNodes() {
         if (_executionNodes == null) {
-            Graph ep;
+            ImmutableGraph ep;
             try {
                 ep = chain.getExecutionPlan();
             } catch (ChainException e) {
@@ -189,11 +189,11 @@
             }
             if (ep != null) {
                 _executionNodes = new LinkedHashSet<ExecutionNode>();
-                Set<NonLiteral> processed = new HashSet<NonLiteral>();
-                Set<NonLiteral> next;
+                Set<BlankNodeOrIRI> processed = new HashSet<BlankNodeOrIRI>();
+                Set<BlankNodeOrIRI> next;
                 do {
                     next = ExecutionPlanHelper.getExecutable(ep, processed);
-                    for (NonLiteral node : next) {
+                    for (BlankNodeOrIRI node : next) {
                         _executionNodes.add(new ExecutionNode(ep, node));
                     }
                     processed.addAll(next);
@@ -236,12 +236,12 @@
     }
     public class ExecutionNode {
 
-        private final NonLiteral node;
-        private final TripleCollection ep;
+        private final BlankNodeOrIRI node;
+        private final Graph ep;
         private final boolean optional;
         private final String engineName;
 
-        public ExecutionNode(TripleCollection executionPlan, NonLiteral node) {
+        public ExecutionNode(Graph executionPlan, BlankNodeOrIRI node) {
             this.node = node;
             this.ep = executionPlan;
             this.optional = ExecutionPlanHelper.isOptional(ep, node);
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java
index 3fd21e1..c080cbf 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/EnhancerUtils.java
@@ -23,10 +23,10 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
@@ -98,7 +98,7 @@
      * @param graph the RDF graph to add the triples
      * @param rootUrl the root URL used by the current request
      */
-    public static void addActiveEngines(EnhancementEngineManager engineManager,MGraph graph, String rootUrl) {
+    public static void addActiveEngines(EnhancementEngineManager engineManager,Graph graph, String rootUrl) {
         addActiveEngines(buildEnginesMap(engineManager).values(), graph, rootUrl);
     }
     /**
@@ -114,11 +114,11 @@
      * @param rootUrl the root URL used by the current request
      * @see EnhancerUtils#buildEnginesMap(EnhancementEngineManager)
      */
-    public static void addActiveEngines(Iterable<Entry<ServiceReference,EnhancementEngine>> activeEngines,MGraph graph, String rootUrl) {
-        UriRef enhancerResource = new UriRef(rootUrl+"enhancer");
+    public static void addActiveEngines(Iterable<Entry<ServiceReference,EnhancementEngine>> activeEngines,Graph graph, String rootUrl) {
+        IRI enhancerResource = new IRI(rootUrl+"enhancer");
         graph.add(new TripleImpl(enhancerResource, RDF.type, Enhancer.ENHANCER));
         for(Entry<ServiceReference,EnhancementEngine> entry : activeEngines){
-            UriRef engineResource = new UriRef(rootUrl+"enhancer/engine/"+entry.getValue().getName());
+            IRI engineResource = new IRI(rootUrl+"enhancer/engine/"+entry.getValue().getName());
             graph.add(new TripleImpl(enhancerResource, Enhancer.HAS_ENGINE, engineResource));
             graph.add(new TripleImpl(engineResource, RDF.type, ENHANCEMENT_ENGINE));
             graph.add(new TripleImpl(engineResource, RDFS.label, new PlainLiteralImpl(entry.getValue().getName())));
@@ -137,7 +137,7 @@
      * @param graph the RDF graph to add the triples
      * @param rootUrl the root URL used by the current request
      */
-    public static void addActiveChains(ChainManager chainManager, MGraph graph, String rootUrl) {
+    public static void addActiveChains(ChainManager chainManager, Graph graph, String rootUrl) {
         addActiveChains(buildChainsMap(chainManager).values(), chainManager.getDefault(), graph, rootUrl);
     }
     /**
@@ -153,11 +153,11 @@
      * @param graph the RDF graph to add the triples
      * @param rootUrl the root URL used by the current request
      */
-    public static void addActiveChains(Iterable<Entry<ServiceReference,Chain>> activeChains, Chain defaultChain, MGraph graph, String rootUrl) {
-        UriRef enhancer = new UriRef(rootUrl+"enhancer");
+    public static void addActiveChains(Iterable<Entry<ServiceReference,Chain>> activeChains, Chain defaultChain, Graph graph, String rootUrl) {
+        IRI enhancer = new IRI(rootUrl+"enhancer");
         graph.add(new TripleImpl(enhancer, RDF.type, Enhancer.ENHANCER));
         for(Entry<ServiceReference,Chain> entry : activeChains){
-            UriRef chainResource = new UriRef(rootUrl+"enhancer/chain/"+entry.getValue().getName());
+            IRI chainResource = new IRI(rootUrl+"enhancer/chain/"+entry.getValue().getName());
             graph.add(new TripleImpl(enhancer, Enhancer.HAS_CHAIN, chainResource));
             if(entry.getValue().equals(defaultChain)){
                 graph.add(new TripleImpl(enhancer, Enhancer.HAS_DEFAULT_CHAIN, chainResource));
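
For orientation, a sketch (not part of the patch) of the triples the rewritten addActiveEngines(..) adds for a single engine with the new commons-rdf types; the rootUrl and engine name are made up, and ENHANCEMENT_ENGINE is the static import EnhancerUtils already uses:

```java
Graph graph = new SimpleGraph();
IRI enhancer = new IRI("http://localhost:8080/enhancer");
IRI engine = new IRI("http://localhost:8080/enhancer/engine/langdetect");
graph.add(new TripleImpl(enhancer, RDF.type, Enhancer.ENHANCER));
graph.add(new TripleImpl(enhancer, Enhancer.HAS_ENGINE, engine));
graph.add(new TripleImpl(engine, RDF.type, ENHANCEMENT_ENGINE));
graph.add(new TripleImpl(engine, RDFS.label, new PlainLiteralImpl("langdetect")));
```
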
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java
index e7cf11f..dfde940 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/utils/RequestPropertiesHelper.java
@@ -23,8 +23,8 @@
 
 import javax.ws.rs.QueryParam;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.enhancer.servicesapi.Blob;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.helper.ContentItemHelper;
@@ -43,7 +43,7 @@
     /**
      * @see ContentItemHelper#REQUEST_PROPERTIES_URI
      */
-    public static final UriRef REQUEST_PROPERTIES_URI =
+    public static final IRI REQUEST_PROPERTIES_URI =
             ContentItemHelper.REQUEST_PROPERTIES_URI;
     /**
      * Boolean switch parsed as {@link QueryParam} that allows to deactivate the
@@ -52,8 +52,8 @@
     public static final String OMIT_METADATA = "stanbol.enhancer.web.omitMetadata";
     /**
      * {@link Set Set&lt;String&gt;} containing all the URIs of the
-     * {@link ContentItem#getPart(UriRef, Class) ContentParts} representing 
-     * RDF data (compatible to Clerezza {@link TripleCollection}). If the 
+     * {@link ContentItem#getPart(IRI, Class) ContentParts} representing 
+     * RDF data (compatible with Clerezza {@link Graph}). If the 
      * returned set contains '*' then all such content parts need to be returned.<p>
      * NOTE: This can also be used to include the Request Properties
      * as "applciation/json" in the Response by adding this
@@ -74,7 +74,7 @@
     public static final String OUTPUT_CONTENT = "stanbol.enhancer.web.outputContent";
     /**
      * This allows to copy the {@link ExecutionMetadata} and {@link ExecutionPlan}
-     * data stored in a {@link ContentItem#getPart(UriRef, Class) contentPart} with
+     * data stored in a {@link ContentItem#getPart(IRI, Class) contentPart} with
      * the URI {@link ExecutionMetadata#CHAIN_EXECUTION} over to the
      * {@link ContentItem#getMetadata() metadata} of the content item.<p>
      * This feature is intended to allow users to retrieve such meta information
@@ -88,7 +88,7 @@
      */
     public static final String RDF_FORMAT = "stanbol.enhancer.web.rdfFormat";
     /**
-     * {@link Set Set&lt;String&gt;} containing all the {@link UriRef}s of 
+     * {@link Set Set&lt;String&gt;} containing all the {@link IRI}s of 
      * {@link ContentItem#getPart(int, Class) ContentItem.getPart}(uri,{@link Blob})
      * that were parsed with the request.
      */
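
A sketch (not part of the patch) of configuring these request properties on a ContentItem; it mirrors the setup used by ContentItemReaderWriterTest further below, where initRequestPropertiesContentPart(..) is available via static import:

```java
Map<String,Object> properties = initRequestPropertiesContentPart(contentItem);
properties.put(OMIT_METADATA, Boolean.FALSE);                     // keep the enhancement metadata
properties.put(OUTPUT_CONTENT, Collections.singleton("*/*"));     // include all Blob content parts
properties.put(OUTPUT_CONTENT_PART, Collections.singleton("*"));  // include all RDF content parts
properties.put(RDF_FORMAT, "application/rdf+xml");                // serialisation for RDF parts
```
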
diff --git a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java
index 51f14e6..0e036ac 100644
--- a/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java
+++ b/enhancer/jersey/src/main/java/org/apache/stanbol/enhancer/jersey/writers/ContentItemWriter.java
@@ -59,8 +59,8 @@
 import javax.ws.rs.ext.MessageBodyWriter;
 import javax.ws.rs.ext.Provider;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedSerializationFormatException;
 import org.apache.commons.io.IOUtils;
@@ -197,7 +197,7 @@
                         + mediaType.toString(),Response.Status.NOT_ACCEPTABLE);
                 }
             } else { //  (2) return a single content part
-                Entry<UriRef,Blob> contentPart = getBlob(ci, Collections.singleton(mediaType.toString()));
+                Entry<IRI,Blob> contentPart = getBlob(ci, Collections.singleton(mediaType.toString()));
                 if(contentPart == null){ //no alternate content with the requested media type
                     throw new WebApplicationException("The requested enhancement chain has not created an "
                             + "version of the parsed content in the reuqest media type "
@@ -267,11 +267,11 @@
             }
             //(3) serialising the Content (Bloby)
             //(3.a) Filter based on parameter
-            List<Entry<UriRef,Blob>> includedBlobs = filterBlobs(ci, reqProp);
+            List<Entry<IRI,Blob>> includedBlobs = filterBlobs(ci, reqProp);
             //(3.b) Serialise the filtered
             if(!includedBlobs.isEmpty()) {
                 Map<String,ContentBody> contentParts = new LinkedHashMap<String,ContentBody>();
-                for(Entry<UriRef,Blob> entry : includedBlobs){
+                for(Entry<IRI,Blob> entry : includedBlobs){
                     Blob blob = entry.getValue();
                     ContentType ct = ContentType.create(blob.getMimeType());
                     String cs = blob.getParameter().get("charset");
@@ -304,7 +304,7 @@
                          ContentType.APPLICATION_JSON.withCharset(UTF8));
                 }
                 //(5) additional RDF metadata stored in contentParts
-                for(Entry<UriRef,TripleCollection> entry : getContentParts(ci, TripleCollection.class).entrySet()){
+                for(Entry<IRI,Graph> entry : getContentParts(ci, Graph.class).entrySet()){
                     if(includeContentParts.isEmpty() || includeContentParts.contains(
                         entry.getKey())){
                         entityBuilder.addPart(entry.getKey().getUnicodeString(), 
@@ -372,16 +372,16 @@
      * @param properties
      * @return
      */
-    private List<Entry<UriRef,Blob>> filterBlobs(ContentItem ci, Map<String,Object> properties) {
-        final List<Entry<UriRef,Blob>> includedContentPartList;
+    private List<Entry<IRI,Blob>> filterBlobs(ContentItem ci, Map<String,Object> properties) {
+        final List<Entry<IRI,Blob>> includedContentPartList;
         Set<MediaType> includeMediaTypes = getIncludedMediaTypes(properties);
         if(includeMediaTypes == null){
             includedContentPartList = Collections.emptyList();
         } else {
-            includedContentPartList = new ArrayList<Map.Entry<UriRef,Blob>>();
+            includedContentPartList = new ArrayList<Map.Entry<IRI,Blob>>();
             Set<String> ignoreContentPartUris = getIgnoredContentURIs(properties);
             nextContentPartEntry: 
-            for(Entry<UriRef,Blob> entry : getContentParts(ci,Blob.class).entrySet()){
+            for(Entry<IRI,Blob> entry : getContentParts(ci,Blob.class).entrySet()){
                 if(!ignoreContentPartUris.contains(entry.getKey().getUnicodeString())){
                     Blob blob = entry.getValue();
                     MediaType blobMediaType = MediaType.valueOf(blob.getMimeType());
@@ -505,11 +505,11 @@
      */
     private class ClerezzaContentBody extends AbstractContentBody implements ContentBody,ContentDescriptor {
 
-        private TripleCollection graph;
+        private Graph graph;
         private String charset;
         private String name;
 
-        protected ClerezzaContentBody(String name, TripleCollection graph, MediaType mimeType){
+        protected ClerezzaContentBody(String name, Graph graph, MediaType mimeType){
             super(ContentType.create(new StringBuilder(mimeType.getType())
             .append('/').append(mimeType.getSubtype()).toString(), UTF8));
             charset = mimeType.getParameters().get("charset");
diff --git a/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java b/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java
index 82f7f5c..8f814d3 100644
--- a/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java
+++ b/enhancer/jersey/src/test/java/org/apache/stanbol/enhancer/jersey/ContentItemReaderWriterTest.java
@@ -51,11 +51,11 @@
 import javax.ws.rs.core.MultivaluedMap;
 import javax.ws.rs.ext.RuntimeDelegate;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.ontologies.RDF;
@@ -91,7 +91,7 @@
      */
     @BeforeClass
     public static void createTestContentItem() throws IOException {
-        contentItem = ciFactory.createContentItem(new UriRef("urn:test"),
+        contentItem = ciFactory.createContentItem(new IRI("urn:test"),
             new StringSource(
                 "<html>\n" +
                 "  <body>\n" +
@@ -99,11 +99,11 @@
                 "  </body>\n" +
                 "</html>","text/html"));
         RuntimeDelegate.setInstance(new RuntimeDelegateImpl());
-        contentItem.addPart(new UriRef("run:text:text"), 
+        contentItem.addPart(new IRI("run:text:text"), 
             ciFactory.createBlob(new StringSource(
             "This is a ContentItem to Mime Multipart test!")));
         contentItem.getMetadata().add(new TripleImpl(
-            new UriRef("urn:test"), RDF.type, new UriRef("urn:types:Document")));
+            new IRI("urn:test"), RDF.type, new IRI("urn:types:Document")));
         //mark the main content as parsed and also that all 
         //contents and contentparts should be included
         Map<String,Object> properties = initRequestPropertiesContentPart(contentItem);
@@ -111,8 +111,8 @@
         properties.put(OUTPUT_CONTENT, Collections.singleton("*/*"));
         properties.put(OUTPUT_CONTENT_PART, Collections.singleton("*"));
         properties.put(RDF_FORMAT, "application/rdf+xml");
-        MGraph em = initExecutionMetadataContentPart(contentItem);
-        NonLiteral ep = createExecutionPlan(em, "testChain",null);
+        Graph em = initExecutionMetadataContentPart(contentItem);
+        BlankNodeOrIRI ep = createExecutionPlan(em, "testChain",null);
         writeExecutionNode(em, ep, "testEngine", true, null,null);
         initExecutionMetadata(em, em, contentItem.getUri(), "testChain", false);
 
@@ -201,7 +201,7 @@
         //assert ID
         assertEquals(contentItem.getUri(), ci.getUri());
         //assert metadata
-        MGraph copy = new SimpleMGraph();
+        Graph copy = new SimpleGraph();
         copy.addAll(contentItem.getMetadata());
         assertTrue(copy.removeAll(ci.getMetadata()));
         assertTrue(copy.isEmpty());
@@ -210,12 +210,12 @@
         String content = IOUtils.toString(contentItem.getStream(),"UTF-8");
         String readContent = IOUtils.toString(ci.getStream(), "UTF-8");
         assertEquals(content, readContent);
-        Iterator<Entry<UriRef,Blob>> contentItemBlobsIt = ContentItemHelper.getContentParts(contentItem, Blob.class).entrySet().iterator();
-        Iterator<Entry<UriRef,Blob>> ciBlobsIt = ContentItemHelper.getContentParts(ci, Blob.class).entrySet().iterator();
+        Iterator<Entry<IRI,Blob>> contentItemBlobsIt = ContentItemHelper.getContentParts(contentItem, Blob.class).entrySet().iterator();
+        Iterator<Entry<IRI,Blob>> ciBlobsIt = ContentItemHelper.getContentParts(ci, Blob.class).entrySet().iterator();
         Set<String> expectedParsedContentIds = new HashSet<String>(); //later used to validate enhancementMetadata
         while(contentItemBlobsIt.hasNext() && ciBlobsIt.hasNext()){
-            Entry<UriRef,Blob> contentItemBlobPart = contentItemBlobsIt.next();
-            Entry<UriRef,Blob> ciBlobPart = ciBlobsIt.next();
+            Entry<IRI,Blob> contentItemBlobPart = contentItemBlobsIt.next();
+            Entry<IRI,Blob> ciBlobPart = ciBlobsIt.next();
             expectedParsedContentIds.add(ciBlobPart.getKey().getUnicodeString());
             assertEquals(contentItemBlobPart.getKey(), ciBlobPart.getKey());
             String partContentType = contentItemBlobPart.getValue().getMimeType();
@@ -226,8 +226,8 @@
             assertEquals(partContent, readPartContent);
         }
         //validate ExecutionMetadata
-        MGraph executionMetadata = contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
-        MGraph readExecutionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
+        Graph executionMetadata = contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
+        Graph readExecutionMetadata = ci.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
         assertNotNull(executionMetadata);
         assertNotNull(readExecutionMetadata);
         assertEquals(executionMetadata.size(), readExecutionMetadata.size());
diff --git a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java
index 7591a79..c3a6cea 100644
--- a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java
+++ b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/Constants.java
@@ -16,7 +16,7 @@
 */
 package org.apache.stanbol.enhancer.jobmanager.event;
 
-import org.apache.clerezza.rdf.core.NonLiteral;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
 import org.apache.stanbol.enhancer.jobmanager.event.impl.EnhancementJob;
 import org.apache.stanbol.enhancer.servicesapi.rdf.ExecutionMetadata;
 import org.osgi.service.event.Event;
@@ -41,7 +41,7 @@
      */
     String PROPERTY_JOB_MANAGER = "stanbol.enhancer.jobmanager.event.job";
     /**
-     * Property used to provide the {@link NonLiteral} describing the
+     * Property used to provide the {@link BlankNodeOrIRI} describing the
      * {@link ExecutionMetadata#EXECUTION} instance
      */
     String PROPERTY_EXECUTION = "stanbol.enhancer.jobmanager.event.execution";
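
A sketch (not part of the patch) of how these properties travel on an OSGi Event, matching how EnhancementJobHandler.handleEvent(..) below reads them; eventAdmin, job and execution are assumed to exist, and TOPIC_JOB_MANAGER is a hypothetical topic constant:

```java
Dictionary<String,Object> props = new Hashtable<String,Object>();
props.put(Constants.PROPERTY_JOB_MANAGER, job);       // the EnhancementJob
props.put(Constants.PROPERTY_EXECUTION, execution);   // the em:Execution BlankNodeOrIRI
eventAdmin.postEvent(new Event(TOPIC_JOB_MANAGER, props));
```
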
diff --git a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java
index 2ac6c14..dafc471 100644
--- a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java
+++ b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJob.java
@@ -43,10 +43,10 @@
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.collections.BidiMap;
 import org.apache.commons.collections.bidimap.DualHashBidiMap;
 import org.apache.stanbol.enhancer.servicesapi.Chain;
@@ -88,13 +88,13 @@
     /**
      * The read only executionPlan
      */
-    private final Graph executionPlan;
+    private final ImmutableGraph executionPlan;
     /**
      * The read/write able execution metadata. Also accessible via
-     * {@link ContentItem#getPart(UriRef, Class)} with the URI
+     * {@link ContentItem#getPart(IRI, Class)} with the URI
      * {@link ExecutionMetadata#CHAIN_EXECUTION}
      */
-    private final MGraph executionMetadata;
+    private final Graph executionMetadata;
     /**
      * Map with the em:Execution nodes of the em:ChainExecution for this
      * ContentItem. Values are ep:ExecutionNodes of the ep:ExecutionPlan
@@ -103,11 +103,11 @@
     /**
      * The em:ChainExecution for this {@link ContentItem}
      */
-    private final NonLiteral chainExecutionNode;
+    private final BlankNodeOrIRI chainExecutionNode;
     /**
      * The ep:ExecutionPlan for this {@link ContentItem}
      */
-    private final NonLiteral executionPlanNode;
+    private final BlankNodeOrIRI executionPlanNode;
     /**
      * The name of the {@link Chain} used to enhance this {@link ContentItem}.
      */
@@ -121,32 +121,32 @@
      * The completed ep:ExecutionPlan nodes. <p>
      * NOTE: This contains ep:ExecutionNodes and NOT em:Execution instances!
      */
-    private final Set<NonLiteral> completed = new HashSet<NonLiteral>();
+    private final Set<BlankNodeOrIRI> completed = new HashSet<BlankNodeOrIRI>();
     /**
      * Unmodifiable and final set of completed executables. Replaced by a new
      * instance every time {@link #completed} changes
      */
-    private Set<NonLiteral> completedExec = Collections.emptySet();
+    private Set<BlankNodeOrIRI> completedExec = Collections.emptySet();
     /**
      * The running ep:ExecutionPlan nodes <p>
      * NOTE: This contains ep:ExecutionNodes and NOT em:Execution instances!
      */
-    private final Set<NonLiteral> running = new HashSet<NonLiteral>();
+    private final Set<BlankNodeOrIRI> running = new HashSet<BlankNodeOrIRI>();
     /**
      * Unmodifiable and final set of running executables. Replaced by a new
      * instance every time {@link #running} changes.
      */
-    private Set<NonLiteral> runningExec = Collections.emptySet();
+    private Set<BlankNodeOrIRI> runningExec = Collections.emptySet();
 
     /**
      * Unmodifiable and final set of executable em:Execution nodes. 
      * Replaced by a new instance every time {@link #running} or 
      * {@link #completed} changes.
      */
-    private Set<NonLiteral> executable;
+    private Set<BlankNodeOrIRI> executable;
     /**
      * Used to store any {@link Exception} parsed with the call to
-     * {@link #setFailed(NonLiteral, EnhancementEngine, Exception)} causing the
+     * {@link #setFailed(BlankNodeOrIRI, EnhancementEngine, Exception)} causing the
      * enhancement process to fail. This Exception is typically re-thrown by the
      * {@link EnhancementJobManager#enhanceContent(ContentItem, Chain)} method.
      * @see #getError()
@@ -162,7 +162,7 @@
      * @param executionPlan
      * @param isDefaultChain
      */
-    public EnhancementJob(ContentItem contentItem, String chainName, Graph executionPlan, boolean isDefaultChain) {
+    public EnhancementJob(ContentItem contentItem, String chainName, ImmutableGraph executionPlan, boolean isDefaultChain) {
         if (contentItem == null || chainName == null || executionPlan == null) {
             throw new IllegalArgumentException("The parsed contentItem and executionPlan MUST NOT be NULL");
         }
@@ -191,7 +191,7 @@
      * Creates an EnhancementJob based on already existing execution metadata present
      * for a ContentItem.
      * @param contentItem the ContentItem with an already existing content part
-     * containing an {@link MGraph} with all required execution metadata and the 
+     * containing a {@link Graph} with all required execution metadata and the 
      * execution plan.
      * @throws IllegalArgumentException if the parsed {@link ContentItem} does
      * not provide the required data to (re)initialise the EnhancementJob.
@@ -204,13 +204,13 @@
         this.readLock = contentItem.getLock().readLock();
         this.writeLock = contentItem.getLock().writeLock();
         try {
-            contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, MGraph.class);
+            contentItem.getPart(ExecutionMetadata.CHAIN_EXECUTION, Graph.class);
         } catch (NoSuchPartException e) {
             throw new IllegalArgumentException("Cannot (re)initialise an EnhancementJob" +
                     "without existing execution metadata content part!",e);
         }
         executionMetadata = initExecutionMetadataContentPart(contentItem);
-        this.executionPlan = executionMetadata.getGraph();
+        this.executionPlan = executionMetadata.getImmutableGraph();
         chainExecutionNode = getChainExecution(executionMetadata, contentItem.getUri());
         if(chainExecutionNode == null){
             throw new IllegalArgumentException("Cannot (re)initialise an EnhancementJob" +
@@ -231,10 +231,10 @@
                     "enhance  ContentItem '"+contentItem.getUri()+"'!");
         }
         //the executionPlan is part of the execution metadata
-        Map<NonLiteral,NonLiteral> executionsMap = initExecutionMetadata(executionMetadata, 
+        Map<BlankNodeOrIRI,BlankNodeOrIRI> executionsMap = initExecutionMetadata(executionMetadata, 
             executionPlan, contentItem.getUri(), null, null);
-        for(Entry<NonLiteral,NonLiteral> executionEntry : executionsMap.entrySet()){
-            UriRef status = getReference(executionMetadata, executionEntry.getKey(), STATUS);
+        for(Entry<BlankNodeOrIRI,BlankNodeOrIRI> executionEntry : executionsMap.entrySet()){
+            IRI status = getReference(executionMetadata, executionEntry.getKey(), STATUS);
             if(status == null){
                 throw new IllegalArgumentException("The ex:Execution '"
                         + executionEntry.getKey()+"' of the ex:ChainExecution for ContentItme '"
@@ -260,8 +260,8 @@
      * @throws IllegalArgumentException if the parsed em:Execution is not
      * part of the execution metadata of this enhancement job
      */
-    public NonLiteral getExecutionNode(NonLiteral execution){
-        NonLiteral node = (NonLiteral)executionsMap.get(execution);
+    public BlankNodeOrIRI getExecutionNode(BlankNodeOrIRI execution){
+        BlankNodeOrIRI node = (BlankNodeOrIRI)executionsMap.get(execution);
         if(node == null){
             throw new IllegalArgumentException("Unknown sp:ExecutionNode instance "+node);
         }
@@ -273,8 +273,8 @@
      * @throws IllegalArgumentException if the parsed ep:ExecutionNode is not
      * part of the execution plan of this enhancement job
      */
-    public NonLiteral getExecution(NonLiteral executionNode){
-        NonLiteral execution = (NonLiteral)executionsMap.getKey(executionNode);
+    public BlankNodeOrIRI getExecution(BlankNodeOrIRI executionNode){
+        BlankNodeOrIRI execution = (BlankNodeOrIRI)executionsMap.getKey(executionNode);
         if(execution == null){
             throw new IllegalArgumentException("Unknown em:Execution instance "+executionNode);
         }
@@ -286,7 +286,7 @@
      * 
      * @return the executionPlan
      */
-    public final Graph getExecutionPlan() {
+    public final ImmutableGraph getExecutionPlan() {
         return executionPlan;
     }
 
@@ -329,7 +329,7 @@
      * 
      * @return the currently running executions.
      */
-    public Set<NonLiteral> getRunning() {
+    public Set<BlankNodeOrIRI> getRunning() {
         log.trace("++ r: {}","getRunning");
         readLock.lock();
         try {
@@ -346,7 +346,7 @@
      * 
      * @return the completed execution nodes
      */
-    public Set<NonLiteral> getCompleted() {
+    public Set<BlankNodeOrIRI> getCompleted() {
         log.trace("++ r: {}","getCompleted");
         readLock.lock();
         try {
@@ -371,12 +371,12 @@
      *             if the parsed execution node cannot be marked as completed because some of the
      *             nodes it depends on are not yet marked as completed.
      */
-    public void setCompleted(NonLiteral execution) {
+    public void setCompleted(BlankNodeOrIRI execution) {
         if(execution == null) {
             throw new IllegalArgumentException("The parsed em:Execution instance MUST NOT be NULL!");
         }
         writeLock.lock();
-        NonLiteral executionNode = getExecutionNode(execution);
+        BlankNodeOrIRI executionNode = getExecutionNode(execution);
         log.trace("++ w: {}: {}","setCompleted",getEngine(executionPlan, executionNode));
         try {
             log.trace(">> w: {}: {}","setCompleted",getEngine(executionPlan, executionNode));
@@ -390,16 +390,16 @@
     /**
      * Internally used to update the state kept in {@link #completed} and
      * {@link #running} and {@link #executable} after an execution was set to
-     * {@link #setCompleted(NonLiteral) completed} or 
-     * {@link #setFailed(NonLiteral, EnhancementEngine, Exception) failed}.<p>
+     * {@link #setCompleted(BlankNodeOrIRI) completed} or 
+     * {@link #setFailed(BlankNodeOrIRI, EnhancementEngine, Exception) failed}.<p>
      * This method expects to be called within an active {@link #writeLock}.
      * @param executionNode the ep:ExecutionNode linked to the em:Execution that
      * finished. 
      */
-    private void setNodeCompleted(NonLiteral executionNode) {
+    private void setNodeCompleted(BlankNodeOrIRI executionNode) {
         String engine = getEngine(executionPlan, executionNode);
         boolean optional = isOptional(executionPlan, executionNode);
-        Set<NonLiteral> dependsOn = getDependend(executionPlan, executionNode);
+        Set<BlankNodeOrIRI> dependsOn = getDependend(executionPlan, executionNode);
         if (completed.contains(executionNode)) {
             log.warn("Execution of Engine '{}' for ContentItem {} already "
                      + "marked as completed(chain: {}, node: {}, optional {})."
@@ -447,14 +447,14 @@
      *             if the parsed execution node cannot be marked as running because some of the
      *             nodes it depends on are not yet marked as completed.
      */
-    public void setRunning(NonLiteral execution) {
+    public void setRunning(BlankNodeOrIRI execution) {
         if(execution == null) {
             throw new IllegalArgumentException("The parsed em:Execution instance MUST NOT be NULL!");
         }
-        NonLiteral executionNode = getExecutionNode(execution);
+        BlankNodeOrIRI executionNode = getExecutionNode(execution);
         String engine = getEngine(executionPlan, executionNode);
         boolean optional = isOptional(executionPlan, executionNode);
-        Set<NonLiteral> dependsOn = getDependend(executionPlan, executionNode);
+        Set<BlankNodeOrIRI> dependsOn = getDependend(executionPlan, executionNode);
         log.trace("++ w: {}: {}","setRunning",ExecutionPlanHelper.getEngine(executionPlan, executionNode));
         writeLock.lock();
         try {
@@ -510,8 +510,8 @@
      * updates the {@link #runningExec} based on {@link #running}
      */
     private void updateRunningExec() {
-        Set<NonLiteral> runningExec = new HashSet<NonLiteral>(running.size());
-        for(NonLiteral node : running){
+        Set<BlankNodeOrIRI> runningExec = new HashSet<BlankNodeOrIRI>(running.size());
+        for(BlankNodeOrIRI node : running){
             runningExec.add(getExecution(node));
         }
         this.runningExec = Collections.unmodifiableSet(runningExec);
@@ -520,8 +520,8 @@
      * updates the {@link #completedExec} based on {@link #completed}
      */
     private void updateCompletedExec() {
-        Set<NonLiteral> completedExec = new HashSet<NonLiteral>(completed.size());
-        for(NonLiteral node : completed){
+        Set<BlankNodeOrIRI> completedExec = new HashSet<BlankNodeOrIRI>(completed.size());
+        for(BlankNodeOrIRI node : completed){
             completedExec.add(getExecution(node));
         }
         this.completedExec = Collections.unmodifiableSet(completedExec);
@@ -531,7 +531,7 @@
      * Assumed to be called within a write lock!
      */
     private void checkExecutable(){
-        Set<NonLiteral> executeableNodes = 
+        Set<BlankNodeOrIRI> executeableNodes = 
                 ExecutionPlanHelper.getExecutable(executionPlan, completed);
         //a Chain finishes if no engine is running and no more nodes are executable
         if(!ExecutionMetadata.STATUS_FAILED.equals(
@@ -539,7 +539,7 @@
             executeableNodes.removeAll(running);
             if(log.isDebugEnabled()){
                 Collection<String> engines = new ArrayList<String>(executeableNodes.size());
-                for(NonLiteral node : executeableNodes){
+                for(BlankNodeOrIRI node : executeableNodes){
                     engines.add(getEngine(executionPlan, node));
                 }
                 log.trace("MARK {} as executeable",engines);
@@ -550,8 +550,8 @@
             } else if( executeableNodes.size() == 1){
                 this.executable = Collections.singleton(getExecution(executeableNodes.iterator().next()));
             } else {
-                Set<NonLiteral> executable = new HashSet<NonLiteral>(executeableNodes.size());
-                for(NonLiteral exeutableNode : executeableNodes){
+                Set<BlankNodeOrIRI> executable = new HashSet<BlankNodeOrIRI>(executeableNodes.size());
+                for(BlankNodeOrIRI exeutableNode : executeableNodes){
                     executable.add(getExecution(exeutableNode));
                 }
                 this.executable = Collections.unmodifiableSet(executable);
@@ -570,7 +570,7 @@
      * @return the nodes that can be executed next based on the completed and
      * currently running engines.
      */
-    public Set<NonLiteral> getExecutable(){
+    public Set<BlankNodeOrIRI> getExecutable(){
         log.trace("++ r: {}","getExecutable");
         readLock.lock();
         log.trace(">> r: {}","getExecutable");
@@ -598,11 +598,11 @@
         }
     }
 
-    public void setFailed(NonLiteral execution, EnhancementEngine engine, Exception exception) {
+    public void setFailed(BlankNodeOrIRI execution, EnhancementEngine engine, Exception exception) {
         if(execution == null) {
             throw new IllegalArgumentException("The parsed em:Execution instance MUST NOT be NULL!");
         }
-        NonLiteral executionNode = getExecutionNode(execution);
+        BlankNodeOrIRI executionNode = getExecutionNode(execution);
         final boolean optional = isOptional(executionPlan, executionNode);
         final String engineName = getEngine(executionPlan, executionNode);
         log.trace("++ w: {}: {}","setFailed",ExecutionPlanHelper.getEngine(executionPlan, executionNode));
@@ -696,7 +696,7 @@
      * Getter for the ExecutionMetadata.
      * @return the execution metadata.
      */
-    public MGraph getExecutionMetadata() {
+    public Graph getExecutionMetadata() {
         return executionMetadata;
     }
     /**
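
Taken together, the migrated EnhancementJob drives its state machine on BlankNodeOrIRI instances. A sketch (not part of the patch) of the lifecycle a job manager runs; job and engine are assumed to exist, and the engine invocation is elided:

```java
for (BlankNodeOrIRI execution : job.getExecutable()) { // executable em:Execution nodes
    job.setRunning(execution);
    try {
        // ... let the EnhancementEngine process the ContentItem ...
        job.setCompleted(execution);
    } catch (Exception e) {
        job.setFailed(execution, engine, e); // records the error and may fail the chain
    }
}
```
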
diff --git a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java
index eb94d48..d245da8 100644
--- a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java
+++ b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EnhancementJobHandler.java
@@ -35,9 +35,8 @@
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
 import org.apache.stanbol.enhancer.servicesapi.EngineException;
 import org.apache.stanbol.enhancer.servicesapi.EnhancementEngine;
@@ -189,7 +188,7 @@
     @Override
     public void handleEvent(Event event) {
         EnhancementJob job = (EnhancementJob)event.getProperty(PROPERTY_JOB_MANAGER);
-        NonLiteral execution = (NonLiteral)event.getProperty(PROPERTY_EXECUTION);
+        BlankNodeOrIRI execution = (BlankNodeOrIRI)event.getProperty(PROPERTY_EXECUTION);
         if(job == null || execution == null){
             log.warn("Unable to process EnhancementEvent where EnhancementJob " +
             		"{} or Execution node {} is null -> ignore",job,execution);
@@ -222,7 +221,7 @@
             } else {
                 if(log.isInfoEnabled()){
                     Collection<String> running = new ArrayList<String>(3);
-                    for(NonLiteral runningNode : job.getRunning()){
+                    for(BlankNodeOrIRI runningNode : job.getRunning()){
                         running.add(getEngine(job.getExecutionPlan(), job.getExecutionNode(runningNode)));
                     }
                     log.info("Job {} failed, but {} still running!",
@@ -238,7 +237,7 @@
      * @param job
      * @param execution
      */
-    private void processEvent(EnhancementJob job, NonLiteral execution) {
+    private void processEvent(EnhancementJob job, BlankNodeOrIRI execution) {
         String engineName = getEngine(job.getExecutionPlan(), 
             job.getExecutionNode(execution));
         //(1) execute the parsed ExecutionNode
@@ -340,7 +339,7 @@
     protected boolean executeNextNodes(EnhancementJob job) {
         //getExecutable returns a snapshot so we do not need to lock
         boolean startedExecution = false;
-        for(NonLiteral executable : job.getExecutable()){
+        for(BlankNodeOrIRI executable : job.getExecutable()){
             if(log.isTraceEnabled()){
                 log.trace("PREPARE execution of Engine {}",
                     getEngine(job.getExecutionPlan(), job.getExecutionNode(executable)));
@@ -425,15 +424,15 @@
         log.info("   content-item: {}", job.getContentItem().getUri());
         if(logExecutions){
             log.info("  executions:");
-            for(NonLiteral completedExec : job.getCompleted()){
+            for(BlankNodeOrIRI completedExec : job.getCompleted()){
                 log.info("    - {} completed",getEngine(job.getExecutionMetadata(), 
                     job.getExecutionNode(completedExec)));
             }
-            for(NonLiteral runningExec : job.getRunning()){
+            for(BlankNodeOrIRI runningExec : job.getRunning()){
                 log.info("    - {} running",getEngine(job.getExecutionMetadata(), 
                     job.getExecutionNode(runningExec)));
             }
-            for(NonLiteral executeable : job.getExecutable()){
+            for(BlankNodeOrIRI executeable : job.getExecutable()){
                 log.info("    - {} executeable",getEngine(job.getExecutionMetadata(), 
                     job.getExecutionNode(executeable)));
             }
diff --git a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java
index 5ee8ff9..301b4f1 100644
--- a/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java
+++ b/enhancer/jobmanager/event/src/main/java/org/apache/stanbol/enhancer/jobmanager/event/impl/EventJobManagerImpl.java
@@ -26,8 +26,8 @@
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Triple;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Triple;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.felix.scr.annotations.Activate;
@@ -270,7 +270,7 @@
             throw new IllegalStateException("Currently no enhancement chain is " +
                 "active. Please configure a Chain or enable the default chain");
         }
-        Graph ep;
+        ImmutableGraph ep;
         try {
             ep = defaultChain.getExecutionPlan();
         } catch (ChainException e) {
diff --git a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java
index 65c2a6a..e1cba9c 100644
--- a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java
+++ b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/EnhancerLDPath.java
@@ -26,7 +26,7 @@
 import static org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses.ENHANCER_TEXTANNOTATION;
 import static org.apache.stanbol.enhancer.servicesapi.rdf.TechnicalClasses.ENHANCER_TOPICANNOTATION;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 import org.apache.marmotta.ldpath.api.functions.SelectorFunction;
 import org.apache.marmotta.ldpath.api.selectors.NodeSelector;
 import org.apache.marmotta.ldpath.model.Constants;
@@ -46,7 +46,7 @@
     
     private EnhancerLDPath(){}
     
-    private static Configuration<Resource> CONFIG;
+    private static Configuration<RDFTerm> CONFIG;
     
     /**
      * The LDPath configuration including the <ul>
@@ -55,9 +55,9 @@
      * </ul>
      * @return the LDPath configuration for the Stanbol Enhancer
      */
-    public static final Configuration<Resource> getConfig(){
+    public static final Configuration<RDFTerm> getConfig(){
         if(CONFIG == null){
-            CONFIG = new DefaultConfiguration<Resource>();
+            CONFIG = new DefaultConfiguration<RDFTerm>();
             //add the namespaces
             for(NamespaceEnum ns : NamespaceEnum.values()){
                 CONFIG.addNamespace(ns.getPrefix(), ns.getNamespace());
@@ -65,7 +65,7 @@
             //now add the functions
             addFunction(CONFIG, new ContentFunction());
             String path;
-            NodeSelector<Resource> selector;
+            NodeSelector<RDFTerm> selector;
             //TextAnnotations
             path = String.format("^%s[%s is %s]",
                 ENHANCER_EXTRACTED_FROM,RDF_TYPE,ENHANCER_TEXTANNOTATION);
@@ -75,7 +75,7 @@
                 throw new IllegalStateException("Unable to parse the ld-path selector '" +
                         path + "'used to select all TextAnnotations of a contentItem!", e);
             }
-            addFunction(CONFIG, new PathFunction<Resource>(
+            addFunction(CONFIG, new PathFunction<RDFTerm>(
                     "textAnnotation",selector));
             
             //EntityAnnotations
@@ -87,7 +87,7 @@
                 throw new IllegalStateException("Unable to parse the ld-path selector '" +
                         path + "'used to select all EntityAnnotations of a contentItem!", e);
             }
-            addFunction(CONFIG,new PathFunction<Resource>(
+            addFunction(CONFIG,new PathFunction<RDFTerm>(
                     "entityAnnotation", selector));
             
             //TopicAnnotations
@@ -99,7 +99,7 @@
                 throw new IllegalStateException("Unable to parse the ld-path selector '" +
                         path + "'used to select all TopicAnnotations of a contentItem!", e);
             }
-            addFunction(CONFIG,new PathFunction<Resource>(
+            addFunction(CONFIG,new PathFunction<RDFTerm>(
                     "topicAnnotation",selector));
             //Enhancements
             path = String.format("^%s[%s is %s]",
@@ -110,13 +110,13 @@
                 throw new IllegalStateException("Unable to parse the ld-path selector '" +
                         path + "'used to select all Enhancements of a contentItem!", e);
             }
-            addFunction(CONFIG,new PathFunction<Resource>(
+            addFunction(CONFIG,new PathFunction<RDFTerm>(
                     "enhancement",selector));
             
             //Suggested EntityAnnotations for Text/TopicAnnotations
             
             //(1) to select the suggestions
-            NodeSelector<Resource> linkedEntityAnnotations;
+            NodeSelector<RDFTerm> linkedEntityAnnotations;
             path = String.format("^%s[%s is %s]",
                 DC_RELATION,RDF_TYPE,ENHANCER_ENTITYANNOTATION,ENHANCER_CONFIDENCE);
             try {
@@ -126,7 +126,7 @@
                         path + "'used to select all entity suggestions for an Enhancement!", e);
             }
             //(2) to select the confidence value of Enhancements
-            NodeSelector<Resource> confidenceSelector;
+            NodeSelector<RDFTerm> confidenceSelector;
             path = ENHANCER_CONFIDENCE.toString();
             try {
                 confidenceSelector = Utils.parseSelector(path);
@@ -141,7 +141,7 @@
             
             //The suggestion and confidence selectors can be the same as above,
             //but we need an additional result selector
-            NodeSelector<Resource> entityReferenceSelector;
+            NodeSelector<RDFTerm> entityReferenceSelector;
             path = ENHANCER_ENTITY_REFERENCE.toString();
             try {
                 entityReferenceSelector = Utils.parseSelector(path);
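
A hedged sketch of the selector-parsing pattern this configuration repeats (the prefixed path is illustrative and assumes the fise and rdf prefixes registered by getConfig() via NamespaceEnum):

    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.marmotta.ldpath.api.selectors.NodeSelector;
    import org.apache.marmotta.ldpath.parser.ParseException;
    import org.apache.stanbol.enhancer.ldpath.utils.Utils;

    public class SelectorSketch {
        // Mirrors the String.format(...) pattern above: select the
        // TextAnnotations that point at a content item.
        public static NodeSelector<RDFTerm> textAnnotations() throws ParseException {
            return Utils.parseSelector(
                "^fise:extracted-from[rdf:type is fise:TextAnnotation]");
        }
    }
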
diff --git a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java
index 42e2e3d..50bf1f0 100644
--- a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java
+++ b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/backend/ContentItemBackend.java
@@ -32,11 +32,10 @@
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.utils.UnionMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.rdf.utils.UnionGraph;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
 import org.apache.stanbol.commons.ldpath.clerezza.ClerezzaBackend;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -49,16 +49,16 @@
  * @author Rupert Westenthaler
  *
  */
-public class ContentItemBackend implements RDFBackend<Resource>{
+public class ContentItemBackend implements RDFBackend<RDFTerm>{
 
     private final Logger log = LoggerFactory.getLogger(ContentItemBackend.class);
     
-    private static final Map<UriRef,TripleCollection> EMPTY_INCLUDED = emptyMap();
+    private static final Map<IRI,Graph> EMPTY_INCLUDED = emptyMap();
     
     private final ContentItem ci;
     private final Lock readLock;
     private final ClerezzaBackend backend;
-    private final Map<UriRef,TripleCollection> included;
+    private final Map<IRI,Graph> included;
     
     /**
      * Creates a {@link RDFBackend} over the {@link ContentItem#getMetadata()
@@ -71,23 +71,23 @@
     /**
      * Creates a {@link RDFBackend} over the {@link ContentItem#getMetadata()
      * metadata} and all {@link ContentItem#getPart(int, Class) content parts}
-     * compatible to {@link TripleCollection} 
+     * compatible with {@link Graph}
      * @param ci the content item
      * @param includeAdditionalMetadata if <code>true</code> the {@link RDFBackend}
      * will also include RDF data stored in content parts
      */
     public ContentItemBackend(ContentItem ci, boolean includeAdditionalMetadata){
         included = includeAdditionalMetadata ?
-                unmodifiableMap(getContentParts(ci, TripleCollection.class)) :
+                unmodifiableMap(getContentParts(ci, Graph.class)) :
                     EMPTY_INCLUDED;
-        MGraph graph;
+        Graph graph;
         if(included.isEmpty()){
             graph = ci.getMetadata();
         } else {
-            TripleCollection[] tcs = new TripleCollection[included.size()+1];
+            Graph[] tcs = new Graph[included.size()+1];
             tcs[0] = ci.getMetadata();
             System.arraycopy(included.values().toArray(), 0, tcs, 1, included.size());
-            graph = new UnionMGraph(tcs);
+            graph = new UnionGraph(tcs);
         }
         backend = new ClerezzaBackend(graph);
         this.ci = ci;
@@ -97,15 +97,15 @@
      * Creates a {@link RDFBackend} over the {@link ContentItem#getMetadata()
      * metadata} and RDF data stored in content parts with the parsed URIs.
      * If no content part for a parsed URI exists or its type is not compatible
-     * to {@link TripleCollection} it will be not included.
+     * with {@link Graph}, it will not be included.
      * @param ci the content item
      * @param includedMetadata the URIs for the content parts to include
      */
-    public ContentItemBackend(ContentItem ci, Set<UriRef> includedMetadata){
-        Map<UriRef,TripleCollection> included = new LinkedHashMap<UriRef,TripleCollection>();
-        for(UriRef ref : includedMetadata){
+    public ContentItemBackend(ContentItem ci, Set<IRI> includedMetadata){
+        Map<IRI,Graph> included = new LinkedHashMap<IRI,Graph>();
+        for(IRI ref : includedMetadata){
             try {
-                TripleCollection metadata = ci.getPart(ref, TripleCollection.class);
+                Graph metadata = ci.getPart(ref, Graph.class);
                 included.put(ref, metadata);
             } catch (RuntimeException e) {
                 log.warn("Unable to add requested Metadata-ContentPart "+ref+" to" +
@@ -113,14 +113,14 @@
             }
         }
         this.included = unmodifiableMap(included);
-        MGraph graph;
+        Graph graph;
         if(included.isEmpty()){
             graph = ci.getMetadata();
         } else {
-            TripleCollection[] tcs = new TripleCollection[included.size()+1];
+            Graph[] tcs = new Graph[included.size()+1];
             tcs[0] = ci.getMetadata();
             System.arraycopy(included.values().toArray(), 0, tcs, 1, included.size());
-            graph = new UnionMGraph(tcs);
+            graph = new UnionGraph(tcs);
         }
         backend = new ClerezzaBackend(graph);
         this.ci = ci;
@@ -129,7 +129,7 @@
     
 
     @Override
-    public Collection<Resource> listObjects(Resource subject, Resource property) {
+    public Collection<RDFTerm> listObjects(RDFTerm subject, RDFTerm property) {
         readLock.lock();
         try {
             return backend.listObjects(subject, property);
@@ -139,7 +139,7 @@
     }
 
     @Override
-    public Collection<Resource> listSubjects(Resource property, Resource object) {
+    public Collection<RDFTerm> listSubjects(RDFTerm property, RDFTerm object) {
         readLock.lock();
         try {
             return backend.listSubjects(property, object);
@@ -159,84 +159,84 @@
      * RDF backend
      * @return the content parts included in this {@link RDFBackend}
      */
-    public Map<UriRef,TripleCollection> getIncludedMetadata(){
+    public Map<IRI,Graph> getIncludedMetadata(){
         return included;
     }
     
     @Override
-    public boolean isLiteral(Resource n) {
+    public boolean isLiteral(RDFTerm n) {
         return backend.isLiteral(n);
     }
     @Override
-    public boolean isURI(Resource n) {
+    public boolean isURI(RDFTerm n) {
         return backend.isURI(n);
     }
     @Override
-    public boolean isBlank(Resource n) {
+    public boolean isBlank(RDFTerm n) {
         return backend.isBlank(n);
     }
     @Override
-    public Locale getLiteralLanguage(Resource n) {
+    public Locale getLiteralLanguage(RDFTerm n) {
         return backend.getLiteralLanguage(n);
     }
     @Override
-    public URI getLiteralType(Resource n) {
+    public URI getLiteralType(RDFTerm n) {
         return backend.getLiteralType(n);
     }
     @Override
-    public Resource createLiteral(String content) {
+    public RDFTerm createLiteral(String content) {
         return backend.createLiteral(content);
     }
     @Override
-    public Resource createLiteral(String content, Locale language, URI type) {
+    public RDFTerm createLiteral(String content, Locale language, URI type) {
         return backend.createLiteral(content, language, type);
     }
     @Override
-    public Resource createURI(String uri) {
+    public RDFTerm createURI(String uri) {
         return backend.createURI(uri);
     }
     @Override
-    public String stringValue(Resource node) {
+    public String stringValue(RDFTerm node) {
         return backend.stringValue(node);
     }
     @Override
-    public Double doubleValue(Resource node) {
+    public Double doubleValue(RDFTerm node) {
         return backend.doubleValue(node);
     }
     @Override
-    public Long longValue(Resource node) {
+    public Long longValue(RDFTerm node) {
         return backend.longValue(node);
     }
     @Override
-    public Boolean booleanValue(Resource node) {
+    public Boolean booleanValue(RDFTerm node) {
         return backend.booleanValue(node);
     }
     @Override
-    public Date dateTimeValue(Resource node) {
+    public Date dateTimeValue(RDFTerm node) {
         return backend.dateTimeValue(node);
     }
     @Override
-    public Date dateValue(Resource node) {
+    public Date dateValue(RDFTerm node) {
         return backend.dateValue(node);
     }
     @Override
-    public Date timeValue(Resource node) {
+    public Date timeValue(RDFTerm node) {
         return backend.timeValue(node);
     }
     @Override
-    public Float floatValue(Resource node) {
+    public Float floatValue(RDFTerm node) {
         return backend.floatValue(node);
     }
     @Override
-    public Integer intValue(Resource node) {
+    public Integer intValue(RDFTerm node) {
         return backend.intValue(node);
     }
     @Override
-    public BigInteger integerValue(Resource node) {
+    public BigInteger integerValue(RDFTerm node) {
         return backend.integerValue(node);
     }
     @Override
-    public BigDecimal decimalValue(Resource node) {
+    public BigDecimal decimalValue(RDFTerm node) {
         return backend.decimalValue(node);
     }
     
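
A minimal construction sketch for the migrated signatures (the content-part URI suffix follows the convention used in the tests and is not an API guarantee):

    import java.util.Collections;
    import java.util.Set;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.enhancer.ldpath.backend.ContentItemBackend;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;

    public class BackendConstruction {
        public static ContentItemBackend withAdditionalPart(ContentItem ci) {
            // include a single RDF content part, identified by the new IRI type
            Set<IRI> parts = Collections.singleton(
                new IRI(ci.getUri().getUnicodeString() + "_additionalMetadata"));
            return new ContentItemBackend(ci, parts);
        }
    }
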
diff --git a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java
index bf3b2a8..3382962 100644
--- a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java
+++ b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentFunction.java
@@ -26,8 +26,8 @@
 import java.util.Set;
 
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.commons.io.IOUtils;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
 import org.apache.marmotta.ldpath.util.Collections;
@@ -55,15 +55,15 @@
     }
     
     @Override
-    public Collection<Resource> apply(ContentItemBackend backend, Resource context, Collection<Resource>... args) throws IllegalArgumentException {
+    public Collection<RDFTerm> apply(ContentItemBackend backend, RDFTerm context, Collection<RDFTerm>... args) throws IllegalArgumentException {
         ContentItem ci = backend.getContentItem();
         Set<String> mimeTypes;
         if(args == null || args.length < 1){
             mimeTypes = null;
         } else {
             mimeTypes = new HashSet<String>();
-            for(Iterator<Resource> params = Collections.concat(args).iterator();params.hasNext();){
-                Resource param = params.next();
+            for(Iterator<RDFTerm> params = Collections.concat(args).iterator();params.hasNext();){
+                RDFTerm param = params.next();
                 String mediaTypeString = backend.stringValue(param);
                 try {
                     mimeTypes.add(parseMimeType(mediaTypeString).get(null));
@@ -73,12 +73,12 @@
                 }
             }
         }
-        Collection<Resource> result;
+        Collection<RDFTerm> result;
         Blob blob;
         if(mimeTypes == null || mimeTypes.isEmpty()){
             blob = ci.getBlob();
         } else {
-            Entry<UriRef,Blob> entry = ContentItemHelper.getBlob(ci, mimeTypes);
+            Entry<IRI,Blob> entry = ContentItemHelper.getBlob(ci, mimeTypes);
             blob = entry != null ? entry.getValue() : null;
         }
         if(blob == null){
@@ -92,7 +92,7 @@
                 } else { //binary content
                     byte[] data = IOUtils.toByteArray(blob.getStream());
                     result = java.util.Collections.singleton(
-                        (Resource)lf.createTypedLiteral(data));
+                        (RDFTerm)lf.createTypedLiteral(data));
                 }
             } catch (IOException e) {
                 throw new IllegalStateException("Unable to read contents from Blob '"
diff --git a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentItemFunction.java b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentItemFunction.java
index 95f69de..47283d8 100644
--- a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentItemFunction.java
+++ b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/ContentItemFunction.java
@@ -18,7 +18,7 @@
 
 import java.util.Collection;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 import org.apache.stanbol.enhancer.ldpath.backend.ContentItemBackend;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
 import org.apache.marmotta.ldpath.api.functions.SelectorFunction;
@@ -33,7 +33,7 @@
  * @author Rupert Westenthaler
  *
  */
-public abstract class ContentItemFunction extends SelectorFunction<Resource> {
+public abstract class ContentItemFunction extends SelectorFunction<RDFTerm> {
     
     private final String name;
 
@@ -44,7 +44,7 @@
         this.name = name;
     }
     
-    public final Collection<Resource> apply(RDFBackend<Resource> backend, Resource context, Collection<Resource>... args) throws IllegalArgumentException {
+    public final Collection<RDFTerm> apply(RDFBackend<RDFTerm> backend, RDFTerm context, Collection<RDFTerm>... args) throws IllegalArgumentException {
         if(backend instanceof ContentItemBackend){
             return apply((ContentItemBackend)backend, context, args);
         } else {
@@ -55,7 +55,7 @@
         }
     };
 
-    public abstract Collection<Resource> apply(ContentItemBackend backend,Resource context, Collection<Resource>... args);
+    public abstract Collection<RDFTerm> apply(ContentItemBackend backend,RDFTerm context, Collection<RDFTerm>... args);
     
     @Override
     protected String getLocalName() {
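
A hedged sketch of a concrete subclass; the function name "uri" and the constructor's accessibility are assumptions, since neither is shown in this hunk:

    import java.util.Collection;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.stanbol.enhancer.ldpath.backend.ContentItemBackend;
    import org.apache.stanbol.enhancer.ldpath.function.ContentItemFunction;

    public class UriFunction extends ContentItemFunction {
        public UriFunction() {
            super("uri"); // hypothetical function name
        }
        @Override
        public Collection<RDFTerm> apply(ContentItemBackend backend,
                RDFTerm context, Collection<RDFTerm>... args) {
            // the content item's IRI is itself an RDFTerm under the new API
            return java.util.Collections.<RDFTerm>singleton(
                backend.getContentItem().getUri());
        }
    }
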
diff --git a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/SuggestionFunction.java b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/SuggestionFunction.java
index 71e262b..ed515c1 100644
--- a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/SuggestionFunction.java
+++ b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/function/SuggestionFunction.java
@@ -25,7 +25,7 @@
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.Resource;
+import org.apache.clerezza.commons.rdf.RDFTerm;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
 import org.apache.marmotta.ldpath.api.functions.SelectorFunction;
 import org.apache.marmotta.ldpath.api.selectors.NodeSelector;
@@ -34,13 +34,13 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class SuggestionFunction extends SelectorFunction<Resource> {
+public class SuggestionFunction extends SelectorFunction<RDFTerm> {
     
-    private static final Comparator<Entry<Double,Resource>> SUGGESTION_COMPARATOR = 
-            new Comparator<Entry<Double,Resource>>() {
+    private static final Comparator<Entry<Double,RDFTerm>> SUGGESTION_COMPARATOR = 
+            new Comparator<Entry<Double,RDFTerm>>() {
 
         @Override
-        public int compare(Entry<Double,Resource> e1, Entry<Double,Resource> e2) {
+        public int compare(Entry<Double,RDFTerm> e1, Entry<Double,RDFTerm> e2) {
             return e2.getKey().compareTo(e1.getKey());
         }
         
@@ -58,22 +58,22 @@
     Logger log = LoggerFactory.getLogger(SuggestionFunction.class);
 
     private final String name;
-    private final IntTransformer<Resource> intTransformer;
-    private final StringTransformer<Resource> stringTransformer;
-    private final NodeSelector<Resource> suggestionSelector;
-    private final NodeSelector<Resource> confidenceSelector;
-    private final NodeSelector<Resource> resultSelector;
+    private final IntTransformer<RDFTerm> intTransformer;
+    private final StringTransformer<RDFTerm> stringTransformer;
+    private final NodeSelector<RDFTerm> suggestionSelector;
+    private final NodeSelector<RDFTerm> confidenceSelector;
+    private final NodeSelector<RDFTerm> resultSelector;
     public SuggestionFunction(String name,
-                              NodeSelector<Resource> suggestionSelector,
-                              NodeSelector<Resource> confidenceSelector){
+                              NodeSelector<RDFTerm> suggestionSelector,
+                              NodeSelector<RDFTerm> confidenceSelector){
         this(name,null,suggestionSelector,confidenceSelector);
     }
     public SuggestionFunction(String name,
-                              NodeSelector<Resource> suggestionSelector,
-                              NodeSelector<Resource> confidenceSelector,
-                              NodeSelector<Resource> resultSelector) {
-        intTransformer = new IntTransformer<Resource>();
-        stringTransformer = new StringTransformer<Resource>();
+                              NodeSelector<RDFTerm> suggestionSelector,
+                              NodeSelector<RDFTerm> confidenceSelector,
+                              NodeSelector<RDFTerm> resultSelector) {
+        intTransformer = new IntTransformer<RDFTerm>();
+        stringTransformer = new StringTransformer<RDFTerm>();
         if(name == null || name.isEmpty()){
             throw new IllegalArgumentException("The parsed function name MUST NOT be NULL nor empty!");
         }
@@ -90,12 +90,12 @@
     }
     
     @Override
-    public Collection<Resource> apply(final RDFBackend<Resource> backend, Resource context, Collection<Resource>... args) throws IllegalArgumentException {
+    public Collection<RDFTerm> apply(final RDFBackend<RDFTerm> backend, RDFTerm context, Collection<RDFTerm>... args) throws IllegalArgumentException {
         int paramIndex = 0;
-        Collection<Resource> contexts = null;
+        Collection<RDFTerm> contexts = null;
         if(args != null && args.length > 0 && args[0] != null && !args[0].isEmpty()){
-            contexts = new ArrayList<Resource>();
-            for(Resource r : args[0]){
+            contexts = new ArrayList<RDFTerm>();
+            for(RDFTerm r : args[0]){
                 if(backend.isURI(r)){
                     contexts.add(r);
                     paramIndex = 1;
@@ -108,11 +108,11 @@
         Integer limit = parseParamLimit(backend, args,paramIndex);
 //        final String processingMode = parseParamProcessingMode(backend, args,2);
         final int missingConfidenceMode = parseParamMissingConfidenceMode(backend, args,paramIndex+1);
-        List<Resource> result = new ArrayList<Resource>();
+        List<RDFTerm> result = new ArrayList<RDFTerm>();
 //        if(processingMode.equals(ANNOTATION_PROCESSING_MODE_UNION)){
             processAnnotations(backend, contexts, limit, missingConfidenceMode, result);
 //        } else {
-//            for(Resource context : args[0]){
+//            for(RDFTerm context : args[0]){
 //                processAnnotations(backend, singleton(context),
 //                    limit, missingConfidenceMode, result);
 //            }
@@ -131,16 +131,16 @@
      * @param missingConfidenceMode
      * @param result results are added to this list.
      */
-    private void processAnnotations(final RDFBackend<Resource> backend,
-                                    Collection<Resource> annotations,
+    private void processAnnotations(final RDFBackend<RDFTerm> backend,
+                                    Collection<RDFTerm> annotations,
                                     Integer limit,
                                     final int missingConfidenceMode,
-                                    List<Resource> result) {
-        List<Entry<Double,Resource>> suggestions = new ArrayList<Entry<Double,Resource>>();
-        for(Resource annotation : annotations){
+                                    List<RDFTerm> result) {
+        List<Entry<Double,RDFTerm>> suggestions = new ArrayList<Entry<Double,RDFTerm>>();
+        for(RDFTerm annotation : annotations){
             //NOTE: no Path Tracking support possible for selectors wrapped in functions
-            for(Resource suggestion : suggestionSelector.select(backend, annotation,null,null)){
-                Collection<Resource> cs = confidenceSelector.select(backend, suggestion,null,null);
+            for(RDFTerm suggestion : suggestionSelector.select(backend, annotation,null,null)){
+                Collection<RDFTerm> cs = confidenceSelector.select(backend, suggestion,null,null);
                 Double confidence = !cs.isEmpty() ? backend.doubleValue(cs.iterator().next()) : 
                         missingConfidenceMode == MISSING_CONFIDENCE_FILTER ?
                                 null : missingConfidenceMode == MISSING_CONFIDENCE_FIRST ?
@@ -153,7 +153,7 @@
         }
         Collections.sort(suggestions, SUGGESTION_COMPARATOR);
         int resultSize = limit != null ? Math.min(limit, suggestions.size()) : suggestions.size();
-        for(Entry<Double,Resource> suggestion : suggestions.subList(0, resultSize)){
+        for(Entry<Double,RDFTerm> suggestion : suggestions.subList(0, resultSize)){
             if(resultSelector == null){
                 result.add(suggestion.getValue());
             } else {
@@ -169,8 +169,8 @@
      * @param args
      * @return
      */
-    private int parseParamMissingConfidenceMode(final RDFBackend<Resource> backend,
-                                                Collection<Resource>[] args, int index) {
+    private int parseParamMissingConfidenceMode(final RDFBackend<RDFTerm> backend,
+                                                Collection<RDFTerm>[] args, int index) {
         final int missingConfidenceMode;
         if(args.length > index && !args[index].isEmpty()){
             String mode = stringTransformer.transform(backend, args[index].iterator().next(),
@@ -196,7 +196,7 @@
 //     * @param args
 //     * @return
 //     */
-//    private String parseParamProcessingMode(final RDFBackend<Resource> backend, Collection<Resource>[] args, int index) {
+//    private String parseParamProcessingMode(final RDFBackend<RDFTerm> backend, Collection<RDFTerm>[] args, int index) {
 //        final String processingMode;
 //        if(args.length > index && !args[index].isEmpty()){
 //            String mode = stringTransformer.transform(backend, args[index].iterator().next());
@@ -219,10 +219,10 @@
      * @param args
      * @return
      */
-    private Integer parseParamLimit(final RDFBackend<Resource> backend, Collection<Resource>[] args,int index) {
+    private Integer parseParamLimit(final RDFBackend<RDFTerm> backend, Collection<RDFTerm>[] args,int index) {
         Integer limit = null;
         if(args.length > index && !args[index].isEmpty()){
-            Resource value = args[index].iterator().next();
+            RDFTerm value = args[index].iterator().next();
             try {
                 limit = intTransformer.transform(backend, value, Collections.<String,String>emptyMap());
                 if(limit < 1){
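
For context, a hedged sketch of how this function is reached through an LDPath path query, mirroring the tests later in this commit (the limit "2" is illustrative):

    import java.util.Collection;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.marmotta.ldpath.LDPath;
    import org.apache.marmotta.ldpath.exception.LDPathParseException;
    import org.apache.stanbol.enhancer.ldpath.EnhancerLDPath;
    import org.apache.stanbol.enhancer.ldpath.backend.ContentItemBackend;
    import org.apache.stanbol.enhancer.servicesapi.ContentItem;

    public class SuggestionQuerySketch {
        // Returns the two top-most Place suggestions, ordered by
        // fise:confidence as implemented by SuggestionFunction above.
        public static Collection<RDFTerm> topPlaceSuggestions(ContentItem ci)
                throws LDPathParseException {
            LDPath<RDFTerm> ldpath = new LDPath<RDFTerm>(
                new ContentItemBackend(ci), EnhancerLDPath.getConfig());
            return ldpath.pathQuery(ci.getUri(),
                "fn:textAnnotation(.)[dc:type is dbpedia-ont:Place]/fn:suggestion(.,\"2\")",
                null);
        }
    }
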
diff --git a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/utils/Utils.java b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/utils/Utils.java
index aca0c71..3ad5d04 100644
--- a/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/utils/Utils.java
+++ b/enhancer/ldpath/src/main/java/org/apache/stanbol/enhancer/ldpath/utils/Utils.java
@@ -21,8 +21,8 @@
 import java.io.StringReader;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
 import org.apache.marmotta.ldpath.api.selectors.NodeSelector;
 import org.apache.marmotta.ldpath.parser.LdPathParser;
@@ -35,7 +35,7 @@
     
 
 
-    public static RDFBackend<Resource> EMPTY_BACKEND;
+    public static RDFBackend<RDFTerm> EMPTY_BACKEND;
     
     /**
      * Returns an empty {@link RDFBackend} instance intended to be used to create
@@ -53,20 +53,20 @@
      * used for the 2nd purpose and does contain no information!
      * <li>
      */
-    public static RDFBackend<Resource> getEmptyBackend(){
+    public static RDFBackend<RDFTerm> getEmptyBackend(){
         if(EMPTY_BACKEND == null){
-            EMPTY_BACKEND = new ClerezzaBackend(new SimpleMGraph());
+            EMPTY_BACKEND = new ClerezzaBackend(new SimpleGraph());
         }
         return EMPTY_BACKEND;
     }
     
 
     
-    public static NodeSelector<Resource> parseSelector(String path) throws ParseException {
+    public static NodeSelector<RDFTerm> parseSelector(String path) throws ParseException {
         return parseSelector(path, null);
     }
-    public static NodeSelector<Resource> parseSelector(String path, Map<String,String> additionalNamespaceMappings) throws ParseException {
-        LdPathParser<Resource> parser = new LdPathParser<Resource>(
+    public static NodeSelector<RDFTerm> parseSelector(String path, Map<String,String> additionalNamespaceMappings) throws ParseException {
+        LdPathParser<RDFTerm> parser = new LdPathParser<RDFTerm>(
                getEmptyBackend(), getConfig(), new StringReader(path));
         return parser.parseSelector(additionalNamespaceMappings);
     }
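
A small sketch of the replacement type in isolation: SimpleGraph is the mutable Graph implementation standing in for the removed SimpleMGraph (assuming, as the code above implies, that ClerezzaBackend accepts any Graph):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.stanbol.commons.ldpath.clerezza.ClerezzaBackend;

    public class EmptyBackendSketch {
        public static ClerezzaBackend empty() {
            Graph graph = new SimpleGraph(); // starts with no triples
            return new ClerezzaBackend(graph);
        }
    }
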
diff --git a/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/ContentItemBackendTest.java b/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/ContentItemBackendTest.java
index 49fb9ef..a9ed25f 100644
--- a/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/ContentItemBackendTest.java
+++ b/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/ContentItemBackendTest.java
@@ -34,23 +34,21 @@
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.ParsingProvider;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
 import org.apache.commons.io.IOUtils;
 import org.apache.marmotta.ldpath.LDPath;
 import org.apache.marmotta.ldpath.exception.LDPathParseException;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.enhancer.contentitem.inmemory.InMemoryContentItemFactory;
 import org.apache.stanbol.enhancer.ldpath.backend.ContentItemBackend;
 import org.apache.stanbol.enhancer.servicesapi.ContentItem;
@@ -88,20 +86,20 @@
     private static String htmlContent;
     private static ContentItem ci;
     private ContentItemBackend backend;
-    private LDPath<Resource> ldpath;
+    private LDPath<RDFTerm> ldpath;
     @BeforeClass
     public static void readTestData() throws IOException {
         //add the metadata
         ParsingProvider parser = new JenaParserProvider();
         //create the content Item with the HTML content
-        MGraph rdfData = parseRdfData(parser,"metadata.rdf.zip");
-        UriRef contentItemId = null;
+        Graph rdfData = parseRdfData(parser,"metadata.rdf.zip");
+        IRI contentItemId = null;
         Iterator<Triple> it = rdfData.filter(null, Properties.ENHANCER_EXTRACTED_FROM, null);
         while(it.hasNext()){
-            Resource r = it.next().getObject();
+            RDFTerm r = it.next().getObject();
             if(contentItemId == null){
-                if(r instanceof UriRef){
-                    contentItemId = (UriRef)r;
+                if(r instanceof IRI){
+                    contentItemId = (IRI)r;
                 }
             } else {
                 assertEquals("multiple ContentItems IDs contained in the RDF test data", 
@@ -123,7 +121,7 @@
         byte[] textData = IOUtils.toByteArray(in);
         IOUtils.closeQuietly(in);
         assertNotNull("Plain text content not found",in);
-        ci.addPart(new UriRef(ci.getUri().getUnicodeString()+"_text"),
+        ci.addPart(new IRI(ci.getUri().getUnicodeString()+"_text"),
             ciFactory.createBlob(new ByteArraySource(textData, "text/plain; charset=UTF-8")));
         textContent = new String(textData, UTF8);
         //add the metadata
@@ -135,8 +133,8 @@
      * @return
      * @throws IOException
      */
-    protected static MGraph parseRdfData(ParsingProvider parser,String name) throws IOException {
-        MGraph rdfData = new IndexedMGraph();
+    protected static Graph parseRdfData(ParsingProvider parser,String name) throws IOException {
+        Graph rdfData = new IndexedGraph();
         InputStream in = getTestResource(name);
         assertNotNull("File '"+name+"' not found",in);
         ZipInputStream zipIn = new ZipInputStream(new BufferedInputStream(in));
@@ -166,17 +164,17 @@
             backend = new ContentItemBackend(ci);
         }
         if(ldpath == null){
-            ldpath = new LDPath<Resource>(backend, EnhancerLDPath.getConfig());
+            ldpath = new LDPath<RDFTerm>(backend, EnhancerLDPath.getConfig());
         }
     }
     
     @Test
     public void testContent() throws LDPathParseException {
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), "fn:content(\"text/plain\")", null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), "fn:content(\"text/plain\")", null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        Resource r = result.iterator().next();
+        RDFTerm r = result.iterator().next();
         assertTrue(r instanceof Literal);
         String content = ((Literal)r).getLexicalForm();
         assertEquals(content, textContent);
@@ -194,20 +192,20 @@
     @Test
     public void testContentWithAdditionalMetadata() throws IOException, LDPathParseException {
         byte[] content = "text content".getBytes();
-        UriRef uri = ContentItemHelper.makeDefaultUrn(content);
+        IRI uri = ContentItemHelper.makeDefaultUrn(content);
 
         ContentItem contentItem = ciFactory.createContentItem(uri, new ByteArraySource(content,
                 "text/plain; charset=UTF-8"));
 
-        TripleCollection tc = new SimpleMGraph();
-        TypedLiteral literal = LiteralFactory.getInstance().createTypedLiteral("Michael Jackson");
-        UriRef subject = new UriRef("dummyUri");
-        tc.add(new TripleImpl(subject, new UriRef("http://xmlns.com/foaf/0.1/givenName"), literal));
-        contentItem.addPart(new UriRef(uri.getUnicodeString() + "_additionalMetadata"), tc);
+        Graph tc = new SimpleGraph();
+        Literal literal = LiteralFactory.getInstance().createTypedLiteral("Michael Jackson");
+        IRI subject = new IRI("dummyUri");
+        tc.add(new TripleImpl(subject, new IRI("http://xmlns.com/foaf/0.1/givenName"), literal));
+        contentItem.addPart(new IRI(uri.getUnicodeString() + "_additionalMetadata"), tc);
 
         ContentItemBackend ciBackend = new ContentItemBackend(contentItem, true);
-        LDPath<Resource> ldPath = new LDPath<Resource>(ciBackend, EnhancerLDPath.getConfig());
-        Collection<Resource> result = ldPath.pathQuery(subject, "foaf:givenName", null);
+        LDPath<RDFTerm> ldPath = new LDPath<RDFTerm>(ciBackend, EnhancerLDPath.getConfig());
+        Collection<RDFTerm> result = ldPath.pathQuery(subject, "foaf:givenName", null);
 
         assertTrue("Additional metadata cannot be found", result.contains(literal));
     }
@@ -215,13 +213,13 @@
     @Test
     public void testTextAnnotationFunction() throws LDPathParseException {
         String path = "fn:textAnnotation(.)/fise:selected-text";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 2);
         Set<String> expectedValues = new HashSet<String>(
                 Arrays.asList("Bob Marley","Paris"));
-        for(Resource r : result){
+        for(RDFTerm r : result){
             assertTrue(r instanceof Literal);
             assertTrue(expectedValues.remove(((Literal)r).getLexicalForm()));
         }
@@ -235,7 +233,7 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        Resource r = result.iterator().next();
+        RDFTerm r = result.iterator().next();
         assertTrue(r instanceof Literal);
         assertEquals(((Literal)r).getLexicalForm(), "Bob Marley");
 
@@ -243,13 +241,13 @@
     @Test
     public void testTextAnnotationFunctionWithoutParsedContext() throws LDPathParseException {
         String path = "fn:textAnnotation()/fise:selected-text";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 2);
         Set<String> expectedValues = new HashSet<String>(
                 Arrays.asList("Bob Marley","Paris"));
-        for(Resource r : result){
+        for(RDFTerm r : result){
             assertTrue(r instanceof Literal);
             assertTrue(expectedValues.remove(((Literal)r).getLexicalForm()));
         }
@@ -263,7 +261,7 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        Resource r = result.iterator().next();
+        RDFTerm r = result.iterator().next();
         assertTrue(r instanceof Literal);
         assertEquals(((Literal)r).getLexicalForm(), "Bob Marley");
 
@@ -271,18 +269,18 @@
     @Test
     public void testEntityAnnotation() throws LDPathParseException {
         String path = "fn:entityAnnotation(.)/fise:entity-reference";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 4);
-        Set<UriRef> expectedValues = new HashSet<UriRef>(
+        Set<IRI> expectedValues = new HashSet<IRI>(
                 Arrays.asList(
-                    new UriRef("http://dbpedia.org/resource/Paris"),
-                    new UriRef("http://dbpedia.org/resource/Bob_Marley"),
-                    new UriRef("http://dbpedia.org/resource/Centre_Georges_Pompidou"),
-                    new UriRef("http://dbpedia.org/resource/Paris,_Texas")));
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+                    new IRI("http://dbpedia.org/resource/Paris"),
+                    new IRI("http://dbpedia.org/resource/Bob_Marley"),
+                    new IRI("http://dbpedia.org/resource/Centre_Georges_Pompidou"),
+                    new IRI("http://dbpedia.org/resource/Paris,_Texas")));
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("Entity: {}",r);
             assertTrue(expectedValues.remove(r));
         }
@@ -293,23 +291,23 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        assertTrue(result.contains(new UriRef("http://dbpedia.org/resource/Bob_Marley")));
+        assertTrue(result.contains(new IRI("http://dbpedia.org/resource/Bob_Marley")));
     }
     @Test
     public void testEntityAnnotationWithoutParsedContext() throws LDPathParseException {
         String path = "fn:entityAnnotation()/fise:entity-reference";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 4);
-        Set<UriRef> expectedValues = new HashSet<UriRef>(
+        Set<IRI> expectedValues = new HashSet<IRI>(
                 Arrays.asList(
-                    new UriRef("http://dbpedia.org/resource/Paris"),
-                    new UriRef("http://dbpedia.org/resource/Bob_Marley"),
-                    new UriRef("http://dbpedia.org/resource/Centre_Georges_Pompidou"),
-                    new UriRef("http://dbpedia.org/resource/Paris,_Texas")));
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+                    new IRI("http://dbpedia.org/resource/Paris"),
+                    new IRI("http://dbpedia.org/resource/Bob_Marley"),
+                    new IRI("http://dbpedia.org/resource/Centre_Georges_Pompidou"),
+                    new IRI("http://dbpedia.org/resource/Paris,_Texas")));
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("Entity: {}",r);
             assertTrue(expectedValues.remove(r));
         }
@@ -320,17 +318,17 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        assertTrue(result.contains(new UriRef("http://dbpedia.org/resource/Bob_Marley")));
+        assertTrue(result.contains(new IRI("http://dbpedia.org/resource/Bob_Marley")));
     }
     @Test
     public void testEnhancements() throws LDPathParseException {
         String path = "fn:enhancement(.)";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 7);
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("Entity: {}",r);
         }
         //and with a filter
@@ -339,25 +337,25 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 3);
-//        assertTrue(result.contains(new UriRef("http://dbpedia.org/resource/Bob_Marley")));
+//        assertTrue(result.contains(new IRI("http://dbpedia.org/resource/Bob_Marley")));
         path = "fn:enhancement(.)/dc:language";
         result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        Resource r = result.iterator().next();
+        RDFTerm r = result.iterator().next();
         assertTrue(r instanceof Literal);
         assertEquals("en",((Literal)r).getLexicalForm());
     }
     @Test
     public void testEnhancementsWithoutParsedContext() throws LDPathParseException {
         String path = "fn:enhancement()";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 7);
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("Entity: {}",r);
         }
         //and with a filter
@@ -366,13 +364,13 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 3);
-//        assertTrue(result.contains(new UriRef("http://dbpedia.org/resource/Bob_Marley")));
+//        assertTrue(result.contains(new IRI("http://dbpedia.org/resource/Bob_Marley")));
         path = "fn:enhancement()/dc:language";
         result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        Resource r = result.iterator().next();
+        RDFTerm r = result.iterator().next();
         assertTrue(r instanceof Literal);
         assertEquals("en",((Literal)r).getLexicalForm());
     }
@@ -386,27 +384,27 @@
         // are returned and later that a limit of 2 only returns the two top
         // most.
         String path = "fn:textAnnotation(.)[dc:type is dbpedia-ont:Place]/fn:suggestion(.)";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 3);
         Double lowestConfidence = null;
         //stores the lowest confidence suggestion for the 2nd part of this test
-        UriRef lowestConfidenceSuggestion = null;
+        IRI lowestConfidenceSuggestion = null;
         path = "fise:confidence :: xsd:double";
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("confidence: {}",r);
             Double current = (Double)ldpath.pathTransform(r, path, null).iterator().next();
             assertNotNull(current);
             if(lowestConfidence == null || lowestConfidence > current){
                 lowestConfidence = current;
-                lowestConfidenceSuggestion = (UriRef) r;
+                lowestConfidenceSuggestion = (IRI) r;
             }
         }
         assertNotNull(lowestConfidenceSuggestion);
         path = "fn:textAnnotation(.)[dc:type is dbpedia-ont:Place]/fn:suggestion(.,\"2\")";
-        Collection<Resource> result2 = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result2 = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result2);
         assertFalse(result2.isEmpty());
         assertTrue(result2.size() == 2);
@@ -425,27 +423,27 @@
         // are returned and later that a limit of 2 only returns the two top
         // most.
         String path = "fn:textAnnotation()[dc:type is dbpedia-ont:Place]/fn:suggestion()";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 3);
         Double lowestConfidence = null;
         //stores the lowest confidence suggestion for the 2nd part of this test
-        UriRef lowestConfidenceSuggestion = null;
+        IRI lowestConfidenceSuggestion = null;
         path = "fise:confidence :: xsd:double";
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("confidence: {}",r);
             Double current = (Double)ldpath.pathTransform(r, path, null).iterator().next();
             assertNotNull(current);
             if(lowestConfidence == null || lowestConfidence > current){
                 lowestConfidence = current;
-                lowestConfidenceSuggestion = (UriRef) r;
+                lowestConfidenceSuggestion = (IRI) r;
             }
         }
         assertNotNull(lowestConfidenceSuggestion);
         path = "fn:textAnnotation()[dc:type is dbpedia-ont:Place]/fn:suggestion(\"2\")";
-        Collection<Resource> result2 = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result2 = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result2);
         assertFalse(result2.isEmpty());
         assertTrue(result2.size() == 2);
@@ -461,16 +459,16 @@
         //    In this example we parse all TextAnnotations
         //NOTE: '.' MUST BE used as first argument in this case
         String path = "fn:textAnnotation(.)/fn:suggestedEntity(.,\"1\")";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 2);
-        Set<UriRef> expectedValues = new HashSet<UriRef>(
+        Set<IRI> expectedValues = new HashSet<IRI>(
                 Arrays.asList(
-                    new UriRef("http://dbpedia.org/resource/Paris"),
-                    new UriRef("http://dbpedia.org/resource/Bob_Marley")));
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+                    new IRI("http://dbpedia.org/resource/Paris"),
+                    new IRI("http://dbpedia.org/resource/Bob_Marley")));
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("Entity: {}",r);
             assertTrue(expectedValues.remove(r));
         }
@@ -485,7 +483,7 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        assertEquals(new UriRef("http://dbpedia.org/resource/Paris"),
+        assertEquals(new IRI("http://dbpedia.org/resource/Paris"),
             result.iterator().next());
         
     }
@@ -496,16 +494,16 @@
         //    In this example we parse all TextAnnotations
         //NOTE: no '.' is parsed as first argument in this case
         String path = "fn:textAnnotation()/fn:suggestedEntity(\"1\")";
-        Collection<Resource> result = ldpath.pathQuery(ci.getUri(), path, null);
+        Collection<RDFTerm> result = ldpath.pathQuery(ci.getUri(), path, null);
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 2);
-        Set<UriRef> expectedValues = new HashSet<UriRef>(
+        Set<IRI> expectedValues = new HashSet<IRI>(
                 Arrays.asList(
-                    new UriRef("http://dbpedia.org/resource/Paris"),
-                    new UriRef("http://dbpedia.org/resource/Bob_Marley")));
-        for(Resource r : result){
-            assertTrue(r instanceof UriRef);
+                    new IRI("http://dbpedia.org/resource/Paris"),
+                    new IRI("http://dbpedia.org/resource/Bob_Marley")));
+        for(RDFTerm r : result){
+            assertTrue(r instanceof IRI);
             log.info("Entity: {}",r);
             assertTrue(expectedValues.remove(r));
         }
@@ -520,7 +518,7 @@
         assertNotNull(result);
         assertFalse(result.isEmpty());
         assertTrue(result.size() == 1);
-        assertEquals(new UriRef("http://dbpedia.org/resource/Paris"),
+        assertEquals(new IRI("http://dbpedia.org/resource/Paris"),
             result.iterator().next());
         
     }
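
The additional-metadata test above reduces to this graph-building idiom (a sketch of the migrated types only; the subject and values are the test's dummy data):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.core.LiteralFactory;

    public class GraphBuildingSketch {
        public static Graph build() {
            Graph g = new SimpleGraph();
            Literal name = LiteralFactory.getInstance()
                .createTypedLiteral("Michael Jackson");
            g.add(new TripleImpl(new IRI("dummyUri"),
                new IRI("http://xmlns.com/foaf/0.1/givenName"), name));
            return g;
        }
    }
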
diff --git a/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/UsageExamples.java b/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/UsageExamples.java
index 186752c..dd8eaa7 100644
--- a/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/UsageExamples.java
+++ b/enhancer/ldpath/src/test/java/org/apache/stanbol/enhancer/ldpath/UsageExamples.java
@@ -30,10 +30,10 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.ParsingProvider;
 import org.apache.clerezza.rdf.jena.parser.JenaParserProvider;
 import org.apache.commons.io.IOUtils;
@@ -75,7 +75,7 @@
     
     private static ContentItem ci;
     private ContentItemBackend backend;
-    private LDPath<Resource> ldpath;
+    private LDPath<RDFTerm> ldpath;
     private static double indexingTime;
     
     @BeforeClass
@@ -83,14 +83,14 @@
         //add the metadata
         ParsingProvider parser = new JenaParserProvider();
         //create the content Item with the HTML content
-        MGraph rdfData = parseRdfData(parser,"example.rdf.zip");
-        UriRef contentItemId = null;
+        Graph rdfData = parseRdfData(parser,"example.rdf.zip");
+        IRI contentItemId = null;
         Iterator<Triple> it = rdfData.filter(null, Properties.ENHANCER_EXTRACTED_FROM, null);
         while(it.hasNext()){
-            Resource r = it.next().getObject();
+            RDFTerm r = it.next().getObject();
             if(contentItemId == null){
-                if(r instanceof UriRef){
-                    contentItemId = (UriRef)r;
+                if(r instanceof IRI){
+                    contentItemId = (IRI)r;
                 }
             } else {
                 assertEquals("multiple ContentItems IDs contained in the RDF test data", 
@@ -114,7 +114,7 @@
             backend = new ContentItemBackend(ci);
         }
         if(ldpath == null){
-            ldpath = new LDPath<Resource>(backend, EnhancerLDPath.getConfig());
+            ldpath = new LDPath<RDFTerm>(backend, EnhancerLDPath.getConfig());
         }
     }
 
@@ -138,7 +138,7 @@
         program.append("linkedArtists = fn:textAnnotation()" +
                 "[dc:type is dbpedia-ont:Person]/fn:suggestion()" +
                 "[fise:entity-type is dbpedia-ont:Artist]/fise:entity-reference :: xsd:anyURI;");
-        Program<Resource> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
+        Program<RDFTerm> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
         log.info("- - - - - - - - - - - - - ");
         log.info("Person Indexing Examples");
         Map<String,Collection<?>> result = execute(personProgram);
@@ -152,7 +152,7 @@
      * @param personProgram
      * @return the results
      */
-    private Map<String,Collection<?>> execute(Program<Resource> personProgram) {
+    private Map<String,Collection<?>> execute(Program<RDFTerm> personProgram) {
         long start = System.currentTimeMillis();
         Map<String,Collection<?>> result = personProgram.execute(backend, ci.getUri());
         for(int i=1;i<ITERATIONS;i++){
@@ -183,7 +183,7 @@
         program.append("linkedCountries = fn:textAnnotation()" +
                 "[dc:type is dbpedia-ont:Place]/fn:suggestion()" +
                 "[fise:entity-type is dbpedia-ont:Country]/fise:entity-reference :: xsd:anyURI;");
-        Program<Resource> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
+        Program<RDFTerm> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
         log.info("- - - - - - - - - - - - -");
         log.info("Places Indexing Examples");
         Map<String,Collection<?>> result = execute(personProgram);
@@ -212,7 +212,7 @@
         program.append("linkedEducationOrg = fn:textAnnotation()" +
                 "[dc:type is dbpedia-ont:Organisation]/fn:suggestion()" +
                 "[fise:entity-type is dbpedia-ont:EducationalInstitution]/fise:entity-reference :: xsd:anyURI;");
-        Program<Resource> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
+        Program<RDFTerm> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
         log.info("- - - - - - - - - - - - -");
         log.info("Places Indexing Examples");
         Map<String,Collection<?>> result = execute(personProgram);
@@ -234,7 +234,7 @@
         //but also the selected-text as fallback if no entity is suggested.
         program.append("linkedConcepts = fn:entityAnnotation()" +
                 "[fise:entity-type is skos:Concept]/fise:entity-reference :: xsd:anyURI;");
-        Program<Resource> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
+        Program<RDFTerm> personProgram = ldpath.parseProgram(new StringReader(program.toString()));
         log.info("- - - - - - - - - - - - -");
         log.info("Concept Indexing Examples");
         Map<String,Collection<?>> result = execute(personProgram);
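
A hedged sketch of the parse-and-execute cycle these examples repeat (the one-line program is illustrative):

    import java.io.StringReader;
    import java.util.Collection;
    import java.util.Map;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.marmotta.ldpath.LDPath;
    import org.apache.marmotta.ldpath.api.backend.RDFBackend;
    import org.apache.marmotta.ldpath.exception.LDPathParseException;
    import org.apache.marmotta.ldpath.model.programs.Program;

    public class ProgramSketch {
        public static Map<String,Collection<?>> run(LDPath<RDFTerm> ldpath,
                RDFBackend<RDFTerm> backend, RDFTerm context)
                throws LDPathParseException {
            Program<RDFTerm> program = ldpath.parseProgram(new StringReader(
                "lang = fn:enhancement()/dc:language :: xsd:string;"));
            return program.execute(backend, context);
        }
    }
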
diff --git a/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/RepresentationTest.java b/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/RepresentationTest.java
index 7221cfa..5fbe6e8 100644
--- a/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/RepresentationTest.java
+++ b/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/RepresentationTest.java
@@ -1040,9 +1040,9 @@
         Representation rep = initNaturalLanguageTest(field);
         Set<String> textSet = new HashSet<String>(NL_TEST_all);
         rep.removeAllNaturalText(field, "de", "de-AT");
-        for (Iterator<Text> texts = rep.getText(field); texts.hasNext(); textSet.remove(texts.next()
-                .getText()))
-            ;
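+        //consume the iterator, removing each returned text from the set of expected values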
+        for (Iterator<Text> texts = rep.getText(field); 
+                texts.hasNext(); 
+                textSet.remove(texts.next().getText()));
         assertTrue(textSet.size() == 2);
         assertTrue(textSet.remove(NL_TEST_de));
         assertTrue(textSet.remove(NL_TEST_de_AT));
diff --git a/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/ValueFactoryTest.java b/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/ValueFactoryTest.java
index d0855af..22c9819 100644
--- a/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/ValueFactoryTest.java
+++ b/entityhub/generic/test/src/main/java/org/apache/stanbol/entityhub/test/model/ValueFactoryTest.java
@@ -100,7 +100,7 @@
     }
 
     @Test
-    public void testUriReference() throws URISyntaxException {
+    public void testIRIReference() throws URISyntaxException {
         URI refObject = new URI("http://www.test.org/uriTest");
         Reference ref = testRef(refObject);
         assertEquals(ref.getReference(), refObject.toString());
diff --git a/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/Urify.java b/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/Urify.java
index 44a225c..17e7a64 100644
--- a/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/Urify.java
+++ b/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/Urify.java
@@ -175,7 +175,7 @@
             }
             InputStream is = new FileInputStream(source);
             OutputStream os = new FileOutputStream(target);
-            log.info("Resource: {}",resource);
+            log.info("RDFTerm: {}",resource);
             log.info("Target  : {}",target);
             if ("gz".equalsIgnoreCase(FilenameUtils.getExtension(name))) {
                 is = new GZIPInputStream(is);
diff --git a/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/config/IndexingConfig.java b/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/config/IndexingConfig.java
index dc8a1f1..a154b4d 100644
--- a/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/config/IndexingConfig.java
+++ b/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/config/IndexingConfig.java
@@ -441,13 +441,13 @@
     private File getResource(String path, String fileName) {
         File resourceDir = new File(getWorkingDirectory(),path);
         File resource = new File(resourceDir,fileName);
-        log.info("request for Resource {} (folder: {})",fileName,resourceDir);
+        log.info("request for RDFTerm {} (folder: {})",fileName,resourceDir);
         if(resource.getAbsoluteFile().exists()){
-            log.info(" > rquested Resource present");
+            log.info(" > rquested RDFTerm present");
         } else if(copyFromClasspath(new File(path,fileName))){
-            log.info(" > rquested Resource copied from Classpath ");
+            log.info(" > rquested RDFTerm copied from Classpath ");
         } else {
-            log.info(" > rquested Resource not found");
+            log.info(" > rquested RDFTerm not found");
         }
         return resource.getAbsoluteFile();
     }
@@ -598,7 +598,7 @@
         }
         URL contextUrl = loadViaClasspath(contextResource);
         if(contextUrl == null){// if indexing.properties is not found via classpath
-            log.info("No '{}' found via classpath. Loading Resource via" +
+            log.info("No '{}' found via classpath. Loading RDFTerm via" +
             		"the classpath is deactivated.",
             		contextResource);
             return null;
diff --git a/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/source/ResourceLoader.java b/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/source/ResourceLoader.java
index a42b5fa..926eddd 100644
--- a/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/source/ResourceLoader.java
+++ b/entityhub/indexing/core/src/main/java/org/apache/stanbol/entityhub/indexing/core/source/ResourceLoader.java
@@ -285,7 +285,7 @@
                     try {
                         files.put(moveToImportedFolder(new File(file)).toString(), state);
                     } catch (IOException ioe) {
-                       log.warn("Unable to move loaded Resource {} to imported Directory! "
+                       log.warn("Unable to move loaded RDFTerm {} to imported Directory! "
                            + "Please move the file manually to {}!",file,importedDir);
                        log.warn("Reason: "+ioe.getMessage(),ioe);
                        files.put(file, state);
@@ -296,7 +296,7 @@
                 if(ResourceState.ERROR == state){
                 	//if failOnError is activated we stop the loading on the first error!                   
                     if (failOnError){
                     	 String msg = "Error while loading Resource "+file;
                     	if(e != null){
                             throw new IllegalStateException(msg,e);
                         } else {
diff --git a/entityhub/indexing/core/src/test/java/org/apache/stanbol/entityhub/indexing/core/ResourceLoaderTest.java b/entityhub/indexing/core/src/test/java/org/apache/stanbol/entityhub/indexing/core/ResourceLoaderTest.java
index 211f583..307a33c 100644
--- a/entityhub/indexing/core/src/test/java/org/apache/stanbol/entityhub/indexing/core/ResourceLoaderTest.java
+++ b/entityhub/indexing/core/src/test/java/org/apache/stanbol/entityhub/indexing/core/ResourceLoaderTest.java
@@ -71,7 +71,7 @@
             assertTrue("resourceName '"+resourceName+"' not expected",
                 expectedNames.remove(resourceName));
             IOUtils.closeQuietly(is);
-            log.debug("Import Resource {}",resourceName);
+            log.debug("Import RDFTerm {}",resourceName);
             if(resourceName.startsWith("ignore")){
                 return ResourceState.IGNORED;
             } else if(resourceName.startsWith("error")){
diff --git a/entityhub/indexing/geonames/src/main/java/org/apache/stanbol/entityhub/indexing/geonames/GeonamesIndexingSource.java b/entityhub/indexing/geonames/src/main/java/org/apache/stanbol/entityhub/indexing/geonames/GeonamesIndexingSource.java
index bbfffcd..722c4c0 100644
--- a/entityhub/indexing/geonames/src/main/java/org/apache/stanbol/entityhub/indexing/geonames/GeonamesIndexingSource.java
+++ b/entityhub/indexing/geonames/src/main/java/org/apache/stanbol/entityhub/indexing/geonames/GeonamesIndexingSource.java
@@ -77,10 +77,10 @@
     
     private ResourceLoader loader = new ResourceLoader(this, false, false);
 
     protected static class Resource {
         protected final String name;
         protected final InputStream is;
         protected Resource(String name, InputStream is) {
             this.name = name;
             this.is = is;
         }
@@ -100,7 +100,7 @@
         }
         
     }
     private List<Resource> resourceList = new ArrayList<GeonamesIndexingSource.Resource>();
     private boolean consumed;
     
     @Override
@@ -158,14 +158,14 @@
     @Override
     public void close() {
         loader = null;
         for(Resource resource : resourceList){
             IOUtils.closeQuietly(resource.is);
         }
     }
 
     @Override
     public ResourceState importResource(InputStream is, String resourceName) throws IOException {
         resourceList.add(new Resource(resourceName, is));
         return ResourceState.LOADED;
     }
 
@@ -180,8 +180,8 @@
         }
         return new EntityDataIterator() {
             
             Iterator<Resource> resources = resourceList.iterator();
             Resource r;
             LineIterator it = null;
             private String next;
             private Representation rep;
@@ -196,7 +196,7 @@
                     try {
                         it = r.getEntries();
                     } catch (IOException e) {
                         log.error("Unable to read Resource '"+r.getName()+"' because of "+e.getMessage(),e);
                         e.printStackTrace();
                         IOUtils.closeQuietly(r.is);
                         it = null;
diff --git a/entityhub/indexing/source/jenatdb/src/main/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSource.java b/entityhub/indexing/source/jenatdb/src/main/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSource.java
index c33d05a..338dd22 100644
--- a/entityhub/indexing/source/jenatdb/src/main/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSource.java
+++ b/entityhub/indexing/source/jenatdb/src/main/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSource.java
@@ -97,7 +97,7 @@
      */
     private static final String PARAM_BNODE_STATE = "bnode";
     /**
-     * If present, this Parameter allows to convert RDF BNodes to dereferable
+     * If present, this Parameter allows converting RDF BlankNodes to dereferenceable
      * URIs by using {bnode-prefix}{bnode-id} (see 
      * <a href="https://issues.apache.org/jira/browse/STANBOL-765">STANBOL-765</a>
      * for details)
@@ -141,7 +141,7 @@
 
     protected String bnodePrefix; //protected to allow direct access in inner classes
     /**
-     * used for logging a single WARN level entry on the first ignored BNode
+     * used for logging a single WARN level entry on the first ignored BlankNode
      */
     private boolean bnodeIgnored = false;
     private RdfImportFilter importFilter;
@@ -429,7 +429,7 @@
         }
         if(found) {
             if(log.isTraceEnabled()){
-                log.info("Resource: \n{}", ModelUtils.getRepresentationInfo(source));
+                log.info("RDFTerm: \n{}", ModelUtils.getRepresentationInfo(source));
             }
             return source;
         } else {
@@ -510,12 +510,12 @@
                 logIgnoredBnode(log, source, field, value);
             }
         }  else {
-            log.warn("ignoreing value {} for field {} and Resource {} because it is of an unsupported type!",
+            log.warn("ignoreing value {} for field {} and RDFTerm {} because it is of an unsupported type!",
                     new Object[]{value,field,source.getId()});
         } //end different value node type
     }
     /**
-     * Logs that a BNode was ignored (only the first time). Also debugs the
+     * Logs that a BlankNode was ignored (only the first time). Also debugs the
      * ignored triple.
      * @param log the logger to use
      * @param s subject
@@ -706,7 +706,7 @@
                 Node entityNode = binding.get(entityVar);
                 //NOTES:
                 // * for URIs we need to check for empty URIs!
-                // * STANBOL-765: added support for BNodes
+                // * STANBOL-765: added support for BlankNodes
                 if((entityNode.isURI() && !entityNode.toString().isEmpty()) ||
                         entityNode.isBlank() && bnodePrefix != null){
                     if(!entityNode.equals(currentEntity)){
@@ -829,7 +829,7 @@
         return nodes;
     }
     /**
-     * Since STANBOL-765 BNodes are converted to URIs if a {@link #bnodePrefix}
+     * Since STANBOL-765 BlankNodes are converted to URIs if a {@link #bnodePrefix}
      * is configured. This also means that one needs to expect calls to the
      * {@link RDFBackend} interface with transformed Nodes. <p>
      * This method ensures that if someone requests an uri {@link Node} for a
diff --git a/entityhub/indexing/source/jenatdb/src/test/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSourceTest.java b/entityhub/indexing/source/jenatdb/src/test/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSourceTest.java
index ef07ec9..be2b6f9 100644
--- a/entityhub/indexing/source/jenatdb/src/test/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSourceTest.java
+++ b/entityhub/indexing/source/jenatdb/src/test/java/org/apache/stanbol/entityhub/indexing/source/jenatdb/RdfIndexingSourceTest.java
@@ -209,8 +209,8 @@
             9, count);
     }
     @Test
-    public void testBNodeSupport(){
-        log.info(" --- testBNodeSupport ---");
+    public void testBlankNodeSupport(){
+        log.info(" --- testBlankNodeSupport ---");
         String testName = "bnode";
         IndexingConfig config = new IndexingConfig(CONFIG_ROOT+File.separatorChar+testName,
             CONFIG_ROOT+'/'+testName){};
@@ -231,7 +231,7 @@
         }
         //check if all entities were indexed
         //Expected are 3 entities: first France from france.rdf
-        //and two from BNode Entities in bnode.nt
+        //and two from BlankNode Entities in bnode.nt
         assertEquals(String.format("> %s Entities expected but only %s processed!",
             3, count), 
             3, count);
diff --git a/entityhub/indexing/source/vcard/src/main/java/org/apache/stanbol/entityhub/indexing/source/vcard/VcardIndexingSource.java b/entityhub/indexing/source/vcard/src/main/java/org/apache/stanbol/entityhub/indexing/source/vcard/VcardIndexingSource.java
index 148d787..3d8dcda 100644
--- a/entityhub/indexing/source/vcard/src/main/java/org/apache/stanbol/entityhub/indexing/source/vcard/VcardIndexingSource.java
+++ b/entityhub/indexing/source/vcard/src/main/java/org/apache/stanbol/entityhub/indexing/source/vcard/VcardIndexingSource.java
@@ -331,7 +331,7 @@
             }
             return ResourceState.LOADED;
         } else {
-            log.debug("Resource {} ignored: Not an Vcard file.",resourceName);
+            log.debug("RDFTerm {} ignored: Not an Vcard file.",resourceName);
             return ResourceState.IGNORED;
         }
     }
@@ -620,7 +620,7 @@
                                 unitHierarchy[0] != null && unitHierarchy[0].trim().length()>0){
                             String orgName = unitHierarchy[0];
                             if(current == null){ //create new Representation for the Organisation
-                                //Note: this is an Entity and no sub-Resource!
+                                //Note: this is an Entity and not a sub-Resource!
                                 String orgEntityId = entityByName(entityMap, EntityType.organization, 
                                     orgName, null, false);
                                 if(orgEntityId == null){
diff --git a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/EntityhubRootResource.java b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/EntityhubRootResource.java
index ac27bf8..441596d 100644
--- a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/EntityhubRootResource.java
+++ b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/EntityhubRootResource.java
@@ -68,7 +68,7 @@
 
 import org.apache.marmotta.ldpath.exception.LDPathParseException;
 import org.apache.marmotta.ldpath.model.programs.Program;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.namespaceprefix.NamespaceMappingUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
 import org.apache.stanbol.commons.web.viewable.Viewable;
@@ -610,7 +610,7 @@
      */
     private Response executeLDPathQuery(Entityhub entityhub,FieldQuery query, String ldpathProgramString, MediaType mediaType, HttpHeaders headers) {
         QueryResultList<Representation> result;
-        ValueFactory vf = new RdfValueFactory(new IndexedMGraph());
+        ValueFactory vf = new RdfValueFactory(new IndexedGraph());
         EntityhubBackend backend = new EntityhubBackend(entityhub);
         EntityhubLDPath ldPath = new EntityhubLDPath(backend,vf);
         //copy the selected fields, because we might need to delete some during
diff --git a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/ReferencedSiteRootResource.java b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/ReferencedSiteRootResource.java
index c7e7601..3db51f6 100644
--- a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/ReferencedSiteRootResource.java
+++ b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/ReferencedSiteRootResource.java
@@ -72,7 +72,7 @@
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.marmotta.ldpath.exception.LDPathParseException;
 import org.apache.marmotta.ldpath.model.programs.Program;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.namespaceprefix.NamespaceMappingUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
 import org.apache.stanbol.commons.web.viewable.Viewable;
@@ -109,7 +109,7 @@
 import org.apache.felix.scr.annotations.Service;
 
 /**
  * Resource to provide a REST API for the {@link SiteManager}
  * <p/>
  * TODO: add description
  */
@@ -665,7 +665,7 @@
      */
     private Response executeLDPathQuery(Site site, FieldQuery query, String ldpathProgramString, MediaType mediaType, HttpHeaders headers) {
         QueryResultList<Representation> result;
-        ValueFactory vf = new RdfValueFactory(new IndexedMGraph());
+        ValueFactory vf = new RdfValueFactory(new IndexedGraph());
         SiteBackend backend = new SiteBackend(site,vf);
         EntityhubLDPath ldPath = new EntityhubLDPath(backend,vf);
         //copy the selected fields, because we might need to delete some during
diff --git a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/SiteManagerRootResource.java b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/SiteManagerRootResource.java
index 56fd843..a2482cc 100644
--- a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/SiteManagerRootResource.java
+++ b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/resource/SiteManagerRootResource.java
@@ -58,7 +58,7 @@
 import org.apache.clerezza.rdf.ontologies.RDFS;
 import org.apache.marmotta.ldpath.exception.LDPathParseException;
 import org.apache.marmotta.ldpath.model.programs.Program;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.namespaceprefix.NamespaceMappingUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
 import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
@@ -85,7 +85,7 @@
 import org.apache.felix.scr.annotations.Service;
 
 /**
  * Resource to provide a REST API for the {@link SiteManager}.
  * 
  * TODO: add description
  */
@@ -421,7 +421,7 @@
      */
     private Response executeLDPathQuery(SiteManager manager,FieldQuery query, String ldpathProgramString, MediaType mediaType, HttpHeaders headers) {
         QueryResultList<Representation> result;
-        ValueFactory vf = new RdfValueFactory(new IndexedMGraph());
+        ValueFactory vf = new RdfValueFactory(new IndexedGraph());
         SiteManagerBackend backend = new SiteManagerBackend(manager);
         EntityhubLDPath ldPath = new EntityhubLDPath(backend,vf);
         //copy the selected fields, because we might need to delete some during
diff --git a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/utils/LDPathHelper.java b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/utils/LDPathHelper.java
index b8cfa9e..dd7b1ab 100644
--- a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/utils/LDPathHelper.java
+++ b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/jersey/utils/LDPathHelper.java
@@ -36,14 +36,14 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.Response.Status;
 
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.marmotta.ldpath.api.backend.RDFBackend;
 import org.apache.marmotta.ldpath.exception.LDPathParseException;
 import org.apache.marmotta.ldpath.model.fields.FieldMapping;
 import org.apache.marmotta.ldpath.model.programs.Program;
 import org.apache.marmotta.ldpath.model.selectors.PropertySelector;
 import org.apache.marmotta.ldpath.model.transformers.DoubleTransformer;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.web.base.resource.BaseStanbolResource;
 import org.apache.stanbol.commons.web.viewable.Viewable;
 import org.apache.stanbol.entityhub.core.model.InMemoryValueFactory;
@@ -90,10 +90,10 @@
      * @return The results stored within an RDF graph
      * @throws LDPathParseException if the parsed LDPath program is invalid
      */
-    private static MGraph executeLDPath(RDFBackend<Object> backend,
+    private static Graph executeLDPath(RDFBackend<Object> backend,
                                  String ldpath,
                                  Set<String> contexts ) throws LDPathParseException {
-        MGraph data = new IndexedMGraph();
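+        //Clerezza 1.0: the mutable MGraph type is now called Graph, so IndexedGraph replaces IndexedMGraph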
+        Graph data = new IndexedGraph();
         RdfValueFactory vf = new RdfValueFactory(data);
         EntityhubLDPath ldPath = new EntityhubLDPath(backend,vf);
         Program<Object> program = ldPath.parseProgram(getReader(ldpath));
@@ -104,10 +104,10 @@
         /*
          * NOTE: We do not need to process the Representations returned by
          * EntityhubLDPath#execute, because the RdfValueFactory used here uses
-         * the local variable "MGraph data" to backup all created
+         * the local variable "Graph data" to backup all created
          * RdfRepresentation. Because of this all converted data will be
-         * automatically added the MGraph. The only thing we need to do is to
-         * wrap the MGraph in the response.
+         * automatically added to the Graph. The only thing we need to do is to
+         * wrap the Graph in the response.
          */
         for(String context : contexts){
             ldPath.execute(vf.createReference(context), program);
@@ -186,7 +186,7 @@
             .entity("The requested content type "+TEXT_HTML+" is not supported.\n")
             .header(HttpHeaders.ACCEPT, acceptedMediaType).build();
         }
-        MGraph data;
+        Graph data;
         try {
             data = executeLDPath(backend, ldpath, contexts);
         } catch (LDPathParseException e) {
diff --git a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/FieldQueryReader.java b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/FieldQueryReader.java
index da8fed6..78ab3b8 100644
--- a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/FieldQueryReader.java
+++ b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/FieldQueryReader.java
@@ -117,7 +117,7 @@
             // 500 with no comment and HTML content type :(
             // As a workaround one could use a wrapping object as generic type
             // that parses the error and then throw the Exception within the
             // Resource using this MessageBodyReader
             throw new WebApplicationException(
                 Response.status(Status.BAD_REQUEST).
                 entity(message.toString()).
diff --git a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/RepresentationReader.java b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/RepresentationReader.java
index 8a922b6..6618f70 100644
--- a/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/RepresentationReader.java
+++ b/entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/RepresentationReader.java
@@ -41,10 +41,10 @@
 import javax.ws.rs.ext.MessageBodyReader;
 import javax.ws.rs.ext.Provider;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedParsingFormatException;
@@ -52,7 +52,7 @@
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.jersey.utils.JerseyUtils;
 import org.apache.stanbol.entityhub.jersey.utils.MessageBodyReaderUtils;
 import org.apache.stanbol.entityhub.jersey.utils.MessageBodyReaderUtils.RequestData;
@@ -247,8 +247,8 @@
        } else if(isSupported(content.getMediaType())){ //from RDF serialisation
             RdfValueFactory valueFactory = RdfValueFactory.getInstance();
             Map<String,Representation> representations = new HashMap<String,Representation>();
-            Set<NonLiteral> processed = new HashSet<NonLiteral>();
-            MGraph graph = new IndexedMGraph();
+            Set<BlankNodeOrIRI> processed = new HashSet<BlankNodeOrIRI>();
+            Graph graph = new IndexedGraph();
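+            //the parsed RDF payload is collected in an in-memory IndexedGraph before being split into Representations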
             try {
                 parser.parse(graph,content.getEntityStream(), content.getMediaType().toString());
             } catch (UnsupportedParsingFormatException e) {
@@ -276,11 +276,11 @@
                     header(HttpHeaders.ACCEPT, acceptedMediaType).build());
             }
             for(Iterator<Triple> st = graph.iterator();st.hasNext();){
-                NonLiteral resource = st.next().getSubject();
-                if(resource instanceof UriRef && processed.add(resource)){
+                BlankNodeOrIRI resource = st.next().getSubject();
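+                //only IRI subjects can identify a Representation; BlankNode subjects are skipped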
+                if(resource instanceof IRI && processed.add(resource)){
                     //build a new representation
-                    representations.put(((UriRef)resource).getUnicodeString(),
-                        valueFactory.createRdfRepresentation((UriRef)resource, graph));
+                    representations.put(((IRI)resource).getUnicodeString(),
+                        valueFactory.createRdfRepresentation((IRI)resource, graph));
                 }
             }
             return representations;
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfReference.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfReference.java
index 1998c79..7dc858e 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfReference.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfReference.java
@@ -16,21 +16,21 @@
  */
 package org.apache.stanbol.entityhub.model.clerezza;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.entityhub.servicesapi.model.Reference;
 
 public class RdfReference implements Reference,Cloneable {
-    private final UriRef uri;
+    private final IRI uri;
     protected RdfReference(String reference){
         if(reference == null){
             throw new IllegalArgumentException("The parsed Reference MUST NOT be NULL!");
         } else if(reference.isEmpty()){
             throw new IllegalArgumentException("The parsed Reference MUST NOT be Empty!");
         } else {
-            this.uri = new UriRef(reference);
+            this.uri = new IRI(reference);
         }
     }
-    protected RdfReference(UriRef uri){
+    protected RdfReference(IRI uri){
         if(uri == null){
             throw new IllegalArgumentException("The parsed Reference MUST NOT be NULL!");
         } else if(uri.getUnicodeString().isEmpty()){
@@ -43,12 +43,12 @@
     public String getReference() {
         return uri.getUnicodeString();
     }
-    public UriRef getUriRef(){
+    public IRI getIRI(){
         return uri;
     }
     @Override
     protected Object clone() throws CloneNotSupportedException {
-        return new RdfReference(new UriRef(uri.getUnicodeString()));
+        return new RdfReference(new IRI(uri.getUnicodeString()));
     }
     @Override
     public int hashCode() {
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentation.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentation.java
index 1bb77ef..d99697b 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentation.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentation.java
@@ -22,11 +22,11 @@
 import java.util.Collection;
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.utils.GraphNode;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator;
 import org.apache.stanbol.entityhub.servicesapi.util.FilteringIterator;
@@ -35,8 +35,8 @@
 import org.apache.stanbol.entityhub.model.clerezza.impl.LiteralAdapter;
 import org.apache.stanbol.entityhub.model.clerezza.impl.NaturalTextFilter;
 import org.apache.stanbol.entityhub.model.clerezza.impl.Resource2ValueAdapter;
-import org.apache.stanbol.entityhub.model.clerezza.impl.UriRef2ReferenceAdapter;
-import org.apache.stanbol.entityhub.model.clerezza.impl.UriRefAdapter;
+import org.apache.stanbol.entityhub.model.clerezza.impl.IRI2ReferenceAdapter;
+import org.apache.stanbol.entityhub.model.clerezza.impl.IRIAdapter;
 import org.apache.stanbol.entityhub.model.clerezza.utils.Resource2StringAdapter;
 import org.apache.stanbol.entityhub.servicesapi.model.Reference;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
@@ -59,7 +59,7 @@
         return graphNode;
     }
 
-    protected RdfRepresentation(UriRef resource, TripleCollection graph) {
+    protected RdfRepresentation(IRI resource, Graph graph) {
         this.graphNode = new GraphNode(resource, graph);
     }
 
@@ -68,12 +68,12 @@
      *
      * @return The RDF graph of this Representation
      */
-    public TripleCollection getRdfGraph(){
+    public Graph getRdfGraph(){
         return graphNode.getGraph();
     }
 
-//    protected UriRef getRepresentationType(){
-//        Iterator<UriRef> it = this.graphNode.getUriRefObjects(REPRESENTATION_TYPE_PROPERTY);
+//    protected IRI getRepresentationType(){
+//        Iterator<IRI> it = this.graphNode.getIRIObjects(REPRESENTATION_TYPE_PROPERTY);
 //        return it.hasNext()?it.next():null;
 //    }
     @Override
@@ -86,32 +86,32 @@
         if(value == null){
             throw new IllegalArgumentException("NULL values are not supported by Representations");
         }
-        UriRef fieldUriRef = new UriRef(field);
+        IRI fieldIRI = new IRI(field);
         Collection<Object> values = new ArrayList<Object>();
         //process the parsed value with the Utility Method ->
         // this converts Objects as defined in the specification
         ModelUtils.checkValues(valueFactory, value, values);
         //We still need to implement support for specific types supported by this implementation
         for (Object current : values){
-            if (current instanceof Resource){ //native support for Clerezza types!
-                graphNode.addProperty(fieldUriRef, (Resource)current);
+            if (current instanceof RDFTerm){ //native support for Clerezza types!
+                graphNode.addProperty(fieldIRI, (RDFTerm)current);
             } else if (current instanceof RdfReference){
                 //treat RDF Implementations special to avoid creating new instances
-                graphNode.addProperty(fieldUriRef, ((RdfReference) current).getUriRef());
+                graphNode.addProperty(fieldIRI, ((RdfReference) current).getIRI());
             } else if (current instanceof Reference){
-                graphNode.addProperty(fieldUriRef, new UriRef(((Reference) current).getReference()));
+                graphNode.addProperty(fieldIRI, new IRI(((Reference) current).getReference()));
             } else if (current instanceof RdfText){
                 //treat RDF Implementations special to avoid creating new instances
-                graphNode.addProperty(fieldUriRef,((RdfText) current).getLiteral());
+                graphNode.addProperty(fieldIRI,((RdfText) current).getLiteral());
             } else if (current instanceof Text){
-                addNaturalText(fieldUriRef, ((Text)current).getText(), ((Text)current).getLanguage());
+                addNaturalText(fieldIRI, ((Text)current).getText(), ((Text)current).getLanguage());
             } else { //else add a typed Literal!
-                addTypedLiteral(fieldUriRef, current);
+                addTypedLiteral(fieldIRI, current);
             }
         }
     }
 
-    private void addTypedLiteral(UriRef field, Object literalValue){
+    private void addTypedLiteral(IRI field, Object literalValue){
         Literal literal;
         try {
             literal = RdfResourceUtils.createLiteral(literalValue);
@@ -134,7 +134,7 @@
         } else if (reference.isEmpty()) {
             throw new IllegalArgumentException("References MUST NOT be empty!");
         }
-        graphNode.addProperty(new UriRef(field), new UriRef(reference));
+        graphNode.addProperty(new IRI(field), new IRI(reference));
     }
     @Override
     public void addNaturalText(String field, String text, String...languages) {
@@ -146,9 +146,9 @@
         if(text == null){
             throw new IllegalArgumentException("NULL values are not supported by Representations");
         }
-        this.addNaturalText(new UriRef(field), text, languages);
+        this.addNaturalText(new IRI(field), text, languages);
     }
-    private void addNaturalText(UriRef field, String text, String...languages) {
+    private void addNaturalText(IRI field, String text, String...languages) {
         if(languages == null || languages.length == 0){
             languages = new String []{null};
         }
@@ -165,36 +165,36 @@
         } else if(field.isEmpty()){
             throw new IllegalArgumentException("The parsed field MUST NOT be Empty");
         }
-        UriRef fieldUriRef = new UriRef(field);
-        if(Resource.class.isAssignableFrom(type)){ //native support for Clerezza types
-            return new TypeSafeIterator<T>(graphNode.getObjects(fieldUriRef), type);
+        IRI fieldIRI = new IRI(field);
+        if(RDFTerm.class.isAssignableFrom(type)){ //native support for Clerezza types
+            return new TypeSafeIterator<T>(graphNode.getObjects(fieldIRI), type);
 // NOTE: (Rupert Westenthaler 12.01.2011)
 //     Converting everything to String is not an intended functionality. When
 //     someone parsed String.class he rather assumes that he gets only string
 //     values and not also string representations for Dates, Integer ...
 //       
 //        } else if(type.equals(String.class)){ //support to convert anything to String
-//            return (Iterator<T>) new AdaptingIterator<Resource,String>(
-//                    graphNode.getObjects(fieldUriRef),
-//                    new Resource2StringAdapter<Resource>(),
+//            return (Iterator<T>) new AdaptingIterator<RDFTerm,String>(
+//                    graphNode.getObjects(fieldIRI),
+//                    new Resource2StringAdapter<RDFTerm>(),
 //                    String.class);
         } else if(type.equals(URI.class) || type.equals(URL.class)){ //support for References
-            return new AdaptingIterator<UriRef, T>(
-                    graphNode.getUriRefObjects(fieldUriRef),
-                    new UriRefAdapter<T>(),
+            return new AdaptingIterator<IRI, T>(
+                    graphNode.getIRIObjects(fieldIRI),
+                    new IRIAdapter<T>(),
                     type);
         } else if(Reference.class.isAssignableFrom(type)){
-            return (Iterator<T>) new AdaptingIterator<UriRef,Reference>(
-                    graphNode.getUriRefObjects(fieldUriRef),
-                    new UriRef2ReferenceAdapter(),Reference.class);
+            return (Iterator<T>) new AdaptingIterator<IRI,Reference>(
+                    graphNode.getIRIObjects(fieldIRI),
+                    new IRI2ReferenceAdapter(),Reference.class);
         } else if(Text.class.isAssignableFrom(type)){
             return (Iterator<T>)new AdaptingIterator<Literal, Text>(
-                    graphNode.getLiterals(fieldUriRef),
+                    graphNode.getLiterals(fieldIRI),
                     new Literal2TextAdapter<Literal>(),
                     Text.class);
         } else { //support for Literals -> Type conversions
             return new AdaptingIterator<Literal, T>(
-                    graphNode.getLiterals(fieldUriRef),
+                    graphNode.getLiterals(fieldIRI),
                     new LiteralAdapter<Literal, T>(),
                     type);
         }
@@ -207,9 +207,9 @@
         } else if(field.isEmpty()){
             throw new IllegalArgumentException("The parsed field MUST NOT be Empty");
         }
-        return new AdaptingIterator<UriRef,Reference>(
-                graphNode.getUriRefObjects(new UriRef(field)),
-                new UriRef2ReferenceAdapter(),Reference.class);
+        return new AdaptingIterator<IRI,Reference>(
+                graphNode.getIRIObjects(new IRI(field)),
+                new IRI2ReferenceAdapter(),Reference.class);
     }
 
     @Override
@@ -220,7 +220,7 @@
             throw new IllegalArgumentException("The parsed field MUST NOT be Empty");
         }
         return new AdaptingIterator<Literal, Text>(
-                graphNode.getLiterals(new UriRef(field)),
+                graphNode.getLiterals(new IRI(field)),
                 new Literal2TextAdapter<Literal>(),
                 Text.class);
     }
@@ -232,8 +232,8 @@
         } else if(field.isEmpty()){
             throw new IllegalArgumentException("The parsed field MUST NOT be Empty");
         }
-        return new AdaptingIterator<Resource, Object>(graphNode.getObjects(new UriRef(field)),
-                new Resource2ValueAdapter<Resource>(),Object.class);
+        return new AdaptingIterator<RDFTerm, Object>(graphNode.getObjects(new IRI(field)),
+                new Resource2ValueAdapter<RDFTerm>(),Object.class);
     }
 
     @Override
@@ -244,15 +244,15 @@
             throw new IllegalArgumentException("The parsed field MUST NOT be Empty");
         }
         return new AdaptingIterator<Literal, Text>(
-                graphNode.getLiterals(new UriRef(field)),
+                graphNode.getLiterals(new IRI(field)),
                 new Literal2TextAdapter<Literal>(languages),
                 Text.class);
     }
 
     @Override
     public Iterator<String> getFieldNames() {
-        return new AdaptingIterator<UriRef, String>(graphNode.getProperties(),
-                new Resource2StringAdapter<UriRef>(), String.class);
+        return new AdaptingIterator<IRI, String>(graphNode.getProperties(),
+                new Resource2StringAdapter<IRI>(), String.class);
     }
 
     @Override
@@ -318,11 +318,11 @@
         return getNode().getUnicodeString();
     }
     /**
-     * Getter for the UriRef representing the ID of this Representation.
-     * @return The UriRef representing the ID of this Representation.
+     * Getter for the IRI representing the ID of this Representation.
+     * @return The IRI representing the ID of this Representation.
      */
-    public UriRef getNode(){
-        return (UriRef)graphNode.getNode();
+    public IRI getNode(){
+        return (IRI)graphNode.getNode();
     }
 
     @Override
@@ -337,26 +337,26 @@
                     +" and field "+field+" -> call ignored");
             return;
         }
-        UriRef fieldUriRef = new UriRef(field);
+        IRI fieldIRI = new IRI(field);
         Collection<Object> removeValues = new ArrayList<Object>();
         
         ModelUtils.checkValues(valueFactory, parsedValue, removeValues);
         //We still need to implement support for specific types supported by this implementation
         for (Object current : removeValues){
-            if (current instanceof Resource){ //native support for Clerezza types!
-                graphNode.deleteProperty(fieldUriRef, (Resource)current);
+            if (current instanceof RDFTerm){ //native support for Clerezza types!
+                graphNode.deleteProperty(fieldIRI, (RDFTerm)current);
             } else if (current instanceof RdfReference){
                 //treat RDF Implementations special to avoid creating new instances
-                graphNode.deleteProperty(fieldUriRef, ((RdfReference) current).getUriRef());
+                graphNode.deleteProperty(fieldIRI, ((RdfReference) current).getIRI());
             } else if (current instanceof Reference){
-                graphNode.deleteProperty(fieldUriRef, new UriRef(((Reference) current).getReference()));
+                graphNode.deleteProperty(fieldIRI, new IRI(((Reference) current).getReference()));
             } else if (current instanceof RdfText){
                 //treat RDF Implementations special to avoid creating new instances
-                graphNode.deleteProperty(fieldUriRef,((RdfText) current).getLiteral());
+                graphNode.deleteProperty(fieldIRI,((RdfText) current).getLiteral());
             } else if (current instanceof Text){
                 removeNaturalText(field,((Text)current).getText(),((Text)current).getLanguage());
             } else { //else remove a typed Literal!
-                removeTypedLiteral(fieldUriRef, current);
+                removeTypedLiteral(fieldIRI, current);
             }
         }
     }
@@ -371,9 +371,9 @@
         if(reference == null){
             log.warn("NULL parsed as value in remove method for symbol "+getId()+" and field "+field+" -> call ignored");
         }
-        graphNode.deleteProperty(new UriRef(field), new UriRef(reference));
+        graphNode.deleteProperty(new IRI(field), new IRI(reference));
     }
-    protected void removeTypedLiteral(UriRef field, Object object){
+    protected void removeTypedLiteral(IRI field, Object object){
         Literal literal;
         try{
             literal = RdfResourceUtils.createLiteral(object);
@@ -398,13 +398,13 @@
             //need to be interpreted as default language
             languages = new String []{null};
         }
-        UriRef fieldUriRef = new UriRef(field);
+        IRI fieldIRI = new IRI(field);
         for(String language : languages){
-            graphNode.deleteProperty(fieldUriRef,RdfResourceUtils.createLiteral(value, language));
+            graphNode.deleteProperty(fieldIRI,RdfResourceUtils.createLiteral(value, language));
             if(language == null){ //if the language is null
                 //we need also try to remove a typed Literal with the data type
                 //xsd:string and the parsed value!
-                graphNode.deleteProperty(fieldUriRef,RdfResourceUtils.createLiteral(value));
+                graphNode.deleteProperty(fieldIRI,RdfResourceUtils.createLiteral(value));
             }
         }
     }
@@ -415,7 +415,7 @@
         } else if(field.isEmpty()){
             throw new IllegalArgumentException("The parsed field MUST NOT be Empty");
         }
-        graphNode.deleteProperties(new UriRef(field));
+        graphNode.deleteProperties(new IRI(field));
     }
     @Override
     public void removeAllNaturalText(String field, String... languages) {
@@ -427,17 +427,17 @@
 //        if(languages == null || languages.length == 0){
 //            languages = new String []{null};
 //        }
-        UriRef fieldUriRef = new UriRef(field);
+        IRI fieldIRI = new IRI(field);
         //get all the affected Literals
         Collection<Literal> toRemove = new ArrayList<Literal>();
         Iterator<Literal> it =  new FilteringIterator<Literal>(
-                graphNode.getLiterals(fieldUriRef),
+                graphNode.getLiterals(fieldIRI),
                 new NaturalTextFilter(languages),Literal.class);
         while(it.hasNext()){
             toRemove.add(it.next());
         }
         for(Literal l : toRemove){
-            graphNode.deleteProperty(fieldUriRef, l);
+            graphNode.deleteProperty(fieldIRI, l);
         }
     }
 
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfResourceUtils.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfResourceUtils.java
index 09db4d0..7a3c926 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfResourceUtils.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfResourceUtils.java
@@ -27,19 +27,17 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleLiteralFactory;
 import org.apache.stanbol.entityhub.servicesapi.defaults.DataTypeEnum;
 
 /**
- * Utilities to create {@link Resource} instances for Java Objects.
+ * Utilities to create {@link RDFTerm} instances for Java Objects.
  * @author Rupert Westenthaler
  *
  */
@@ -106,21 +104,21 @@
      * by the {@link XsdDataTypeEnum}.
      */
 
-    public static final Map<UriRef, XsdDataTypeEnum> XSD_DATATYPE_VALUE_MAPPING;
+    public static final Map<IRI, XsdDataTypeEnum> XSD_DATATYPE_VALUE_MAPPING;
     /**
      * Unmodifiable Set containing all xsd data types that can be converted to
      * {@link Text} (without language).
      */
-    public static final Set<UriRef> STRING_DATATYPES;
+    public static final Set<IRI> STRING_DATATYPES;
 
     public static final Map<Class<?>, XsdDataTypeEnum> JAVA_OBJECT_XSD_DATATYPE_MAPPING;
     static {
-        Map<UriRef,XsdDataTypeEnum> dataTypeMappings = new HashMap<UriRef, XsdDataTypeEnum>();
+        Map<IRI,XsdDataTypeEnum> dataTypeMappings = new HashMap<IRI, XsdDataTypeEnum>();
         Map<Class<?>,XsdDataTypeEnum> objectMappings = new HashMap<Class<?>, XsdDataTypeEnum>();
-        Set<UriRef> stringDataTypes = new HashSet<UriRef>();
+        Set<IRI> stringDataTypes = new HashSet<IRI>();
         stringDataTypes.add(null);//map missing dataTypes to String
         for(XsdDataTypeEnum mapping : XsdDataTypeEnum.values()){
-            UriRef uri = new UriRef(mapping.getUri());
+            IRI uri = new IRI(mapping.getUri());
             dataTypeMappings.put(uri,mapping);
             if(mapping.getMappedClass() != null && String.class.isAssignableFrom(mapping.getMappedClass())){
                 stringDataTypes.add(uri);
@@ -195,10 +193,9 @@
         List<String> results = new ArrayList<String>();
         while (literals.hasNext()) {
             Literal act = literals.next();
-            if (act instanceof PlainLiteral) {
-                PlainLiteral pl = (PlainLiteral) act;
-                if (languageSet.contains(pl.getLanguage())) {
-                    results.add(0, pl.getLexicalForm()); //add to front
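+            //Clerezza 1.0 has no PlainLiteral type; a non-null language tag marks the former plain-literal case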
+            if (act.getLanguage() != null) {
+                if (languageSet.contains(act.getLanguage())) {
+                    results.add(0, act.getLexicalForm()); //add to front
                 }
             } else if (containsNull) { //add also all typed Literals, because they do not define a language!
                 results.add(act.getLexicalForm()); //append to the end
@@ -213,7 +210,7 @@
      * @param uriRefObjects iterator over URIs
      * @return the unicode representation
      */
-    public static Collection<String> getUriRefValues(Iterator<UriRef> uriRefObjects) {
+    public static Collection<String> getIRIValues(Iterator<IRI> uriRefObjects) {
         Collection<String> results = new ArrayList<String>();
         while (uriRefObjects.hasNext()) {
             results.add(uriRefObjects.next().getUnicodeString());
@@ -232,12 +229,12 @@
      * @param lang the language of the literal
      * @return the Literal
      */
-    public static PlainLiteral createLiteral(String literalValue, String lang) {
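+    //signature change: Clerezza 1.0 merged PlainLiteral into Literal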
+    public static Literal createLiteral(String literalValue, String lang) {
         Language language = (lang != null && lang.length() > 0) ? new Language(lang) : null;
         return new PlainLiteralImpl(literalValue, language);
     }
 
-    public static TypedLiteral createLiteral(Object object) {
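+    //signature change: Clerezza 1.0 merged TypedLiteral into Literal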
+    public static Literal createLiteral(Object object) {
         return literalFactory.createTypedLiteral(object);
     }
 
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfText.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfText.java
index cb4fe4e..a0f1c19 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfText.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfText.java
@@ -16,10 +16,9 @@
  */
 package org.apache.stanbol.entityhub.model.clerezza;
 
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.stanbol.entityhub.servicesapi.model.Text;
 
 public class RdfText implements Text, Cloneable {
@@ -41,14 +40,14 @@
 
     protected RdfText(Literal literal) {
         this.literal = literal;
-        this.isPlain = literal instanceof PlainLiteral;
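+        //Clerezza 1.0 dropped PlainLiteral: a Literal with a language tag now represents the plain case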
+        this.isPlain = literal.getLanguage() != null;
     }
 
     @Override
     public String getLanguage() {
         return isPlain && 
-            ((PlainLiteral) literal).getLanguage() != null ? 
-                ((PlainLiteral) literal).getLanguage().toString() : null;
+            literal.getLanguage() != null ?
+                literal.getLanguage().toString() : null;
     }
 
     @Override
@@ -62,7 +61,7 @@
 
     @Override
     public RdfText clone() {
-        Language language = isPlain ? ((PlainLiteral) literal).getLanguage() : null;
+        Language language = isPlain ? literal.getLanguage() : null;
         return new RdfText(new PlainLiteralImpl(literal.getLexicalForm(), language));
     }
 
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactory.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactory.java
index 99dacd2..ba3d685 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactory.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactory.java
@@ -18,13 +18,13 @@
 
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
 import org.apache.stanbol.entityhub.servicesapi.model.ValueFactory;
 /**
@@ -53,7 +53,7 @@
      * If not <code>null</code> all {@link RdfRepresentation} created by this
      * instance will use this graph.
      */
-    private MGraph graph;
+    private Graph graph;
     private RdfValueFactory(){
         this(null);
     }
@@ -64,7 +64,7 @@
      * graph. 
      * @param graph
      */
-    public RdfValueFactory(MGraph graph){
+    public RdfValueFactory(Graph graph){
         super();
         this.graph = graph;
     }
@@ -73,8 +73,8 @@
     public RdfReference createReference(Object value) {
         if (value == null) {
             throw new IllegalArgumentException("The parsed value MUST NOT be NULL");
-        } else if (value instanceof UriRef) {
-            return new RdfReference((UriRef) value);
+        } else if (value instanceof IRI) {
+            return new RdfReference((IRI) value);
         } else {
             return new RdfReference(value.toString());
         }
@@ -103,14 +103,14 @@
         } else if(id.isEmpty()){
             throw new IllegalArgumentException("The parsed id MUST NOT be empty!");
         } else {
-            return createRdfRepresentation(new UriRef(id), 
-                graph == null ? new IndexedMGraph() : graph);
+            return createRdfRepresentation(new IRI(id), 
+                graph == null ? new IndexedGraph() : graph);
         }
     }
 
     /**
      * {@link RdfRepresentation} specific create Method based on an existing
      * RDF Graph.
      *
      * @param node The node used for the representation. If this
      *     node is not part of the parsed graph, the resulting representation
@@ -118,7 +118,7 @@
      * @param graph the graph.
      * @return The representation based on the state of the parsed graph
      */
-    public RdfRepresentation createRdfRepresentation(UriRef node, TripleCollection graph) {
+    public RdfRepresentation createRdfRepresentation(IRI node, Graph graph) {
         if (node == null) {
             throw new IllegalArgumentException("The parsed id MUST NOT be NULL!");
         }
@@ -129,11 +129,11 @@
     }
 
     /**
      * Extracts the Graph for {@link RdfRepresentation} or creates a {@link Graph}
      * for all other implementations of {@link Representation}.
      *
      * @param representation the representation
      * @return the read only RDF Graph.
      */
     public RdfRepresentation toRdfRepresentation(Representation representation) {
         if (representation instanceof RdfRepresentation) {
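The RdfValueFactory changes above follow one mechanical recipe that recurs through the rest of this patch: UriRef becomes IRI, MGraph becomes Graph, and IndexedMGraph becomes IndexedGraph. A minimal before/after sketch of client code (the urn:example identifier is invented for illustration):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
    import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;

    public class ValueFactoryMigrationSketch {
        public static void main(String[] args) {
            // before: MGraph graph = new IndexedMGraph();
            //         ...createRdfRepresentation(new UriRef("urn:example"), graph);
            Graph graph = new IndexedGraph();                // Graph replaces MGraph
            RdfRepresentation rep = RdfValueFactory.getInstance()
                    .createRdfRepresentation(new IRI("urn:example"), graph); // IRI replaces UriRef
            System.out.println(rep.getId());                 // urn:example
        }
    }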
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/UriRef2ReferenceAdapter.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/IRI2ReferenceAdapter.java
similarity index 83%
rename from entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/UriRef2ReferenceAdapter.java
rename to entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/IRI2ReferenceAdapter.java
index 25f4093..8208f27 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/UriRef2ReferenceAdapter.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/IRI2ReferenceAdapter.java
@@ -16,24 +16,24 @@
  */
 package org.apache.stanbol.entityhub.model.clerezza.impl;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator.Adapter;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
 import org.apache.stanbol.entityhub.servicesapi.model.Reference;
 
 
 /**
- * Adapter that converts Clerezza {@link UriRef} instances to {@link Reference}s.
+ * Adapter that converts Clerezza {@link IRI} instances to {@link Reference}s.
  * The {@link RdfValueFactory} is used to create {@link Reference} instances.
  * @author Rupert Westenthaler
  *
  */
-public class UriRef2ReferenceAdapter implements Adapter<UriRef,Reference> {
+public class IRI2ReferenceAdapter implements Adapter<IRI,Reference> {
 
     private final RdfValueFactory valueFactory = RdfValueFactory.getInstance();
 
     @Override
-    public final Reference adapt(UriRef value, Class<Reference> type) {
+    public final Reference adapt(IRI value, Class<Reference> type) {
         return valueFactory.createReference(value);
     }
 
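The renamed adapter plugs into Stanbol's AdaptingIterator exactly as before; only the source type changes from UriRef to IRI. A short usage sketch (assuming the AdaptingIterator(Iterator, Adapter, Class) constructor that ClerezzaQueryUtils below uses; the urn: values are invented):

    import java.util.Arrays;
    import java.util.Iterator;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.stanbol.entityhub.model.clerezza.impl.IRI2ReferenceAdapter;
    import org.apache.stanbol.entityhub.servicesapi.model.Reference;
    import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator;

    public class Iri2ReferenceSketch {
        public static void main(String[] args) {
            Iterator<IRI> iris = Arrays.asList(
                    new IRI("urn:a"), new IRI("urn:b")).iterator();
            Iterator<Reference> refs = new AdaptingIterator<IRI,Reference>(
                    iris, new IRI2ReferenceAdapter(), Reference.class);
            while (refs.hasNext()) {
                System.out.println(refs.next().getReference()); // urn:a, urn:b
            }
        }
    }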
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/UriRefAdapter.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/IRIAdapter.java
similarity index 79%
rename from entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/UriRefAdapter.java
rename to entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/IRIAdapter.java
index bbda47a..987b492 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/UriRefAdapter.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/IRIAdapter.java
@@ -21,38 +21,38 @@
 import java.net.URISyntaxException;
 import java.net.URL;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator.Adapter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 
-public class UriRefAdapter<A> implements Adapter<UriRef, A> {
+public class IRIAdapter<A> implements Adapter<IRI, A> {
 
-    private static Logger log = LoggerFactory.getLogger(UriRefAdapter.class);
+    private static Logger log = LoggerFactory.getLogger(IRIAdapter.class);
 
     @SuppressWarnings("unchecked")
     @Override
-    public final A adapt(UriRef value, Class<A> type) {
+    public final A adapt(IRI value, Class<A> type) {
         if(type.equals(URI.class)){
             try {
                 return (A) new URI(value.getUnicodeString());
             } catch (URISyntaxException e) {
-                log.warn("Unable to parse an URI for UriRef "+value,e);
+                log.warn("Unable to parse a URI for IRI "+value,e);
                 return null;
             }
         } else if(type.equals(URL.class)){
             try {
                 return (A) new URL(value.getUnicodeString());
             } catch (MalformedURLException e) {
-                log.warn("Unable to parse an URL for UriRef "+value,e);
+                log.warn("Unable to parse a URL for IRI "+value,e);
             }
         } else if(type.equals(String.class)){
             return (A) value.getUnicodeString();
-        } else if(type.equals(UriRef.class)){ //Who converts UriRef -> UriRef ^
+        } else if(type.equals(IRI.class)){ //Who converts IRI -> IRI ^
             return (A) value;
         } else {
-            log.warn(type+" is not a supported target type for "+UriRef.class);
+            log.warn(type+" is not a supported target type for "+IRI.class);
         }
         return null;
     }
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Literal2TextAdapter.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Literal2TextAdapter.java
index e9c5e00..ce9fd74 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Literal2TextAdapter.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Literal2TextAdapter.java
@@ -21,10 +21,9 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator.Adapter;
 import org.apache.stanbol.entityhub.model.clerezza.RdfResourceUtils;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
@@ -56,11 +55,11 @@
      * The xsd:string data type constant used for TypedLiterals to check if they
      * represent a string value!
      */
-    private static UriRef xsdString = new UriRef(DataTypeEnum.String.getUri());
+    private static IRI xsdString = new IRI(DataTypeEnum.String.getUri());
     /**
      * Unmodifiable set of the active languages
      */
-    private final Set<String> languages;
+    private final Set<Language> languages;
     private final boolean containsNull;
     private final RdfValueFactory valueFactory = RdfValueFactory.getInstance();
 
@@ -74,7 +73,15 @@
      */
     public Literal2TextAdapter(String...lang){
         if(lang != null && lang.length>0){
-            this.languages = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(lang)));
+            Set<Language> languagesConverted = new HashSet<Language>();
+            for (String lang1 : lang) {
+                if (lang1 == null) {
+                    languagesConverted.add(null);
+                } else {
+                    languagesConverted.add(new Language(lang1));
+                }
+            }
+            this.languages = Collections.unmodifiableSet(languagesConverted);
             this.containsNull = languages.contains(null);
         } else{
             this.languages = null;
@@ -85,14 +92,13 @@
 
     @Override
     public final Text adapt(T value, Class<Text> type) {
-        if(value instanceof PlainLiteral){
-            String literalLang = ((PlainLiteral) value).getLanguage() == null ? 
-                    null : ((PlainLiteral) value).getLanguage().toString();
+        if(value.getLanguage() != null) {
+            Language literalLang = value.getLanguage();
             if(languages == null || languages.contains(literalLang)){
                 return valueFactory.createText(value);
             } //else wrong language -> filter
-        } else if(value instanceof TypedLiteral) {
-            if(containsNull && ((TypedLiteral)value).getDataType().equals(xsdString)){
+        } else {
+            if(containsNull && value.getDataType().equals(xsdString)){
                 /*
                  * if the null language is active, than we can also return
                  * "normal" literals (with no known language).
@@ -100,10 +106,7 @@
                  */
                 return valueFactory.createText(value);
             } // else no xsd:string dataType and therefore not a text with default lang!
-        } else {// unknown Literal type -> filter + warning
-            log.warn(String.format("Unknown LiteralType %s (lexicalForm=\"%s\") -> ignored! Pleas adapt this implementation to support this type!",
-                value.getClass(),value.getLexicalForm()));
-        }
+        } 
         return null;
     }
 
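The change above is the heart of the literal migration: PlainLiteral and TypedLiteral no longer exist as separate types, so the old instanceof dispatch becomes a null check on getLanguage() plus a data-type comparison. A self-contained sketch of the new test (the xsd:string IRI matches the xsdString constant above):

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Language;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;

    public class NaturalTextCheckSketch {
        static final IRI XSD_STRING = new IRI("http://www.w3.org/2001/XMLSchema#string");

        /** Replaces the old 'value instanceof PlainLiteral' test. */
        static boolean isNaturalText(Literal lit) {
            return lit.getLanguage() != null               // language-tagged literal
                    || XSD_STRING.equals(lit.getDataType()); // plain xsd:string
        }

        public static void main(String[] args) {
            System.out.println(isNaturalText(
                    new PlainLiteralImpl("hello", new Language("en")))); // true
        }
    }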
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/LiteralAdapter.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/LiteralAdapter.java
index 61fdfec..94441c3 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/LiteralAdapter.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/LiteralAdapter.java
@@ -17,12 +17,9 @@
 package org.apache.stanbol.entityhub.model.clerezza.impl;
 
 import org.apache.clerezza.rdf.core.InvalidLiteralTypeException;
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
 import org.apache.clerezza.rdf.core.NoConvertorException;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.impl.SimpleLiteralFactory;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator.Adapter;
 import org.apache.stanbol.entityhub.model.clerezza.RdfResourceUtils;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
@@ -37,7 +34,7 @@
  * <li> String: Converts all Literals to their lexical form
  * <li> Text: Converts {@link PlainLiteral}s and {@link TypedLiteral}s with a
  * data type constrained in {@link RdfResourceUtils#STRING_DATATYPES} to Text instances
- * <li> Int, Long, UriRef ... : Converts {@link TypedLiteral}s to the according
+ * <li> Int, Long, IRI ... : Converts {@link TypedLiteral}s to the corresponding
  * Java Object by using the Clerezza {@link LiteralFactory} (see {@link SimpleLiteralFactory})
  * </ul>
  *
@@ -66,21 +63,15 @@
 //            return (A) value.getLexicalForm();
 //        } else 
         if(Text.class.isAssignableFrom(type)){
-            if(value instanceof PlainLiteral ||
-                    (value instanceof TypedLiteral &&
-                    RdfResourceUtils.STRING_DATATYPES.contains(((TypedLiteral)value).getDataType()))){
+            if(RdfResourceUtils.STRING_DATATYPES.contains(value.getDataType())){
                             return (A)valueFactory.createText(value);
             } else { //this Literal can not be converted to Text!
-                if(value instanceof TypedLiteral){ //TODO: maybe remove this debugging for performance reasons
-                    log.debug("TypedLiterals of type "+((TypedLiteral)value).getDataType()+" can not be converted to Text");
-                } else {
-                    log.warn("Literal of type"+value.getClass()+" are not supported by this Adapter");
-                }
+                log.warn("Literals of type "+value.getClass()+" are not supported by this Adapter");
                 return null;
             }
-        } else if(TypedLiteral.class.isAssignableFrom(value.getClass())){
+        } else if(Literal.class.isAssignableFrom(value.getClass())){
             try {
-                return lf.createObject(type, (TypedLiteral)value);
+                return lf.createObject(type, value);
             } catch (NoConvertorException e) {
                 //This usually indicates a missing converter ... so log in warning
                 log.warn("unable to convert "+value+" to "+type,e);
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/NaturalTextFilter.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/NaturalTextFilter.java
index 29d7f35..a3e69aa 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/NaturalTextFilter.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/NaturalTextFilter.java
@@ -21,10 +21,9 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Language;
 import org.apache.stanbol.entityhub.servicesapi.util.FilteringIterator;
 import org.apache.stanbol.entityhub.servicesapi.util.FilteringIterator.Filter;
 import org.apache.stanbol.entityhub.servicesapi.defaults.DataTypeEnum;
@@ -55,8 +54,8 @@
      * The xsd:string data type constant used for TypedLiterals to check if they
      * represent a string value!
      */
-    private static UriRef xsdString = new UriRef(DataTypeEnum.String.getUri());
-    private final Set<String> languages;
+    private static IRI xsdString = new IRI(DataTypeEnum.String.getUri());
+    private final Set<Language> languages;
     private final boolean containsNull;
 
     public NaturalTextFilter(String...languages){
@@ -64,35 +63,36 @@
             this.languages = null;
             this.containsNull = true; // if no language is parsed, accept any (also the default)
         } else {
-            Set<String> languageSet = new HashSet<String>(Arrays.asList(languages));
-            if(languageSet.remove("")){
-                /*
-                 * Parsing "" as language needs to be interpreted as parsing
-                 * null
-                 */
-                languageSet.add(null);
+            Set<Language> languagesConverted = new HashSet<Language>();
+            for (String lang1 : languages) {
+                // parsing "" as language is interpreted as parsing the null language
+                if (lang1 == null || lang1.equals("")) {
+                    languagesConverted.add(null);
+                } else {
+                    languagesConverted.add(new Language(lang1));
+                }
             }
-            this.languages = Collections.unmodifiableSet(languageSet);
+            this.languages = Collections.unmodifiableSet(languagesConverted);
+            
             this.containsNull = this.languages.contains(null);
         }
     }
     @Override
     public final boolean isValid(Literal value) {
-        if (value instanceof PlainLiteral){
+        if (value.getLanguage() != null){
            if(languages == null) { //no language restrictions
                 return true; //return any Plain Literal
             } else {
-                String literalLang = ((PlainLiteral) value).getLanguage() == null ?
-                    null : ((PlainLiteral) value).getLanguage().toString();
+                Language literalLang = value.getLanguage();
                 return languages.contains(literalLang);
             }
-        } else if(value instanceof TypedLiteral){
+        } else if(value.getDataType().equals(xsdString)) {
             /*
              * if the null language is active, than we can also return
              * "normal" literals (with no known language). This includes
              * Types literals with the data type xsd:string
              */
-            return containsNull && ((TypedLiteral)value).getDataType().equals(xsdString);
+            return containsNull;
         } else {// unknown Literal type -> filter + warning
             log.warn(String.format("Unknown LiteralType %s (lexicalForm=\"%s\") -> ignored! Please adapt this implementation to support this type!",
                 value.getClass(),value.getLexicalForm()));
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Resource2ValueAdapter.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Resource2ValueAdapter.java
index 6e363aa..54f69ad 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Resource2ValueAdapter.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/impl/Resource2ValueAdapter.java
@@ -16,12 +16,10 @@
  */
 package org.apache.stanbol.entityhub.model.clerezza.impl;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator.Adapter;
 import org.apache.stanbol.entityhub.model.clerezza.RdfResourceUtils;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
@@ -37,9 +35,9 @@
  * @author Rupert Westenthaler
  * 
  * @param <T>
- *            the type of the Resource that can be converted to values
+ *            the type of the RDFTerm that can be converted to values
  */
-public class Resource2ValueAdapter<T extends Resource> implements Adapter<T,Object> {
+public class Resource2ValueAdapter<T extends RDFTerm> implements Adapter<T,Object> {
 
     private static Logger log = LoggerFactory.getLogger(Resource2ValueAdapter.class);
 
@@ -49,12 +47,10 @@
 
     @Override
     public final Object adapt(T value, Class<Object> type) {
-        if (value instanceof UriRef) {
+        if (value instanceof IRI) {
             return valueFactory.createReference(value);
-        } else if (value instanceof PlainLiteral) {
-            return valueFactory.createText(value);
-        } else if (value instanceof TypedLiteral) {
-            TypedLiteral literal = (TypedLiteral) value;
+        } else if (value instanceof Literal) {
+            Literal literal = (Literal) value;
             if (literal.getDataType() == null) { // if no dataType is defined
                 // return a Text without a language
                 return valueFactory.createText(literal);
@@ -96,7 +92,7 @@
                 }
             }
         } else {
-            log.warn("Unsupported Resource Type {} -> return String by using the toString method",
+            log.warn("Unsupported RDFTerm Type {} -> return String by using the toString method",
                 value.getClass());
             return value.toString();
         }
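With Resource renamed to RDFTerm and the literal subclasses merged, the adapter's dispatch collapses to two instanceof checks. A condensed sketch of the pattern (the string results stand in for the ValueFactory calls used above):

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;

    public class RdfTermDispatchSketch {
        static Object toValue(RDFTerm term) {
            if (term instanceof IRI) {
                return "ref:" + ((IRI) term).getUnicodeString(); // -> Reference
            } else if (term instanceof Literal) {
                return ((Literal) term).getLexicalForm();        // -> Text or typed value
            } else {
                return term.toString(); // BlankNode etc.: fall back to toString()
            }
        }
    }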
diff --git a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/utils/Resource2StringAdapter.java b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/utils/Resource2StringAdapter.java
index ffcf7f5..0e9dc4c 100644
--- a/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/utils/Resource2StringAdapter.java
+++ b/entityhub/model/clerezza/src/main/java/org/apache/stanbol/entityhub/model/clerezza/utils/Resource2StringAdapter.java
@@ -16,27 +16,27 @@
  */
 package org.apache.stanbol.entityhub.model.clerezza.utils;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator.Adapter;
 
 /**
- * Needed because UriRefs and Literals use the RDF representation for the
+ * Needed because IRIs and Literals use the RDF representation for the
  * toString Method
  *
  * @author Rupert Westenthaler
  *
  * @param <T>
  */
-public class Resource2StringAdapter<T extends Resource> implements Adapter<T, String> {
+public class Resource2StringAdapter<T extends RDFTerm> implements Adapter<T, String> {
 
     @Override
     public final String adapt(T value, Class<String> type) {
         if (value == null) {
             return null;
-        } else if (value instanceof UriRef) {
-            return ((UriRef) value).getUnicodeString();
+        } else if (value instanceof IRI) {
+            return ((IRI) value).getUnicodeString();
         } else if (value instanceof Literal) {
             return ((Literal) value).getLexicalForm();
         } else {
diff --git a/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentationTest.java b/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentationTest.java
index 9e2be18..4bdf417 100644
--- a/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentationTest.java
+++ b/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfRepresentationTest.java
@@ -28,11 +28,10 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Language;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.PlainLiteral;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
 import org.apache.stanbol.entityhub.servicesapi.model.Text;
 import org.apache.stanbol.entityhub.servicesapi.model.ValueFactory;
@@ -65,7 +64,7 @@
      * Additional Tests for special Features of the Clerezza based implementation
      * 
      * This includes mainly support for additional types like PlainLiteral,
-     * TypedLiteral, UriRefs. The conversion to such types as well as getter for
+     * TypedLiteral, IRIs: the conversion to such types as well as getters for
      * such types.
      *--------------------------------------------------------------------------
      */
@@ -79,11 +78,11 @@
     @Test
     public void testPlainLiteralToTextConversion(){
         String field = "urn:test.RdfRepresentation:test.field";
-        PlainLiteral noLangLiteral = new PlainLiteralImpl("A plain literal without Language");
-        PlainLiteral enLiteral = new PlainLiteralImpl("An english literal",new Language("en"));
-        PlainLiteral deLiteral = new PlainLiteralImpl("Ein Deutsches Literal",new Language("de"));
-        PlainLiteral deATLiteral = new PlainLiteralImpl("Ein Topfen Verband hilft bei Zerrungen",new Language("de-AT"));
-        Collection<PlainLiteral> plainLiterals = Arrays.asList(noLangLiteral,enLiteral,deLiteral,deATLiteral);
+        Literal noLangLiteral = new PlainLiteralImpl("A plain literal without Language");
+        Literal enLiteral = new PlainLiteralImpl("An english literal",new Language("en"));
+        Literal deLiteral = new PlainLiteralImpl("Ein Deutsches Literal",new Language("de"));
+        Literal deATLiteral = new PlainLiteralImpl("Ein Topfen Verband hilft bei Zerrungen",new Language("de-AT"));
+        Collection<Literal> plainLiterals = Arrays.asList(noLangLiteral,enLiteral,deLiteral,deATLiteral);
         Representation rep = createRepresentation(null);
         rep.add(field, plainLiterals);
         //now test, that the Plain Literals are available as natural language
@@ -104,7 +103,7 @@
         assertFalse(enLangaugeTexts.hasNext());//only a single result
         //3) test to get all natural language values
         Set<String> stringValues = new HashSet<String>();
-        for(PlainLiteral plainLiteral : plainLiterals){
+        for(Literal plainLiteral : plainLiterals){
             stringValues.add(plainLiteral.getLexicalForm());
         }
         Iterator<Text> texts = rep.getText(field);
@@ -123,9 +122,9 @@
     @Test
     public void testTypedLiteralToTextConversion(){
         String field = "urn:test.RdfRepresentation:test.field";
-        TypedLiteral stringLiteral = literalFactory.createTypedLiteral("This is a stirng value");
+        Literal stringLiteral = literalFactory.createTypedLiteral("This is a string value");
         //also add an integer to test that other typed literals are not used as texts
-        TypedLiteral integerLiteral = literalFactory.createTypedLiteral(new Integer(5));
+        Literal integerLiteral = literalFactory.createTypedLiteral(new Integer(5));
         Representation rep = createRepresentation(null);
         rep.add(field, Arrays.asList(stringLiteral,integerLiteral));
         //test if the literal is returned when asking for natural language text without language
@@ -151,20 +150,20 @@
     public void testTypedLiteralToValueConversion(){
         String field = "urn:test.RdfRepresentation:test.field";
         Integer integerValue = 5;
-        TypedLiteral integerLiteral = literalFactory.createTypedLiteral(integerValue);
+        Literal integerLiteral = literalFactory.createTypedLiteral(integerValue);
         Date dateValue = new Date();
-        TypedLiteral dateLiteeral = literalFactory.createTypedLiteral(dateValue);
+        Literal dateLiteeral = literalFactory.createTypedLiteral(dateValue);
         Double doubleValue = Math.PI;
-        TypedLiteral doubleLiteral = literalFactory.createTypedLiteral(doubleValue);
+        Literal doubleLiteral = literalFactory.createTypedLiteral(doubleValue);
         String stringValue = "This is a string literal value";
-        TypedLiteral stringLiteral = literalFactory.createTypedLiteral(stringValue);
+        Literal stringLiteral = literalFactory.createTypedLiteral(stringValue);
         Representation rep = createRepresentation(null);
-        Collection<TypedLiteral> typedLiterals = 
+        Collection<Literal> typedLiterals = 
             Arrays.asList(integerLiteral,doubleLiteral,stringLiteral,dateLiteeral);
         rep.add(field, typedLiterals);
         
         //now check that such values are available via TypedLiteral
-        Iterator<TypedLiteral> typedLiteralValues = rep.get(field, TypedLiteral.class);
+        Iterator<Literal> typedLiteralValues = rep.get(field, Literal.class);
         int size = 0;
         while(typedLiteralValues.hasNext()){
             assertTrue(typedLiterals.contains(typedLiteralValues.next()));
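Callers that previously asked a Representation for TypedLiteral.class now ask for Literal.class, as the last hunk shows. A sketch of the lookup (field and id are invented; createRepresentation follows the RdfValueFactory API above):

    import java.util.Iterator;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.rdf.core.LiteralFactory;
    import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
    import org.apache.stanbol.entityhub.servicesapi.model.Representation;

    public class LiteralLookupSketch {
        public static void main(String[] args) {
            Representation rep = RdfValueFactory.getInstance()
                    .createRepresentation("urn:example:rep");
            String field = "urn:example:field";
            rep.add(field, LiteralFactory.getInstance().createTypedLiteral(5));
            Iterator<Literal> it = rep.get(field, Literal.class); // was TypedLiteral.class
            while (it.hasNext()) {
                System.out.println(it.next().getLexicalForm()); // 5
            }
        }
    }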
diff --git a/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactoryTest.java b/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactoryTest.java
index ec8a266..8038ed1 100644
--- a/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactoryTest.java
+++ b/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/RdfValueFactoryTest.java
@@ -16,10 +16,10 @@
  */
 package org.apache.stanbol.entityhub.model.clerezza;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.servicesapi.model.ValueFactory;
 import org.apache.stanbol.entityhub.test.model.ValueFactoryTest;
 import org.junit.Before;
@@ -51,12 +51,12 @@
     }
     @Test(expected=IllegalArgumentException.class)
     public void testNullNodeRepresentation() {
-        MGraph graph = new IndexedMGraph();
+        Graph graph = new IndexedGraph();
         valueFactory.createRdfRepresentation(null, graph);
     }
     @Test(expected=IllegalArgumentException.class)
     public void testNullGraphRepresentation() {
-        UriRef rootNode = new UriRef("urn:test.rootNode");
+        IRI rootNode = new IRI("urn:test.rootNode");
         valueFactory.createRdfRepresentation(rootNode, null);
     }
     
diff --git a/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/impl/ResourceAdapterTest.java b/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/impl/ResourceAdapterTest.java
index a050a25..0d8712e 100644
--- a/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/impl/ResourceAdapterTest.java
+++ b/entityhub/model/clerezza/src/test/java/org/apache/stanbol/entityhub/model/clerezza/impl/ResourceAdapterTest.java
@@ -22,11 +22,11 @@
 import java.util.Iterator;
 import java.util.Set;
 
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
 import org.junit.Assert;
@@ -39,9 +39,9 @@
      */
     @Test
     public void testDouble(){
-        MGraph graph = new IndexedMGraph();
-        UriRef id = new UriRef("http://www.example.org/test");
-        UriRef doubleTestField = new UriRef("http://www.example.org/field/double");
+        Graph graph = new IndexedGraph();
+        IRI id = new IRI("http://www.example.org/test");
+        IRI doubleTestField = new IRI("http://www.example.org/field/double");
         LiteralFactory lf = LiteralFactory.getInstance();
         graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Double.NaN)));
         graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Double.POSITIVE_INFINITY)));
@@ -62,9 +62,9 @@
     
     @Test
     public void testFloat(){
-        MGraph graph = new IndexedMGraph();
-        UriRef id = new UriRef("http://www.example.org/test");
-        UriRef doubleTestField = new UriRef("http://www.example.org/field/double");
+        Graph graph = new IndexedGraph();
+        IRI id = new IRI("http://www.example.org/test");
+        IRI doubleTestField = new IRI("http://www.example.org/field/double");
         LiteralFactory lf = LiteralFactory.getInstance();
         graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Float.NaN)));
         graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(Float.POSITIVE_INFINITY)));
@@ -85,9 +85,9 @@
 // TODO: how to create NAN, POSITIVE_INFINITY, NEGATIVE_INVINITY instances for BigDecimal
 //    @Test
 //    public void testBigDecimal(){
-//        MGraph graph = new IndexedMGraph();
-//        UriRef id = new UriRef("http://www.example.org/test");
-//        UriRef doubleTestField = new UriRef("http://www.example.org/field/double");
+//        Graph graph = new IndexedGraph();
+//        IRI id = new IRI("http://www.example.org/test");
+//        IRI doubleTestField = new IRI("http://www.example.org/field/double");
 //        LiteralFactory lf = LiteralFactory.getInstance();
 //        graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(BigDecimal.valueOf(Double.NaN))));
 //        graph.add(new TripleImpl(id, doubleTestField, lf.createTypedLiteral(BigDecimal.valueOf(Double.POSITIVE_INFINITY))));
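The double/float tests keep their logic; only the graph construction types change. A hedged sketch of the round trip those tests exercise (the retrieval call assumes the Representation.get(String, Class) method used in RdfRepresentationTest above):

    import java.util.Iterator;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.rdf.core.LiteralFactory;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
    import org.apache.stanbol.entityhub.servicesapi.model.Representation;

    public class DoubleRoundTripSketch {
        public static void main(String[] args) {
            Graph graph = new IndexedGraph();
            IRI id = new IRI("http://www.example.org/test");
            IRI field = new IRI("http://www.example.org/field/double");
            LiteralFactory lf = LiteralFactory.getInstance();
            graph.add(new TripleImpl(id, field, lf.createTypedLiteral(Double.NaN)));
            Representation rep = RdfValueFactory.getInstance()
                    .createRdfRepresentation(id, graph);
            Iterator<Double> values = rep.get(field.getUnicodeString(), Double.class);
            while (values.hasNext()) {
                System.out.println(values.next()); // NaN
            }
        }
    }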
diff --git a/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/ClerezzaQueryUtils.java b/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/ClerezzaQueryUtils.java
index 82abb24..0c5fad5 100644
--- a/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/ClerezzaQueryUtils.java
+++ b/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/ClerezzaQueryUtils.java
@@ -18,10 +18,10 @@
 
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
 import org.apache.stanbol.entityhub.query.sparql.SparqlQueryUtils;
@@ -47,24 +47,24 @@
 
     private static final RdfValueFactory valueFavtory = RdfValueFactory.getInstance();
     /**
-     * {@link UriRef} constant for {@link RdfResourceEnum#queryResult}
+     * {@link IRI} constant for {@link RdfResourceEnum#queryResult}
      * 
      * @see RdfResourceEnum.fieldQueryResult
      */
-    public static final UriRef FIELD_QUERY_RESULT = new UriRef(RdfResourceEnum.queryResult.getUri());
+    public static final IRI FIELD_QUERY_RESULT = new IRI(RdfResourceEnum.queryResult.getUri());
     /**
-     * {@link UriRef} constant for {@link RdfResourceEnum#QueryResultSet}
+     * {@link IRI} constant for {@link RdfResourceEnum#QueryResultSet}
      * 
      * @see RdfResourceEnum.FieldQueryResultSet
      */
-    public static final UriRef FIELD_QUERY_RESULT_SET = new UriRef(RdfResourceEnum.QueryResultSet.getUri());
+    public static final IRI FIELD_QUERY_RESULT_SET = new IRI(RdfResourceEnum.QueryResultSet.getUri());
 
     /**
      * @param query
      * @param resultGraph
      * @return
      */
-    public static Iterator<RdfRepresentation> parseQueryResultsFromMGraph(final TripleCollection resultGraph) {
+    public static Iterator<RdfRepresentation> parseQueryResultsFromGraph(final Graph resultGraph) {
         Iterator<Triple> resultTripleIterator = resultGraph.filter(FIELD_QUERY_RESULT_SET,
             FIELD_QUERY_RESULT, null);
         Iterator<RdfRepresentation> resultIterator = new AdaptingIterator<Triple,RdfRepresentation>(
@@ -75,14 +75,14 @@
                      */
                     @Override
                     public RdfRepresentation adapt(Triple value, Class<RdfRepresentation> type) {
-                        Resource object = value.getObject();
+                        RDFTerm object = value.getObject();
                         if (object == null) {
                             return null;
-                        } else if (object instanceof UriRef) {
-                            return valueFavtory.createRdfRepresentation((UriRef) object, resultGraph);
+                        } else if (object instanceof IRI) {
+                            return valueFavtory.createRdfRepresentation((IRI) object, resultGraph);
                         } else {
                             log.warn("Unable to create representation for FieldQueryResult " + object
-                                     + " because this Resource is not of Type UriRef (type: "
+                                     + " because this RDFTerm is not of Type IRI (type: "
                                      + object.getClass() + ") -> result gets ignored");
                             return null;
                         }
diff --git a/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/RdfQueryResultList.java b/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/RdfQueryResultList.java
index 6c224ac..e4809fb 100644
--- a/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/RdfQueryResultList.java
+++ b/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/RdfQueryResultList.java
@@ -25,7 +25,7 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
 import org.apache.stanbol.entityhub.servicesapi.query.FieldQuery;
@@ -37,19 +37,19 @@
 
     private final FieldQuery query;
     private final List<RdfRepresentation> results;
-    private final MGraph resultGraph;
+    private final Graph resultGraph;
 
-    public RdfQueryResultList(FieldQuery query,MGraph resultGraph) {
+    public RdfQueryResultList(FieldQuery query,Graph resultGraph) {
         if(query == null){
             throw new IllegalArgumentException("Parameter Query MUST NOT be NULL!");
         }
         if(resultGraph == null){
-            throw new IllegalArgumentException("Parameter \"MGraph resultGraph\" MUST NOT be NULL");
+            throw new IllegalArgumentException("Parameter \"Graph resultGraph\" MUST NOT be NULL");
         }
         this.query = query;
         this.resultGraph = resultGraph;
         List<RdfRepresentation> results = (List<RdfRepresentation>)ModelUtils.addToCollection(
-            ClerezzaQueryUtils.parseQueryResultsFromMGraph(resultGraph),
+            ClerezzaQueryUtils.parseQueryResultsFromGraph(resultGraph),
             new ArrayList<RdfRepresentation>());
         //sort the list based on the score
         Collections.sort(results,RESULT_SCORE_COMPARATOR);
@@ -92,10 +92,10 @@
         return results.size();
     }
     /**
-     * Getter for the RDF Graph holding the Results of the Query
-     * @return the RDF Graph with the Results
+     * Getter for the RDF ImmutableGraph holding the Results of the Query
+     * @return the RDF ImmutableGraph with the Results
      */
-    public final MGraph getResultGraph() {
+    public final Graph getResultGraph() {
         return resultGraph;
     }
     @Override
diff --git a/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/SparqlQueryUtils.java b/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/SparqlQueryUtils.java
index 893dc2a..d421edd 100644
--- a/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/SparqlQueryUtils.java
+++ b/entityhub/query/clerezza/src/main/java/org/apache/stanbol/entityhub/query/clerezza/SparqlQueryUtils.java
@@ -18,8 +18,8 @@
 
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
 import org.apache.stanbol.entityhub.query.sparql.SparqlEndpointTypeEnum;
 import org.apache.stanbol.entityhub.servicesapi.model.rdf.RdfResourceEnum;
@@ -39,19 +39,19 @@
     private SparqlQueryUtils() {}
 
     /**
-     * {@link UriRef} constant for {@link RdfResourceEnum#queryResult}
+     * {@link IRI} constant for {@link RdfResourceEnum#queryResult}
      * 
      * @see ClerezzaQueryUtils#FIELD_QUERY_RESULT
      */
     @Deprecated
-    public static final UriRef FIELD_QUERY_RESULT = ClerezzaQueryUtils.FIELD_QUERY_RESULT;
+    public static final IRI FIELD_QUERY_RESULT = ClerezzaQueryUtils.FIELD_QUERY_RESULT;
     /**
-     * {@link UriRef} constant for {@link RdfResourceEnum#QueryResultSet}
+     * {@link IRI} constant for {@link RdfResourceEnum#QueryResultSet}
      * 
      * @see ClerezzaQueryUtils#FIELD_QUERY_RESULT_SET
      */
     @Deprecated
-    public static final UriRef FIELD_QUERY_RESULT_SET = new UriRef(RdfResourceEnum.QueryResultSet.getUri());
+    public static final IRI FIELD_QUERY_RESULT_SET = new IRI(RdfResourceEnum.QueryResultSet.getUri());
 
     /**
      * Use {@link org.apache.stanbol.entityhub.query.sparql.SparqlQueryUtils}
@@ -246,11 +246,11 @@
      * @param query
      * @param resultGraph
      * @return
-     * @see ClerezzaQueryUtils#parseQueryResultsFromMGraph(TripleCollection)
+     * @see ClerezzaQueryUtils#parseQueryResultsFromGraph(Graph)
      */
     @Deprecated
-    public static Iterator<RdfRepresentation> parseQueryResultsFromMGraph(final TripleCollection resultGraph) {
-        return ClerezzaQueryUtils.parseQueryResultsFromMGraph(resultGraph);
+    public static Iterator<RdfRepresentation> parseQueryResultsFromGraph(final Graph resultGraph) {
+        return ClerezzaQueryUtils.parseQueryResultsFromGraph(resultGraph);
     }
 
 }
diff --git a/entityhub/query/clerezza/src/test/java/org/apache/stanbol/entityhub/query/clerezza/RdfResultListTest.java b/entityhub/query/clerezza/src/test/java/org/apache/stanbol/entityhub/query/clerezza/RdfResultListTest.java
index a4d118a..8e80fac 100644
--- a/entityhub/query/clerezza/src/test/java/org/apache/stanbol/entityhub/query/clerezza/RdfResultListTest.java
+++ b/entityhub/query/clerezza/src/test/java/org/apache/stanbol/entityhub/query/clerezza/RdfResultListTest.java
@@ -20,11 +20,11 @@
 import java.util.SortedMap;
 import java.util.TreeMap;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.core.query.FieldQueryImpl;
 import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
@@ -44,10 +44,10 @@
     @Test
     public void testRdfResultSorting(){
         SortedMap<Double,RdfRepresentation> sorted = new TreeMap<Double,RdfRepresentation>();
-        MGraph resultGraph = new IndexedMGraph();
+        Graph resultGraph = new IndexedGraph();
         RdfValueFactory vf = new RdfValueFactory(resultGraph);
-        UriRef resultListNode = new UriRef(RdfResourceEnum.QueryResultSet.getUri());
-        UriRef resultProperty = new UriRef(RdfResourceEnum.queryResult.getUri());
+        IRI resultListNode = new IRI(RdfResourceEnum.QueryResultSet.getUri());
+        IRI resultProperty = new IRI(RdfResourceEnum.queryResult.getUri());
         for(int i=0;i<100;i++){
             Double rank;
             do { //avoid duplicate keys
diff --git a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/CoolUriDereferencer.java b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/CoolUriDereferencer.java
index 7957f16..e508f99 100644
--- a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/CoolUriDereferencer.java
+++ b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/CoolUriDereferencer.java
@@ -21,13 +21,13 @@
 import java.net.URL;
 import java.net.URLConnection;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Reference;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.core.site.AbstractEntityDereferencer;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
@@ -72,10 +72,10 @@
         long queryEnd = System.currentTimeMillis();
         log.debug("  > DereferenceTime: "+(queryEnd-start));
         if(in != null){
-            MGraph rdfData = new IndexedMGraph(parser.parse(in, format,new UriRef(getBaseUri())));
+            Graph rdfData = new IndexedGraph(parser.parse(in, format,new IRI(getBaseUri())));
             long parseEnd = System.currentTimeMillis();
             log.debug("  > ParseTime: "+(parseEnd-queryEnd));
-            return valueFactory.createRdfRepresentation(new UriRef(uri), rdfData);
+            return valueFactory.createRdfRepresentation(new IRI(uri), rdfData);
         } else {
             return null;
         }
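All dereferencers in this patch share the same parse-and-wrap idiom: the parser output is copied into an IndexedGraph and handed to the value factory under the requested IRI. A condensed sketch (in and format stand for the fetched stream and its MIME type):

    import java.io.InputStream;
    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.core.serializedform.Parser;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
    import org.apache.stanbol.entityhub.servicesapi.model.Representation;

    public class DereferenceSketch {
        static Representation dereference(Parser parser, InputStream in,
                String format, String uri, String baseUri) {
            Graph rdfData = new IndexedGraph(parser.parse(in, format, new IRI(baseUri)));
            return RdfValueFactory.getInstance()
                    .createRdfRepresentation(new IRI(uri), rdfData);
        }
    }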
diff --git a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/LarqSearcher.java b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/LarqSearcher.java
index 9cfb6e7..396c9d6 100644
--- a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/LarqSearcher.java
+++ b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/LarqSearcher.java
@@ -23,13 +23,12 @@
 import java.io.InputStream;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Reference;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.core.query.QueryResultListImpl;
 import org.apache.stanbol.entityhub.core.site.AbstractEntitySearcher;
 import org.apache.stanbol.entityhub.query.clerezza.RdfQueryResultList;
@@ -69,13 +69,13 @@
         long queryEnd = System.currentTimeMillis();
         log.debug("  > QueryTime: "+(queryEnd-initEnd));
         if(in != null){
-            MGraph graph;
-            TripleCollection rdfData = parser.parse(in, SparqlSearcher.DEFAULT_RDF_CONTENT_TYPE,
-                new UriRef(getBaseUri()));
-            if(rdfData instanceof MGraph){
-                graph = (MGraph) rdfData;
+            Graph graph;
+            Graph rdfData = parser.parse(in, SparqlSearcher.DEFAULT_RDF_CONTENT_TYPE,
+                new IRI(getBaseUri()));
+            if(rdfData instanceof Graph){
+                graph = (Graph) rdfData;
             } else {
-                graph = new IndexedMGraph(rdfData);
+                graph = new IndexedGraph(rdfData);
             }
             long parseEnd = System.currentTimeMillis();
             log.debug("  > ParseTime: "+(parseEnd-queryEnd));
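Note that the rdfData instanceof Graph test above (repeated in SparqlSearcher and VirtuosoSearcher below) is now tautological: rdfData is already declared as Graph, so the else branch that wraps the data into an IndexedGraph can never run. A port that preserves the old MGraph-vs-Graph distinction would test for the immutable subtype instead; a sketch, assuming the parser may return an ImmutableGraph:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    public class MutableGraphSketch {
        static Graph asMutable(Graph rdfData) {
            return (rdfData instanceof ImmutableGraph)
                    ? new IndexedGraph(rdfData) // copy immutable parse results
                    : rdfData;                  // already mutable: use as-is
        }
    }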
diff --git a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlDereferencer.java b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlDereferencer.java
index bba0823..015b528 100644
--- a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlDereferencer.java
+++ b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlDereferencer.java
@@ -19,14 +19,14 @@
 import java.io.IOException;
 import java.io.InputStream;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.core.site.AbstractEntityDereferencer;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
@@ -67,7 +67,7 @@
         if(uri==null){
             return null;
         }
-        UriRef reference = new UriRef(uri);
+        IRI reference = new IRI(uri);
         StringBuilder query = new StringBuilder();
         query.append("CONSTRUCT { ");
         query.append(reference);
@@ -86,10 +86,10 @@
         long queryEnd = System.currentTimeMillis();
         log.debug("  > DereferenceTime: {}",(queryEnd-start));
         if(in != null){
-            MGraph rdfData = new IndexedMGraph(parser.parse(in, format,new UriRef(getBaseUri())));
+            Graph rdfData = new IndexedGraph(parser.parse(in, format,new IRI(getBaseUri())));
             long parseEnd = System.currentTimeMillis();
             log.debug("  > ParseTime: {}",(parseEnd-queryEnd));
-            return valueFactory.createRdfRepresentation(new UriRef(uri), rdfData);
+            return valueFactory.createRdfRepresentation(new IRI(uri), rdfData);
         } else {
             return null;
         }
diff --git a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlSearcher.java b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlSearcher.java
index 9fadc9f..63fa1d4 100644
--- a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlSearcher.java
+++ b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/SparqlSearcher.java
@@ -22,15 +22,14 @@
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.commons.io.IOUtils;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Reference;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.core.query.QueryResultListImpl;
 import org.apache.stanbol.entityhub.core.site.AbstractEntitySearcher;
 import org.apache.stanbol.entityhub.query.clerezza.RdfQueryResultList;
@@ -126,13 +126,13 @@
         long queryEnd = System.currentTimeMillis();
         log.debug("  > QueryTime: "+(queryEnd-initEnd));
         if(in != null){
-            MGraph graph;
-            TripleCollection rdfData = parser.parse(in, DEFAULT_RDF_CONTENT_TYPE,
-                new UriRef(getBaseUri()));
-            if(rdfData instanceof MGraph){
-                graph = (MGraph) rdfData;
+            Graph graph;
+            Graph rdfData = parser.parse(in, DEFAULT_RDF_CONTENT_TYPE,
+                new IRI(getBaseUri()));
+            if(rdfData instanceof Graph){
+                graph = (Graph) rdfData;
             } else {
-                graph = new IndexedMGraph(rdfData);
+                graph = new IndexedGraph(rdfData);
             }
             long parseEnd = System.currentTimeMillis();
             log.debug("  > ParseTime: "+(parseEnd-queryEnd));
diff --git a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/VirtuosoSearcher.java b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/VirtuosoSearcher.java
index 644463c..2d0ab53 100644
--- a/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/VirtuosoSearcher.java
+++ b/entityhub/site/linkeddata/src/main/java/org/apache/stanbol/entityhub/site/linkeddata/impl/VirtuosoSearcher.java
@@ -23,13 +23,12 @@
 import java.io.InputStream;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Reference;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.core.query.QueryResultListImpl;
 import org.apache.stanbol.entityhub.core.site.AbstractEntitySearcher;
 import org.apache.stanbol.entityhub.query.clerezza.RdfQueryResultList;
@@ -67,13 +67,13 @@
         long queryEnd = System.currentTimeMillis();
         log.info("  > QueryTime: " + (queryEnd - initEnd));
         if (in != null) {
-            MGraph graph;
-            TripleCollection rdfData = parser.parse(in, SparqlSearcher.DEFAULT_RDF_CONTENT_TYPE, new UriRef(
+            Graph graph;
+            Graph rdfData = parser.parse(in, SparqlSearcher.DEFAULT_RDF_CONTENT_TYPE, new IRI(
                     getBaseUri()));
-            if (rdfData instanceof MGraph) {
-                graph = (MGraph) rdfData;
+            if (rdfData instanceof Graph) {
+                graph = (Graph) rdfData;
             } else {
-                graph = new IndexedMGraph(rdfData);
+                graph = new IndexedGraph(rdfData);
             }
             long parseEnd = System.currentTimeMillis();
             log.info("  > ParseTime: " + (parseEnd - queryEnd));
diff --git a/entityhub/web/clerezza/src/main/java/org/apache/stanbol/entityhub/web/writer/clerezza/ClerezzaModelWriter.java b/entityhub/web/clerezza/src/main/java/org/apache/stanbol/entityhub/web/writer/clerezza/ClerezzaModelWriter.java
index 43788e8..fc9bd91 100644
--- a/entityhub/web/clerezza/src/main/java/org/apache/stanbol/entityhub/web/writer/clerezza/ClerezzaModelWriter.java
+++ b/entityhub/web/clerezza/src/main/java/org/apache/stanbol/entityhub/web/writer/clerezza/ClerezzaModelWriter.java
@@ -11,13 +11,12 @@
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.ontologies.RDF;
@@ -25,7 +25,7 @@
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.ReferenceCardinality;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
 import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
@@ -82,24 +82,24 @@
         Arrays.asList(TURTLE_TYPE, JSONLD_TYPE, N3_TYPE, N_TRIPLE_TYPE, RDF_JSON_TYPE, RDF_XML_TYPE, X_TURTLE_TYPE));
 
     //some Concepts and Relations we use to represent Entities
-    private final static UriRef FOAF_DOCUMENT = new UriRef(NamespaceEnum.foaf+"Document");
-    private final static UriRef FOAF_PRIMARY_TOPIC = new UriRef(NamespaceEnum.foaf+"primaryTopic");
-    private final static UriRef FOAF_PRIMARY_TOPIC_OF = new UriRef(NamespaceEnum.foaf+"isPrimaryTopicOf");
-    private final static UriRef SIGN_SITE = new UriRef(RdfResourceEnum.site.getUri());
-//    private final static UriRef ENTITY_TYPE = new UriRef(RdfResourceEnum.Entity.getUri());
+    private final static IRI FOAF_DOCUMENT = new IRI(NamespaceEnum.foaf+"Document");
+    private final static IRI FOAF_PRIMARY_TOPIC = new IRI(NamespaceEnum.foaf+"primaryTopic");
+    private final static IRI FOAF_PRIMARY_TOPIC_OF = new IRI(NamespaceEnum.foaf+"isPrimaryTopicOf");
+    private final static IRI SIGN_SITE = new IRI(RdfResourceEnum.site.getUri());
+//    private final static IRI ENTITY_TYPE = new IRI(RdfResourceEnum.Entity.getUri());
     private final static RdfValueFactory valueFactory = RdfValueFactory.getInstance();
     /**
      * The URI used for the query result list (static for all responses)
      */
-    private static final UriRef QUERY_RESULT_LIST = new UriRef(RdfResourceEnum.QueryResultSet.getUri());
+    private static final IRI QUERY_RESULT_LIST = new IRI(RdfResourceEnum.QueryResultSet.getUri());
     /**
      * The property used for all results
      */
-    private static final UriRef QUERY_RESULT = new UriRef(RdfResourceEnum.queryResult.getUri());
+    private static final IRI QUERY_RESULT = new IRI(RdfResourceEnum.queryResult.getUri());
     /**
      * The property used for the JSON serialised FieldQuery (STANBOL-298)
      */
-    private static final UriRef FIELD_QUERY = new UriRef(RdfResourceEnum.query.getUri());
+    private static final IRI FIELD_QUERY = new IRI(RdfResourceEnum.query.getUri());
 
     /**
      * This Serializer only supports UTF-8
@@ -158,7 +158,7 @@
     @Override
     public void write(QueryResultList<?> result, OutputStream out, MediaType mediaType) throws WebApplicationException,
             IOException {
-        MGraph queryRdf = toRDF(result);
+        Graph queryRdf = toRDF(result);
         //we also need to add the JSON formatted FieldQuery as a literal to the
         //RDF data.
         FieldQuery query = result.getQuery();
@@ -186,7 +186,7 @@
      * @param out
      * @param mediaType
      */
-    private void writeRdf(TripleCollection tc, OutputStream out, MediaType mediaType) {
+    private void writeRdf(Graph tc, OutputStream out, MediaType mediaType) {
         String charset = mediaType.getParameters().get("charset");
         if(charset == null){
             charset = ModelWriter.DEFAULT_CHARSET;
@@ -198,23 +198,23 @@
             .append('/').append(mediaType.getSubtype()).toString());
     }
 
-    private MGraph toRDF(Representation representation) {
-        MGraph graph = new IndexedMGraph();
+    private Graph toRDF(Representation representation) {
+        Graph graph = new IndexedGraph();
         addRDFTo(graph, representation);
         return graph;
     }
 
-    private void addRDFTo(MGraph graph, Representation representation) {
+    private void addRDFTo(Graph graph, Representation representation) {
         graph.addAll(valueFactory.toRdfRepresentation(representation).getRdfGraph());
     }
 
-    private TripleCollection toRDF(Entity entity) {
-        MGraph graph = new IndexedMGraph();
+    private Graph toRDF(Entity entity) {
+        Graph graph = new IndexedGraph();
         addRDFTo(graph, entity);
         return graph;
     }
 
-    private void addRDFTo(MGraph graph, Entity entity) {
+    private void addRDFTo(Graph graph, Entity entity) {
         addRDFTo(graph, entity.getRepresentation());
         addRDFTo(graph, entity.getMetadata());
         //now add some triples that represent the Sign
@@ -230,28 +230,28 @@
      * @param graph the graph to add the triples
      * @param sign the sign
      */
-    private void addEntityTriplesToGraph(MGraph graph, Entity sign) {
-        UriRef id = new UriRef(sign.getId());
-        UriRef metaId = new UriRef(sign.getMetadata().getId());
+    private void addEntityTriplesToGraph(Graph graph, Entity sign) {
+        IRI id = new IRI(sign.getId());
+        IRI metaId = new IRI(sign.getMetadata().getId());
         //add the FOAF triples between metadata and content
         graph.add(new TripleImpl(id, FOAF_PRIMARY_TOPIC_OF, metaId));
         graph.add(new TripleImpl(metaId, FOAF_PRIMARY_TOPIC, metaId));
         graph.add(new TripleImpl(metaId, RDF.type, FOAF_DOCUMENT));
         //add the site to the metadata
         //TODO: this should be the HTTP URI and not the id of the referenced site
-        TypedLiteral siteName = literalFactory.createTypedLiteral(sign.getSite());
+        Literal siteName = literalFactory.createTypedLiteral(sign.getSite());
         graph.add(new TripleImpl(metaId, SIGN_SITE, siteName));
         
     }
     
-    private MGraph toRDF(QueryResultList<?> resultList) {
-        final MGraph resultGraph;
+    private Graph toRDF(QueryResultList<?> resultList) {
+        final Graph resultGraph;
         Class<?> type = resultList.getType();
         if (String.class.isAssignableFrom(type)) {
-            resultGraph = new IndexedMGraph(); //create a new Graph
+            resultGraph = new IndexedGraph(); //create a new Graph
             for (Object result : resultList) {
                 //add a triple to each reference in the result set
-                resultGraph.add(new TripleImpl(QUERY_RESULT_LIST, QUERY_RESULT, new UriRef(result.toString())));
+                resultGraph.add(new TripleImpl(QUERY_RESULT_LIST, QUERY_RESULT, new IRI(result.toString())));
             }
         } else {
             //first determine the type of the resultList
@@ -277,22 +277,22 @@
                     //now add the Sign specific triples and add result triples
                     //to the Sign IDs
                     for (Object result : resultList) {
-                        UriRef signId = new UriRef(((Entity) result).getId());
+                        IRI signId = new IRI(((Entity) result).getId());
                         addEntityTriplesToGraph(resultGraph, (Entity) result);
                         resultGraph.add(new TripleImpl(QUERY_RESULT_LIST, QUERY_RESULT, signId));
                     }
                 }
             } else { //any other implementation of the QueryResultList interface
-                resultGraph = new IndexedMGraph(); //create a new graph
+                resultGraph = new IndexedGraph(); //create a new graph
                 if (Representation.class.isAssignableFrom(type)) {
                     for (Object result : resultList) {
-                        UriRef resultId;
+                        IRI resultId;
                         if (!isSignType) {
                             addRDFTo(resultGraph, (Representation) result);
-                            resultId = new UriRef(((Representation) result).getId());
+                            resultId = new IRI(((Representation) result).getId());
                         } else {
                             addRDFTo(resultGraph, (Entity) result);
-                            resultId = new UriRef(((Entity) result).getId());
+                            resultId = new IRI(((Entity) result).getId());
                         }
                         //Note: In case of Representation this Triple points to
                         //      the representation. In case of Signs it points to
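The edits in this writer are instances of the commit-wide type mapping between the clerezza 0.x and 1.0 APIs. A minimal sketch of that mapping, using only types that appear in the imports above (the example URIs are made up):

```java
// Old 0.x name        -> New 1.0 name (org.apache.clerezza.commons.rdf)
//   MGraph            -> Graph            (mutable)
//   Graph             -> ImmutableGraph   (read-only)
//   TripleCollection  -> Graph
//   UriRef            -> IRI
//   Resource          -> RDFTerm
//   NonLiteral        -> BlankNodeOrIRI
//   BNode             -> BlankNode
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

public class MigrationSketch {
    public static void main(String[] args) {
        Graph graph = new SimpleGraph();              // was: MGraph g = new SimpleMGraph();
        IRI doc = new IRI("http://example.org/doc");  // was: new UriRef("http://example.org/doc")
        IRI label = new IRI("http://www.w3.org/2000/01/rdf-schema#label");
        graph.add(new TripleImpl(doc, label, new PlainLiteralImpl("example")));
        System.out.println(graph.size());             // -> 1
    }
}
```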
diff --git a/entityhub/web/sesame/src/main/java/org/apache/stanbol/entityhub/web/writer/sesame/SesameModelWriter.java b/entityhub/web/sesame/src/main/java/org/apache/stanbol/entityhub/web/writer/sesame/SesameModelWriter.java
index 26c4df9..301a73d 100644
--- a/entityhub/web/sesame/src/main/java/org/apache/stanbol/entityhub/web/writer/sesame/SesameModelWriter.java
+++ b/entityhub/web/sesame/src/main/java/org/apache/stanbol/entityhub/web/writer/sesame/SesameModelWriter.java
@@ -236,7 +236,7 @@
         final Model resultGraph;
         Class<?> type = resultList.getType();
         if (String.class.isAssignableFrom(type)) {
-            resultGraph = new LinkedHashModel(); //create a new Graph
+            resultGraph = new LinkedHashModel(); //create a new Model
             for (Object result : resultList) {
                 //add a triple to each reference in the result set
                 resultGraph.add(QUERY_RESULT_LIST, QUERY_RESULT, sesameFactory.createURI(result.toString()));
diff --git a/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYard.java b/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYard.java
index 1a38fe2..f4fd787 100644
--- a/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYard.java
+++ b/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYard.java
@@ -24,20 +24,18 @@
 import java.util.Set;
 import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.access.LockableMGraph;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.NoSuchEntityException;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.rdf.core.LiteralFactory;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
 import org.apache.clerezza.rdf.core.sparql.QueryParser;
 import org.apache.clerezza.rdf.core.sparql.ResultSet;
@@ -52,7 +51,7 @@
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.entityhub.core.query.QueryResultListImpl;
 import org.apache.stanbol.entityhub.core.query.QueryUtils;
 import org.apache.stanbol.entityhub.servicesapi.util.AdaptingIterator;
@@ -84,7 +83,7 @@
  * Implementation of the Yard Interface based on a RDF Triple Store. This
  * Implementation uses Clerezza as RDF Framework. The actual Triple Store used
  * to store the data depends on the configuration of Clerezza.<p>
- * This implementation uses {@link LockableMGraph} interface for write locks
+ * This implementation acquires the write lock of the {@link Graph}
  * when updating the graph. SPARQL queries are not within a write lock.<p>
  *
  * @author Rupert Westenthaler
@@ -108,7 +107,7 @@
 public class ClerezzaYard extends AbstractYard implements Yard {
     private static Logger log = LoggerFactory.getLogger(ClerezzaYard.class);
     /**
-     * Property used to mark empty Representations managed by this Graph. This is
+     * Property used to mark empty Representations managed by this graph. This is
      * needed to workaround the fact, that the Entityhub supports the storage of
      * empty Representations but this Yard uses the search for any outgoing
      * relation (triple with the id of the representation as Subject) for the 
@@ -120,11 +119,11 @@
      * <code> ?representationId <{@value #MANAGED_REPRESENTATION}> true^^xsd:boolean </code>
      * <br> for any empty Representation avoids this unwanted behaviour.
      */
-    public static final UriRef MANAGED_REPRESENTATION = new UriRef("urn:org.apache.stanbol:entityhub.yard:rdf.clerezza:managesRepresentation");
+    public static final IRI MANAGED_REPRESENTATION = new IRI("urn:org.apache.stanbol:entityhub.yard:rdf.clerezza:managesRepresentation");
     /**
-     * Property used to optionally configure the URI of the Clerezza Graph.
-     * This graph will be looked up by using {@link TcManager#getTriples(UriRef).<p>
-     * Note that if the returned RDF graph is of instance Graph the write/delete
+     * Property used to optionally configure the URI of the Clerezza graph.
+     * This graph will be looked up via {@link TcManager#getImmutableGraph(IRI)} or {@link TcManager#getGraph(IRI)}.<p>
+     * Note that if the returned RDF graph is an instance of ImmutableGraph, the write/delete
     * operations of this implementation will not work.
      */
     public static final String GRAPH_URI = "org.apache.stanbol.entityhub.yard.clerezza.graphuri";
@@ -134,18 +133,19 @@
     private static final Literal TRUE_LITERAL = LiteralFactory.getInstance().createTypedLiteral(Boolean.FALSE);
     
     //public static final String YARD_URI_PREFIX = "urn:org.apache.stanbol:entityhub.yard:rdf.clerezza:";
-//    public static final UriRef REPRESENTATION = new UriRef(RdfResourceEnum.Representation.getUri());
+//    public static final IRI REPRESENTATION = new IRI(RdfResourceEnum.Representation.getUri());
 //    protected ComponentContext context;
 //    protected Dictionary<String,?> properties;
     @Reference
     private TcManager tcManager;
-    private UriRef yardGraphUri;
-    private TripleCollection graph;
+    private IRI yardGraphUri;
+    private Graph graph;
     
     private ServiceRegistration graphRegistration;
     private ComponentContext context;
+    private boolean immutable;
     
-    //private LockableMGraph graph;
+    //private LockableGraph graph;
 
     public ClerezzaYard() {
         super();
@@ -183,18 +183,21 @@
         if(this.yardGraphUri == null){ // use default
             String yardUri = getUriPrefix();
             //remove the "." at the last position of the prefix
-            this.yardGraphUri = new UriRef(yardUri.substring(0, yardUri.length()-2));
+            this.yardGraphUri = new IRI(yardUri.substring(0, yardUri.length()-2));
         }
         try {
-            this.graph = tcManager.getTriples(yardGraphUri);
+            try {
+                this.graph = tcManager.getImmutableGraph(yardGraphUri);
+                immutable = true;
+            } catch(NoSuchEntityException e) {
+                this.graph = tcManager.getGraph(yardGraphUri);
+                immutable = false;
+            }
             log.info("  ... (re)use existing Graph {} for Yard {}",
                 yardGraphUri,config.getName());
-            if(!(graph instanceof LockableMGraph)){
-                log.info("        > NOTE: this ClerezzaYard is read-only");
-            }
         } catch (NoSuchEntityException e) {
             log.info("   ... create new Graph {} for Yard {}",yardGraphUri,config.getName());
-            this.graph =  tcManager.createMGraph(yardGraphUri);
+            this.graph = tcManager.createGraph(yardGraphUri);
         }
         if(context != null){ //within an OSGI environment
             //Register the graph with the Stanbol SPARQL endpoint (STANBOL-677)
@@ -206,7 +211,7 @@
                 graphRegProp.put("graph.description", getConfig().getDescription());
             }
             graphRegistration = context.getBundleContext().registerService(
-                TripleCollection.class.getName(), graph, graphRegProp);
+                Graph.class.getName(), graph, graphRegProp);
         } //else do not register when running outside OSGI
     }
     @Deactivate
@@ -239,20 +244,20 @@
         if(id.isEmpty()){
             throw new IllegalArgumentException("The parsed representation id MUST NOT be EMTPY!");
         }
-        return getRepresentation(new UriRef(id),true);
+        return getRepresentation(new IRI(id),true);
     }
     /**
      * Internally used to create Representations for URIs
      * @param uri the uri
      * @param check if <code>false</code> than there is no check if the URI
-     *     refers to a Resource in the graph that is of type {@link #REPRESENTATION}
+     *     refers to a resource in the graph that is of type {@link #REPRESENTATION}
      * @return the Representation
      */
-    protected final Representation getRepresentation(UriRef uri, boolean check) {
+    protected final Representation getRepresentation(IRI uri, boolean check) {
         final Lock readLock = readLockGraph();
         try {
             if(!check || isRepresentation(uri)){
-                MGraph nodeGraph = createRepresentationGraph(uri, graph);
+                Graph nodeGraph = createRepresentationGraph(uri, graph);
                 //Remove the triple internally used to represent an empty Representation
                 // ... this will only remove the triple if the Representation is empty
                 //     but a check would take longer than the this call
@@ -272,49 +277,45 @@
      */
     private Lock readLockGraph() {
         final Lock readLock;
-        if(graph instanceof LockableMGraph){
-            readLock = ((LockableMGraph)graph).getLock().readLock();
-            readLock.lock();
-        } else {
-            readLock = null;
-        }
+        readLock = graph.getLock().readLock();
+        readLock.lock();
         return readLock;
     }
     /**
      * Extracts the triples that belong to the {@link Representation} with the
      * parsed id from the parsed graph. The graph is not modified and changes
      * in the returned graph will not affect the parsed graph.
-     * @param id the {@link UriRef} node representing the id of the Representation.
-     * @param graph the Graph to extract the representation from
+     * @param id the {@link IRI} node representing the id of the Representation.
+     * @param graph the ImmutableGraph to extract the representation from
      * @return the extracted graph.
      */
-    protected MGraph createRepresentationGraph(UriRef id, TripleCollection graph){
-        return extractRepresentation(graph, new IndexedMGraph(), id, new HashSet<BNode>());
+    protected Graph createRepresentationGraph(IRI id, Graph graph){
+        return extractRepresentation(graph, new IndexedGraph(), id, new HashSet<BlankNode>());
     }
     /**
      * Recursive Method internally doing all the work for 
-     * {@link #createRepresentationGraph(UriRef, TripleCollection)}
+     * {@link #createRepresentationGraph(IRI, Graph)}
      * @param source The graph to extract the Representation (source)
      * @param target The graph to store the extracted triples (target)
      * @param node the current node. Changes in recursive calls as it follows
-     * @param visited holding all the visited BNodes to avoid cycles. Other nodes 
+     * @param visited holding all the visited BlankNodes to avoid cycles. Other nodes 
     * need not be added because this implementation only follows
-     * outgoing relations if the object is a {@link BNode} instance.
+     * outgoing relations if the object is a {@link BlankNode} instance.
      * @return the target graph (for convenience)
      */
-    private MGraph extractRepresentation(TripleCollection source,MGraph target, NonLiteral node, Set<BNode> visited){
+    private Graph extractRepresentation(Graph source,Graph target, BlankNodeOrIRI node, Set<BlankNode> visited){
         //we need all the outgoing relations and also want to follow bNodes until
-        //the next UriRef. However we are not interested in incoming relations!
+        //the next IRI. However we are not interested in incoming relations!
         Iterator<Triple> outgoing = source.filter(node, null, null);
         while (outgoing.hasNext()) {
             Triple triple = outgoing.next();
             target.add(triple);
-            Resource object = triple.getObject();
-            if(object instanceof BNode){
+            RDFTerm object = triple.getObject();
+            if(object instanceof BlankNode){
                 //add first and then follow because there might be a triple such as
                 // bnode1 <urn:someProperty> bnode1
-                visited.add((BNode)object);
-                extractRepresentation(source, target, (NonLiteral)object, visited);
+                visited.add((BlankNode)object);
+                extractRepresentation(source, target, (BlankNodeOrIRI)object, visited);
             }
         }
         return target;
@@ -329,14 +330,14 @@
             throw new IllegalArgumentException("The parsed id MUST NOT be EMPTY!");
         }
         //search for any outgoing triple
-        return isRepresentation(new UriRef(id));
+        return isRepresentation(new IRI(id));
     }
     /**
      * Internally used to check if a URI resource represents an representation
      * @param resource the resource to check
      * @return the state
      */
-    protected final boolean isRepresentation(UriRef resource){
+    protected final boolean isRepresentation(IRI resource){
         return graph.filter(resource, null, null).hasNext();
     }
 
@@ -345,7 +346,7 @@
         if(id == null) {
             throw new IllegalArgumentException("The parsed Representation id MUST NOT be NULL!");
         }
-        UriRef resource = new UriRef(id);
+        IRI resource = new IRI(id);
         final Lock writeLock = writeLockGraph();
         try {
             Iterator<Triple> it = graph.filter(resource, null, null);
@@ -365,15 +366,13 @@
      * @throws YardException
      */
     private Lock writeLockGraph() throws YardException {
-        final Lock writeLock;
-        if(graph instanceof LockableMGraph){
-            writeLock = ((LockableMGraph)graph).getLock().writeLock();
-            writeLock.lock();
-        } else {
-            throw new YardException("Unable modify data in ClerezzaYard '"+getId()
+        if (immutable) {
+             throw new YardException("Unable modify data in ClerezzaYard '"+getId()
                 + "' because the backing RDF graph '"+yardGraphUri
                 + "' is read-only!");
         }
+        final Lock writeLock = graph.getLock().writeLock();
+        writeLock.lock();
         return writeLock;
     }
     @Override
@@ -445,7 +445,7 @@
             return null;
         }
         log.debug("store Representation " + representation.getId());
-        UriRef id = new UriRef(representation.getId());
+        IRI id = new IRI(representation.getId());
         final Lock writeLock = writeLockGraph();
         try {
             Iterator<Triple> current = graph.filter(id, null, null);
@@ -491,14 +491,14 @@
         // first we use the adaptingIterator to convert resource to string
         // to get the resources we have to retrieve the root-variable of the
         // Iterator<SolutionMapping> provided by the ResultSet of the SPARQL query
-        Iterator<String> representationIdIterator = new AdaptingIterator<Resource, String>(
-                new Iterator<Resource>() {
+        Iterator<String> representationIdIterator = new AdaptingIterator<RDFTerm, String>(
+                new Iterator<RDFTerm>() {
                     @Override public void remove() { result.remove(); }
-                    @Override public Resource next() {
+                    @Override public RDFTerm next() {
                         return result.next().get(query.getRootVariableName()); }
                     @Override public boolean hasNext() { return result.hasNext(); }
                 },
-                new Resource2StringAdapter<Resource>(), String.class);
+                new Resource2StringAdapter<RDFTerm>(), String.class);
         return new QueryResultListImpl<String>(query,representationIdIterator,String.class);
     }
     /**
@@ -547,14 +547,14 @@
                      * @param solution a solution of the query
                      * @param type the type (no generics here)
                      * @return the representation or <code>null</code> if result is
-                     * not an UriRef or there is no Representation for the result.
+                     * not an IRI or there is no Representation for the result.
                      */
                     @Override
                     public Representation adapt(SolutionMapping solution, Class<Representation> type) {
-                        Resource resource = solution.get(query.getRootVariableName());
-                        if(resource instanceof UriRef){
+                        RDFTerm resource = solution.get(query.getRootVariableName());
+                        if(resource instanceof IRI){
                             try {
-                                return getRepresentation((UriRef)resource,false);
+                                return getRepresentation((IRI)resource,false);
                             } catch (IllegalArgumentException e) {
                                 log.warn("Unable to create Representation for ID "+resource+"! -> ignore query result");
                                 return null;
@@ -591,14 +591,14 @@
             throw new YardException("Unable to parse SPARQL query generated for the parse FieldQuery",e);
         }
         Object resultObject = tcManager.executeSparqlQuery(sparqlQuery, graph);
-        final MGraph resultGraph;
-        if(resultObject instanceof MGraph){
-            resultGraph = (MGraph)resultObject;
-        } else if(resultObject instanceof Graph){
-            resultGraph = new IndexedMGraph();
-            resultGraph.addAll((Graph)resultObject);
+        final Graph resultGraph;
+        if(resultObject instanceof Graph){
+            resultGraph = (Graph)resultObject;
+        } else if(resultObject instanceof ImmutableGraph){
+            resultGraph = new IndexedGraph();
+            resultGraph.addAll((ImmutableGraph)resultObject);
         } else {
-            log.error("Unable to create "+MGraph.class+" instance for query reults of type "+resultObject.getClass()+" (this indicates that the used SPARQL Query was not of type CONSTRUCT)");
+            log.error("Unable to create "+Graph.class+" instance for query reults of type "+resultObject.getClass()+" (this indicates that the used SPARQL Query was not of type CONSTRUCT)");
             log.error("FieldQuery: "+query);
             log.error("SPARQL Query: "+sparqlQueryString);
             throw new YardException("Unable to process results of Query");
diff --git a/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardConfig.java b/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardConfig.java
index c33f183..192bdc8 100644
--- a/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardConfig.java
+++ b/entityhub/yard/clerezza/src/main/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardConfig.java
@@ -18,7 +18,7 @@
 
 import java.util.Dictionary;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.entityhub.core.yard.AbstractYard.YardConfig;
 import org.osgi.service.cm.ConfigurationException;
 
@@ -38,12 +38,12 @@
      * Getter for the {@link ClerezzaYard#GRAPH_URI} property
      * @return the graph URI or <code>null</code> if non is configured
      */
-    public UriRef getGraphUri(){
+    public IRI getGraphUri(){
         Object value = config.get(ClerezzaYard.GRAPH_URI);
-        if(value instanceof UriRef){
-            return (UriRef)value;
+        if(value instanceof IRI){
+            return (IRI)value;
         } else if (value != null){
-            return new UriRef(value.toString());
+            return new IRI(value.toString());
         } else {
             return null;
         }
@@ -52,7 +52,7 @@
      * Setter for the {@link ClerezzaYard#GRAPH_URI} property
      * @param uri the uri or <code>null</code> to remove this configuration
      */
-    public void setGraphUri(UriRef uri){
+    public void setGraphUri(IRI uri){
         if(uri == null){
             config.remove(ClerezzaYard.GRAPH_URI);
         } else {
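A short, hypothetical usage sketch of the updated accessors; the graph URI is a placeholder:

```java
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.stanbol.entityhub.yard.clerezza.impl.ClerezzaYardConfig;

public class ConfigSketch {
    public static void main(String[] args) {
        ClerezzaYardConfig config = new ClerezzaYardConfig("exampleYardId");
        config.setGraphUri(new IRI("http://example.org/yard-graph")); // stored as-is
        IRI uri = config.getGraphUri();  // the configured IRI, or null if unset
        System.out.println(uri.getUnicodeString());
        config.setGraphUri(null);        // removes the GRAPH_URI entry
    }
}
```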
diff --git a/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardTest.java b/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardTest.java
index e233720..37a7d41 100644
--- a/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardTest.java
+++ b/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ClerezzaYardTest.java
@@ -52,7 +52,7 @@
     /**
      * The Clerezza Yard uses the Statement<br>
      * <code>representationId -> rdf:type -> Representation</code><br>
-     * to identify that an UriRef in the RDF graph (MGraph) represents a
+     * to identify that an IRI in the RDF graph (Graph) represents a
      * Representation. This Triple is added when a Representation is stored and
      * removed if retrieved from the Yard.<p>
      * This tests if this functions as expected
diff --git a/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ExistingClerezzaGraphTest.java b/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ExistingClerezzaGraphTest.java
index 3e58d7c..bde0d0b 100644
--- a/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ExistingClerezzaGraphTest.java
+++ b/entityhub/yard/clerezza/src/test/java/org/apache/stanbol/entityhub/yard/clerezza/impl/ExistingClerezzaGraphTest.java
@@ -17,11 +17,9 @@
 package org.apache.stanbol.entityhub.yard.clerezza.impl;
 
 import static java.util.Collections.singletonMap;
-import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertNotNull;
 import static junit.framework.Assert.assertTrue;
 
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
@@ -29,20 +27,17 @@
 
 import junit.framework.Assert;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Language;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Language;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
-import org.apache.clerezza.rdf.core.impl.graphmatching.GraphMatcher;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.clerezza.rdf.ontologies.SKOS;
-import org.apache.stanbol.entityhub.core.model.InMemoryValueFactory;
 import org.apache.stanbol.entityhub.model.clerezza.RdfRepresentation;
 import org.apache.stanbol.entityhub.model.clerezza.RdfValueFactory;
 import org.apache.stanbol.entityhub.servicesapi.model.Representation;
@@ -54,7 +49,7 @@
 /**
  * Unit tests for testing {@link ClerezzaYard} initialisation and usage in 
  * cases the configured {@link ClerezzaYardConfig#getGraphUri()} points to
- * already existing Clerezza {@link MGraph}s and {@link Graph} instances.<p>
+ * already existing Clerezza {@link Graph}s and {@link ImmutableGraph} instances.<p>
  * This basically tests features added with STANBOL-662 and STANBOL-663
  * @author Rupert Westenthaler
  *
@@ -64,10 +59,10 @@
     private static TcManager tcManager;
     private static Language EN = new Language("en");
     private static Language DE = new Language("de");
-    private static final Map<UriRef,TripleCollection> entityData = new HashMap<UriRef,TripleCollection>();
+    private static final Map<IRI,Graph> entityData = new HashMap<IRI,Graph>();
     
-    private static UriRef READ_ONLY_GRAPH_URI = new UriRef("http://www.test.org/read-only-grpah");
-    private static UriRef READ_WRITEGRAPH_URI = new UriRef("http://www.test.org/read-write-grpah");
+    private static IRI READ_ONLY_GRAPH_URI = new IRI("http://www.test.org/read-only-graph");
+    private static IRI READ_WRITEGRAPH_URI = new IRI("http://www.test.org/read-write-graph");
     
     private static ClerezzaYard readwriteYard;
     private static ClerezzaYard readonlyYard;
@@ -77,24 +72,24 @@
         initTestData();
         //create the graphs in Clerezza
         tcManager = TcManager.getInstance();
-        MGraph graph = tcManager.createMGraph(READ_WRITEGRAPH_URI);
+        Graph graph = tcManager.createGraph(READ_WRITEGRAPH_URI);
         //add the test data to the Graph
-        for(TripleCollection tc :entityData.values()){ 
+        for(Graph tc :entityData.values()){ 
             graph.addAll(tc);
         }
         //create the read only graph
-        tcManager.createGraph(READ_ONLY_GRAPH_URI, graph);
+        tcManager.createImmutableGraph(READ_ONLY_GRAPH_URI, graph);
         
         //init the ClerezzaYards for the created Clerezza graphs
         ClerezzaYardConfig readWriteConfig = new ClerezzaYardConfig("readWriteYardId");
         readWriteConfig.setName("Clerezza read/write Yard");
-        readWriteConfig.setDescription("Tests config with pre-existing MGraph");
+        readWriteConfig.setDescription("Tests config with pre-existing Graph");
         readWriteConfig.setGraphUri(READ_WRITEGRAPH_URI);
         readwriteYard = new ClerezzaYard(readWriteConfig);
 
         ClerezzaYardConfig readOnlyYardConfig = new ClerezzaYardConfig("readOnlyYardId");
         readOnlyYardConfig.setName("Clerezza read-only Yard");
-        readOnlyYardConfig.setDescription("Tests config with pre-existing Graph");
+        readOnlyYardConfig.setDescription("Tests config with pre-existing ImmutableGraph");
         readOnlyYardConfig.setGraphUri(READ_ONLY_GRAPH_URI);
         readonlyYard = new ClerezzaYard(readOnlyYardConfig);
 
@@ -106,7 +101,7 @@
      */
     @Test
     public void testRetrival(){
-        for(Entry<UriRef,TripleCollection> entity : entityData.entrySet()){
+        for(Entry<IRI,Graph> entity : entityData.entrySet()){
             validateEntity(readonlyYard,entity);
             validateEntity(readwriteYard,entity);
         }
@@ -148,12 +143,12 @@
      * retrieved by the tested {@link ClerezzaYard}s.
      * @param entity key - URI; value - expected RDF data
      */
-    private void validateEntity(ClerezzaYard yard, Entry<UriRef,TripleCollection> entity) {
+    private void validateEntity(ClerezzaYard yard, Entry<IRI,Graph> entity) {
         Representation rep = yard.getRepresentation(entity.getKey().getUnicodeString());
         assertNotNull("The Representation for "+entity.getKey()
             + "is missing in the "+yard.getId(), rep);
         assertTrue("RdfRepresentation expected", rep instanceof RdfRepresentation);
-        TripleCollection repGraph = ((RdfRepresentation)rep).getRdfGraph();
+        Graph repGraph = ((RdfRepresentation)rep).getRdfGraph();
         for(Iterator<Triple> triples = entity.getValue().iterator();triples.hasNext();){
             Triple triple = triples.next();
             assertTrue("Data of Representation "+entity.getKey()
@@ -168,15 +163,15 @@
      * Initialises the {@link #entityData} used for this test (called in BeforeClass)
      */
     private static void initTestData() {
-        UriRef entity1 = new UriRef("http://www.test.org/entity1");
-        MGraph entity1Data = new SimpleMGraph();
+        IRI entity1 = new IRI("http://www.test.org/entity1");
+        Graph entity1Data = new SimpleGraph();
         entity1Data.add(new TripleImpl(entity1,RDF.type, SKOS.Concept));
         entity1Data.add(new TripleImpl(entity1,SKOS.prefLabel, new PlainLiteralImpl("test", EN)));
         entity1Data.add(new TripleImpl(entity1,SKOS.prefLabel, new PlainLiteralImpl("Test", DE)));
         entityData.put(entity1, entity1Data);
         
-        MGraph entity2Data = new SimpleMGraph();
-        UriRef entity2 = new UriRef("http://www.test.org/entity2");
+        Graph entity2Data = new SimpleGraph();
+        IRI entity2 = new IRI("http://www.test.org/entity2");
         entity2Data.add(new TripleImpl(entity2, RDF.type, SKOS.Concept));
         entity2Data.add(new TripleImpl(entity2,SKOS.prefLabel, new PlainLiteralImpl("sub-test", EN)));
         entity2Data.add(new TripleImpl(entity2,SKOS.prefLabel, new PlainLiteralImpl("Untertest", DE)));
@@ -186,7 +181,7 @@
 
     @AfterClass
     public static void cleanup(){
-        tcManager.deleteTripleCollection(READ_ONLY_GRAPH_URI);
-        tcManager.deleteTripleCollection(READ_WRITEGRAPH_URI);
+        tcManager.deleteGraph(READ_ONLY_GRAPH_URI);
+        tcManager.deleteGraph(READ_WRITEGRAPH_URI);
     }
 }
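The fixture above drives both yard modes from the same data. A condensed sketch of the TcManager graph lifecycle it relies on (URIs are placeholders):

```java
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.rdf.core.access.TcManager;

public class GraphLifecycleSketch {
    public static void main(String[] args) {
        TcManager tcManager = TcManager.getInstance();
        IRI rwUri = new IRI("http://example.org/read-write");
        IRI roUri = new IRI("http://example.org/read-only");

        // was: createMGraph(...) -- creates an empty mutable Graph
        Graph rw = tcManager.createGraph(rwUri);

        // was: createGraph(uri, triples) -- registers a read-only
        // ImmutableGraph snapshot of rw's current triples under roUri
        tcManager.createImmutableGraph(roUri, rw);

        // was: deleteTripleCollection(...)
        tcManager.deleteGraph(roUri);
        tcManager.deleteGraph(rwUri);
    }
}
```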
diff --git a/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultiThreadedTestBase.java b/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultiThreadedTestBase.java
index 7ada01d..d6b842a 100644
--- a/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultiThreadedTestBase.java
+++ b/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultiThreadedTestBase.java
@@ -39,12 +39,12 @@
 import java.util.concurrent.TimeUnit;
 import java.util.zip.GZIPInputStream;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedFormatException;
@@ -61,7 +61,7 @@
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.namespaceprefix.NamespaceMappingUtils;
 import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService;
 import org.apache.stanbol.commons.namespaceprefix.service.StanbolNamespacePrefixService;
@@ -93,7 +93,7 @@
      */
     public static final String PROPERTY_CHAIN = "stanbol.it.multithreadtest.chain";
     /**
-     * The reference to the test data. Can be a File, a Resource available via the
+     * The reference to the test data. Can be a File, a resource available via the
      * Classpath or an URL. This also supports compressed files. In case of ZIP
      * only the first entry is processed.
      */
@@ -245,7 +245,7 @@
                 mediaType = null;
             }
         }
-        Assert.assertNotNull("Unable to detect MediaType for Resource '"
+        Assert.assertNotNull("Unable to detect MediaType for RDFTerm '"
             + name+"'. Please use the property '"+PROPERTY_TEST_DATA_TYPE
             + "' to manually parse the MediaType!", mediaType);
         
@@ -271,7 +271,7 @@
      * the Apache Clerezza RDF parsers.
      */
     private Iterator<String> createRdfDataIterator(InputStream is, String mediaType, final String propertyString) {
-        final SimpleMGraph graph = new SimpleMGraph();
+        final SimpleGraph graph = new SimpleGraph();
         try {
             rdfParser.parse(graph, is, mediaType);
         } catch (UnsupportedFormatException e) {
@@ -288,19 +288,19 @@
             String next = null;
             private String getNext(){
                 if(it == null){
-                    UriRef property;
+                    IRI property;
                     if("*".equals(propertyString)){
                         property = null; //wildcard
                         log.info("Iterate over values of all Triples");
                     } else {
-                        property = new UriRef(
+                        property = new IRI(
                             NamespaceMappingUtils.getConfiguredUri(nsPrefixService, propertyString));
                         log.info("Iterate over values of property {}", property);
                     }
                     it = graph.filter(null, property, null);
                 }
                 while(it.hasNext()){
-                    Resource value = it.next().getObject();
+                    RDFTerm value = it.next().getObject();
                     if(value instanceof Literal){
                         return ((Literal)value).getLexicalForm();
                     }
@@ -635,7 +635,7 @@
             }
         }
 
-        void succeed(Request request, UriRef contentItemUri, TripleCollection results, Long rtt, int size) {
+        void succeed(Request request, IRI contentItemUri, Graph results, Long rtt, int size) {
             ExecutionMetadata em = ExecutionMetadata.parseFrom(results, contentItemUri);
             results.clear(); // we no longer need the results
             if (em != null) {
@@ -751,19 +751,19 @@
                 rtt = null;
                 return;
             }
-            IndexedMGraph graph = new IndexedMGraph();
+            IndexedGraph graph = new IndexedGraph();
             try {
                 rdfParser.parse(graph,executor.getStream(), executor.getContentType().getMimeType());
                 Iterator<Triple> ciIt = graph.filter(null, Properties.ENHANCER_EXTRACTED_FROM, null);
                 if(!ciIt.hasNext()){
                     throw new IllegalStateException("Enhancement Results do not caontain a single Enhancement");
                 }
-                Resource contentItemUri = ciIt.next().getObject();
-                if(!(contentItemUri instanceof UriRef)){
-                    throw new IllegalStateException("ContentItem URI is not an UriRef but an instance of "
+                RDFTerm contentItemUri = ciIt.next().getObject();
+                if(!(contentItemUri instanceof IRI)){
+                    throw new IllegalStateException("ContentItem URI is not an IRI but an instance of "
                             + contentItemUri.getClass().getSimpleName());
                 }
-                tracker.succeed(request, (UriRef) contentItemUri, graph, rtt, executor.getContent().length());
+                tracker.succeed(request, (IRI) contentItemUri, graph, rtt, executor.getContent().length());
                 content = null; //do not store content for successful results
             } catch (Exception e) {
                 log.warn("Exception while parsing Enhancement Response",e);
diff --git a/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultipartRequestTest.java b/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultipartRequestTest.java
index 6ca7c0b..3ca1162 100644
--- a/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultipartRequestTest.java
+++ b/integration-tests/src/test/java/org/apache/stanbol/enhancer/it/MultipartRequestTest.java
@@ -30,13 +30,13 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.ontologies.RDF;
@@ -379,7 +379,7 @@
     
     @Test
     public void testContentBeforeMetadata() throws IOException{
-        final UriRef contentItemId = new UriRef("http://www.example.com/test.html");
+        final IRI contentItemId = new IRI("http://www.example.com/test.html");
         String rdfContentType = SupportedFormat.RDF_XML;
         String rdfContent = getDummyRdfMetadata(contentItemId, rdfContentType);
         MultipartEntityBuilder ciBuilder = MultipartEntityBuilder.create();
@@ -399,7 +399,7 @@
     }
     @Test
     public void testMissingContent() throws IOException{
-        final UriRef contentItemId = new UriRef("http://www.example.com/test.html");
+        final IRI contentItemId = new IRI("http://www.example.com/test.html");
         String rdfContentType = SupportedFormat.RDF_XML;
         String rdfContent = getDummyRdfMetadata(contentItemId, rdfContentType);
         MultipartEntityBuilder ciBuilder = MultipartEntityBuilder.create();
@@ -422,9 +422,9 @@
      * @param rdfContentType
      * @return
      */
-    private String getDummyRdfMetadata(final UriRef contentItemId, String rdfContentType) {
-        MGraph metadata = new SimpleMGraph();
-        metadata.add(new TripleImpl(new BNode(), Properties.ENHANCER_EXTRACTED_FROM, contentItemId));
+    private String getDummyRdfMetadata(final IRI contentItemId, String rdfContentType) {
+        Graph metadata = new SimpleGraph();
+        metadata.add(new TripleImpl(new BlankNode(), Properties.ENHANCER_EXTRACTED_FROM, contentItemId));
         ByteArrayOutputStream out = new ByteArrayOutputStream();
         serializer.serialize(out, metadata, rdfContentType);
         String rdfContent = new String(out.toByteArray(),UTF8);
@@ -440,9 +440,9 @@
     @Test
     public void testUploadWithMetadata() throws IOException {
         //create the metadata
-        Resource user = new PlainLiteralImpl("Rupert Westenthaler");
-        final UriRef contentItemId = new UriRef("http://www.example.com/test.html");
-        MGraph metadata = new SimpleMGraph();
+        RDFTerm user = new PlainLiteralImpl("Rupert Westenthaler");
+        final IRI contentItemId = new IRI("http://www.example.com/test.html");
+        Graph metadata = new SimpleGraph();
         addTagAsTextAnnotation(metadata, contentItemId, 
             "Germany",DBPEDIA_PLACE, user);
         addTagAsTextAnnotation(metadata, contentItemId, 
@@ -515,9 +515,9 @@
      * @param user the user that created the tag
      * @return the uri of the created annotation
      */
-    private static final UriRef addTagAsTextAnnotation(MGraph graph, UriRef contentItem, 
-                                                       String tag, UriRef tagType, Resource user){
-        UriRef ta = new UriRef("urn:user-annotation:"+EnhancementEngineHelper.randomUUID());
+    private static final IRI addTagAsTextAnnotation(Graph graph, IRI contentItem, 
+                                                       String tag, IRI tagType, RDFTerm user){
+        IRI ta = new IRI("urn:user-annotation:"+EnhancementEngineHelper.randomUUID());
         graph.add(new TripleImpl(ta, RDF.type, TechnicalClasses.ENHANCER_TEXTANNOTATION));
         graph.add(new TripleImpl(ta, Properties.ENHANCER_EXTRACTED_FROM,contentItem));
         if(tagType != null){
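A sketch of the metadata-building pattern used by getDummyRdfMetadata and testUploadWithMetadata above, assuming the enhancer servicesapi Properties constants; the helper name is hypothetical:

```java
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.clerezza.commons.rdf.BlankNode;
import org.apache.clerezza.commons.rdf.Graph;
import org.apache.clerezza.commons.rdf.IRI;
import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
import org.apache.clerezza.rdf.core.serializedform.Serializer;
import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
import org.apache.stanbol.enhancer.servicesapi.rdf.Properties;

public class MetadataSketch {
    // A single BlankNode enhancement linked to the content item,
    // serialised as RDF/XML -- the minimal metadata the tests send.
    static String dummyMetadata(Serializer serializer, IRI contentItemId) {
        Graph metadata = new SimpleGraph();
        metadata.add(new TripleImpl(new BlankNode(),
                Properties.ENHANCER_EXTRACTED_FROM, contentItemId));
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializer.serialize(out, metadata, SupportedFormat.RDF_XML);
        return new String(out.toByteArray(), StandardCharsets.UTF_8);
    }
}
```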
diff --git a/launchers/bundlelists/security/src/main/bundles/list.xml b/launchers/bundlelists/security/src/main/bundles/list.xml
index e71b808..95807b5 100644
--- a/launchers/bundlelists/security/src/main/bundles/list.xml
+++ b/launchers/bundlelists/security/src/main/bundles/list.xml
@@ -21,12 +21,12 @@
     <bundle>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.file.storage</artifactId>
-      <version>0.4</version>
+      <version>1.0.0</version>
     </bundle>
     <bundle>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>platform.config</artifactId>
-      <version>0.4</version>
+      <version>1.0.0</version>
     </bundle>
     <bundle>
       <groupId>org.apache.clerezza</groupId>
@@ -36,7 +36,7 @@
     <bundle>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>platform</artifactId>
-      <version>0.2</version>
+      <version>1.0.0</version>
     </bundle>
     <bundle>
       <groupId>org.apache.stanbol</groupId>
@@ -71,7 +71,7 @@
                 <bundle>
 			<groupId>org.apache.clerezza</groupId>
 			<artifactId>platform.graphprovider.content</artifactId>
-			<version>0.7</version>
+			<version>1.0.0</version>
 		</bundle>
   </startLevel>
 </bundles>
diff --git a/launchers/bundlelists/stanbolcommons/src/main/bundles/list.xml b/launchers/bundlelists/stanbolcommons/src/main/bundles/list.xml
index 28609e3..b6094c8 100644
--- a/launchers/bundlelists/stanbolcommons/src/main/bundles/list.xml
+++ b/launchers/bundlelists/stanbolcommons/src/main/bundles/list.xml
@@ -388,7 +388,17 @@
     <bundle>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>jaxrs.rdf.providers</artifactId>
-      <version>0.15</version>
+      <version>1.0.0</version>
+    </bundle>
+    <bundle>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <version>1.1.1</version>
+    </bundle>
+    <bundle>
+      <groupId>org.apache.clerezza</groupId>
+      <artifactId>rdf.rdfjson</artifactId>
+      <version>1.0.0</version>
     </bundle>
   </startLevel>
 
diff --git a/launchers/bundlelists/zzshell/pom.xml b/launchers/bundlelists/zzshell/pom.xml
index 8aba99b..e69de29 100644
--- a/launchers/bundlelists/zzshell/pom.xml
+++ b/launchers/bundlelists/zzshell/pom.xml
@@ -1,65 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.stanbol</groupId>
-    <artifactId>apache-stanbol-bundlelists</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
-    <relativePath>..</relativePath>
-  </parent>
-
-  <groupId>org.apache.stanbol</groupId>
-  <artifactId>org.apache.stanbol.launchers.bundlelists.zzshell</artifactId>
-  <version>1.0.0-SNAPSHOT</version>
-  <packaging>partialbundlelist</packaging>
-
-  <name>Apache Stanbol Bundlelist for the Clerezza Shell</name>
-  <description>This is a Maven project which produces the partial list containing for bundles providing the clerezza shell with ssh access</description>
-
-  <scm>
-    <connection>
-      scm:svn:http://svn.apache.org/repos/asf/stanbol/trunk/launchers/bundlelists/zzshell
-    </connection>
-    <developerConnection>
-      scm:svn:https://svn.apache.org/repos/asf/stanbol/trunk/launchers/bundlelists/zzshell
-    </developerConnection>
-    <url>http://stanbol.apache.org/</url>
-  </scm>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.sling</groupId>
-        <artifactId>maven-launchpad-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>attach-bundle-list</id>
-            <goals>
-              <goal>attach-bundle-list</goal>
-            </goals>
-            <configuration>
-              <includeDefaultBundles>false</includeDefaultBundles>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/launchers/bundlelists/zzshell/src/main/bundles/list.xml b/launchers/bundlelists/zzshell/src/main/bundles/list.xml
index 485471c..e69de29 100644
--- a/launchers/bundlelists/zzshell/src/main/bundles/list.xml
+++ b/launchers/bundlelists/zzshell/src/main/bundles/list.xml
@@ -1,109 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<bundles>
-    
-    
-	<!--  Scala -->
-	<startLevel level="32">
-		<bundle>
-			<groupId>org.apache.servicemix.bundles</groupId>
-			<artifactId>org.apache.servicemix.bundles.scala-library</artifactId>
-            <version>2.8.1_1</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.servicemix.bundles</groupId>
-			<artifactId>org.apache.servicemix.bundles.scala-compiler</artifactId>
-            <version>2.8.1_1</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza.ext</groupId>
-			<artifactId>slf4j-scala-api</artifactId>
-			<version>1.6.1-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza.scala</groupId>
-			<artifactId>script-engine</artifactId>
-			<version>0.1-incubating</version>
-		</bundle>
-	</startLevel>
-	
-	<!-- Console Shell -->
-	<startLevel level="33">
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>shell</artifactId>
-			<version>0.1-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>osgi.services</artifactId>
-			<version>0.1-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.servicemix.bundles</groupId>
-			<artifactId>org.apache.servicemix.bundles.jline</artifactId>
-			<version>0.9.94_1</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>permissiondescriptions</artifactId>
-			<version>0.1-incubating</version>
-		</bundle>
-	</startLevel>
-	
-	<!-- SSH Shell access -->
-	<startLevel level="34">
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>sshshell</artifactId>
-			<version>0.1-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>platform.config</artifactId>
-			<version>0.3-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>platform.security</artifactId>
-			<version>0.8-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>platform</artifactId>
-			<version>0.1-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.clerezza</groupId>
-			<artifactId>platform.graphprovider.content</artifactId>
-			<version>0.6-incubating</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.mina</groupId>
-			<artifactId>mina-core</artifactId>
-			<version>2.0.7</version>
-		</bundle>
-		<bundle>
-			<groupId>org.apache.sshd</groupId>
-			<artifactId>sshd-core</artifactId>
-			<version>0.8.0</version>
-		</bundle>
-	</startLevel>
-  
-
-</bundles>
\ No newline at end of file
diff --git a/launchers/full/pom.xml b/launchers/full/pom.xml
index 73e7603..96fb16c 100644
--- a/launchers/full/pom.xml
+++ b/launchers/full/pom.xml
@@ -191,7 +191,7 @@
     <dependency>
       <groupId>org.apache.clerezza.provisioning</groupId>
       <artifactId>rdf</artifactId>
-      <version>0.1</version>
+      <version>1.0.0</version>
       <type>partialbundlelist</type>
       <scope>provided</scope>
     </dependency>
@@ -200,7 +200,7 @@
     <dependency>
       <groupId>org.apache.clerezza.provisioning</groupId>
       <artifactId>rdf.tdb</artifactId>
-      <version>0.1</version>
+      <version>1.0.0</version>
       <type>partialbundlelist</type>
       <scope>provided</scope>
     </dependency>
@@ -224,13 +224,13 @@
     </dependency> 
     
     <!-- Clerezza zz> Shell Bundle List  --> 
-    <dependency>
+    <!-- <dependency>
       <groupId>org.apache.clerezza.provisioning</groupId>
       <artifactId>shell</artifactId>
       <version>0.1</version>
       <type>partialbundlelist</type>
       <scope>provided</scope>
-    </dependency>
+    </dependency> -->
 
     <!-- Stanbol Data Bundle List -->
     <dependency>
diff --git a/nbactions.xml b/nbactions.xml
index 055d792..60bf979 100644
--- a/nbactions.xml
+++ b/nbactions.xml
@@ -40,4 +40,15 @@
                <allowSnapshots>true</allowSnapshots>
            </properties>
        </action>
+        <action>
+            <actionName>CUSTOM-use-latests-zz-versions</actionName>
+            <displayName>use-latest-clerezza-versions</displayName>
+            <goals>
+                <goal>versions:use-latest-versions</goal>
+            </goals>
+            <properties>
+                <includes>org.apache.clerezza</includes>
+                <allowSnapshots>false</allowSnapshots>
+            </properties>
+        </action>
     </actions>

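Note: the new NetBeans action is roughly equivalent to invoking the Versions
Maven Plugin directly, e.g. "mvn versions:use-latest-versions
-Dincludes=org.apache.clerezza -DallowSnapshots=false" (goal and parameter
names are standard versions-maven-plugin options; the invocation itself is
illustrative, not part of this commit).
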
diff --git a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/NamedArtifact.java b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/NamedArtifact.java
index 8062891..4e1a52c 100644
--- a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/NamedArtifact.java
+++ b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/NamedArtifact.java
@@ -45,7 +45,7 @@
      * objects of different types cannot have the same ID. These aspects should be ensured by registries,
      * indexers etc.<br>
      * <br>
-     * XXX check if a type other than String (e.g. URI, IRI, UriRef) should be used.
+     * XXX check if a type other than String (e.g. URI, OWL API IRI, Clerezza IRI) should be used.
      * 
      * @return an identifier for this resource.
      */
diff --git a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/Vocabulary.java b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/Vocabulary.java
index 7a048d6..e7c1d79 100644
--- a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/Vocabulary.java
+++ b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/Vocabulary.java
@@ -16,9 +16,8 @@
  */
 package org.apache.stanbol.ontologymanager.servicesapi;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.semanticweb.owlapi.apibinding.OWLManager;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLClass;
 import org.semanticweb.owlapi.model.OWLDataFactory;
 import org.semanticweb.owlapi.model.OWLDataProperty;
@@ -59,7 +58,7 @@
 
     private static final String _SHORT_ENTRY = "Entry";
 
-    private static final String _SHORT_GRAPH = "Graph";
+    private static final String _SHORT_GRAPH = "ImmutableGraph";
 
     private static final String _SHORT_HAS_APPENDED = "hasAppended";
 
@@ -116,325 +115,325 @@
     /**
      * The OWL <b>object property</b> <tt>isAppendedTo</tt>.
      */
-    public static final OWLObjectProperty APPENDED_TO = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty APPENDED_TO = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_APPENDED_TO));
 
     /**
-     * The OWL <b>object property</b> <tt>isAppendedTo</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>isAppendedTo</tt> (in IRI form).
      */
-    public static final UriRef APPENDED_TO_URIREF = new UriRef(_NS_ONTONET + _SHORT_APPENDED_TO);
+    public static final IRI APPENDED_TO_URIREF = new IRI(_NS_ONTONET + _SHORT_APPENDED_TO);
 
     /**
      * The OWL <b>object property</b> <tt>dependsOn</tt>.
      */
-    public static final OWLObjectProperty DEPENDS_ON = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty DEPENDS_ON = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_DEPENDS_ON));
 
     /**
-     * The OWL <b>object property</b> <tt>dependsOn</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>dependsOn</tt> (in IRI form).
      */
-    public static final UriRef DEPENDS_ON_URIREF = new UriRef(_NS_ONTONET + _SHORT_DEPENDS_ON);
+    public static final IRI DEPENDS_ON_URIREF = new IRI(_NS_ONTONET + _SHORT_DEPENDS_ON);
 
     /**
      * The OWL <b>class</b> <tt>Entry</tt>.
      */
-    public static final OWLClass ENTRY = __df.getOWLClass(IRI.create(_NS_ONTONET + _SHORT_ENTRY));
+    public static final OWLClass ENTRY = __df.getOWLClass(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET + _SHORT_ENTRY));
 
     /**
-     * The OWL <b>class</b> <tt>Entry</tt> (in UriRef form).
+     * The OWL <b>class</b> <tt>Entry</tt> (in IRI form).
      */
-    public static final UriRef ENTRY_URIREF = new UriRef(_NS_ONTONET + _SHORT_ENTRY);
+    public static final IRI ENTRY_URIREF = new IRI(_NS_ONTONET + _SHORT_ENTRY);
 
     /**
-     * The OWL <b>class</b> <tt>Graph</tt>.
+     * The OWL <b>class</b> <tt>ImmutableGraph</tt>.
      */
-    public static final OWLClass GRAPH = __df.getOWLClass(IRI.create(_NS_ONTONET + _SHORT_GRAPH));
+    public static final OWLClass GRAPH = __df.getOWLClass(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET + _SHORT_GRAPH));
 
     /**
-     * The OWL <b>class</b> <tt>Graph</tt> (in UriRef form).
+     * The OWL <b>class</b> <tt>ImmutableGraph</tt> (in IRI form).
      */
-    public static final UriRef GRAPH_URIREF = new UriRef(_NS_ONTONET + _SHORT_GRAPH);
+    public static final IRI GRAPH_URIREF = new IRI(_NS_ONTONET + _SHORT_GRAPH);
 
     /**
      * The OWL <b>object property</b> <tt>hasAppended</tt>.
      */
-    public static final OWLObjectProperty HAS_APPENDED = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty HAS_APPENDED = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_HAS_APPENDED));
 
     /**
-     * The OWL <b>object property</b> <tt>hasAppended</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>hasAppended</tt> (in IRI form).
      */
-    public static final UriRef HAS_APPENDED_URIREF = new UriRef(_NS_ONTONET + _SHORT_HAS_APPENDED);
+    public static final IRI HAS_APPENDED_URIREF = new IRI(_NS_ONTONET + _SHORT_HAS_APPENDED);
 
     /**
      * The OWL <b>object property</b> <tt>hasDependent</tt>.
      */
-    public static final OWLObjectProperty HAS_DEPENDENT = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty HAS_DEPENDENT = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_HAS_DEPENDENT));
 
     /**
-     * The OWL <b>datatype property</b> <tt>hasDependent</tt> (in UriRef form).
+     * The OWL <b>datatype property</b> <tt>hasDependent</tt> (in IRI form).
      */
-    public static final UriRef HAS_DEPENDENT_URIREF = new UriRef(_NS_ONTONET + _SHORT_HAS_DEPENDENT);
+    public static final IRI HAS_DEPENDENT_URIREF = new IRI(_NS_ONTONET + _SHORT_HAS_DEPENDENT);
 
     /**
      * The OWL <b>datatype property</b> <tt>hasOntologyIRI</tt>.
      */
-    public static final OWLDataProperty HAS_ONTOLOGY_IRI = __df.getOWLDataProperty(IRI
+    public static final OWLDataProperty HAS_ONTOLOGY_IRI = __df.getOWLDataProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_HAS_ONTOLOGY_IRI));
 
     /**
-     * The OWL <b>datatype property</b> <tt>hasOntologyIRI</tt> (in UriRef form).
+     * The OWL <b>datatype property</b> <tt>hasOntologyIRI</tt> (in IRI form).
      */
-    public static final UriRef HAS_ONTOLOGY_IRI_URIREF = new UriRef(_NS_ONTONET + _SHORT_HAS_ONTOLOGY_IRI);
+    public static final IRI HAS_ONTOLOGY_IRI_URIREF = new IRI(_NS_ONTONET + _SHORT_HAS_ONTOLOGY_IRI);
 
     /**
      * The OWL <b>object property</b> <tt>isManagedBy</tt>.
      */
-    public static final OWLObjectProperty HAS_SPACE_CORE = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty HAS_SPACE_CORE = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_HAS_SPACE_CORE));
 
     /**
-     * The OWL <b>object property</b> <tt>hasCoreSpace</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>hasCoreSpace</tt> (in IRI form).
      */
-    public static final UriRef HAS_SPACE_CORE_URIREF = new UriRef(_NS_ONTONET + _SHORT_HAS_SPACE_CORE);
+    public static final IRI HAS_SPACE_CORE_URIREF = new IRI(_NS_ONTONET + _SHORT_HAS_SPACE_CORE);
 
     /**
      * The OWL <b>object property</b> <tt>isManagedBy</tt>.
      */
-    public static final OWLObjectProperty HAS_SPACE_CUSTOM = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty HAS_SPACE_CUSTOM = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_HAS_SPACE_CUSTOM));
 
     /**
-     * The OWL <b>object property</b> <tt>hasCustomSpace</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>hasCustomSpace</tt> (in IRI form).
      */
-    public static final UriRef HAS_SPACE_CUSTOM_URIREF = new UriRef(_NS_ONTONET + _SHORT_HAS_SPACE_CUSTOM);
+    public static final IRI HAS_SPACE_CUSTOM_URIREF = new IRI(_NS_ONTONET + _SHORT_HAS_SPACE_CUSTOM);
 
     /**
-     * The OWL <b>object property</b> <tt>hasStatus</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>hasStatus</tt> (in IRI form).
      */
-    public static final OWLObjectProperty HAS_STATUS = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty HAS_STATUS = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_HAS_STATUS));
 
     /**
      * The OWL <b>object property</b> <tt>hasStatus</tt>.
      */
-    public static final UriRef HAS_STATUS_URIREF = new UriRef(_NS_ONTONET + _SHORT_HAS_STATUS);
+    public static final IRI HAS_STATUS_URIREF = new IRI(_NS_ONTONET + _SHORT_HAS_STATUS);
 
     /**
      * The OWL <b>datatype property</b> <tt>hasVersionIRI</tt>.
      */
-    public static final OWLDataProperty HAS_VERSION_IRI = __df.getOWLDataProperty(IRI
+    public static final OWLDataProperty HAS_VERSION_IRI = __df.getOWLDataProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_HAS_VERSION_IRI));
 
     /**
-     * The OWL <b>datatype property</b> <tt>hasVersionIRI</tt> (in UriRef form).
+     * The OWL <b>datatype property</b> <tt>hasVersionIRI</tt> (in IRI form).
      */
-    public static final UriRef HAS_VERSION_IRI_URIREF = new UriRef(_NS_ONTONET + _SHORT_HAS_VERSION_IRI);
+    public static final IRI HAS_VERSION_IRI_URIREF = new IRI(_NS_ONTONET + _SHORT_HAS_VERSION_IRI);
 
     /**
      * The OWL <b>object property</b> <tt>isManagedBy</tt>.
      */
-    public static final OWLObjectProperty IS_MANAGED_BY = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty IS_MANAGED_BY = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_IS_MANAGED_BY));
 
     /**
      * The OWL <b>object property</b> <tt>isManagedByCore</tt>.
      */
-    public static final OWLObjectProperty IS_MANAGED_BY_CORE = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty IS_MANAGED_BY_CORE = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_IS_MANAGED_BY_CORE));
 
     /**
-     * The OWL <b>object property</b> <tt>isManagedByCore</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>isManagedByCore</tt> (in IRI form).
      */
-    public static final UriRef IS_MANAGED_BY_CORE_URIREF = new UriRef(_NS_ONTONET + _SHORT_IS_MANAGED_BY_CORE);
+    public static final IRI IS_MANAGED_BY_CORE_URIREF = new IRI(_NS_ONTONET + _SHORT_IS_MANAGED_BY_CORE);
 
     /**
      * The OWL <b>object property</b> <tt>isManagedByCustom</tt>.
      */
-    public static final OWLObjectProperty IS_MANAGED_BY_CUSTOM = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty IS_MANAGED_BY_CUSTOM = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_IS_MANAGED_BY_CUSTOM));
 
     /**
-     * The OWL <b>object property</b> <tt>isManagedByCustom</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>isManagedByCustom</tt> (in IRI form).
      */
-    public static final UriRef IS_MANAGED_BY_CUSTOM_URIREF = new UriRef(_NS_ONTONET
+    public static final IRI IS_MANAGED_BY_CUSTOM_URIREF = new IRI(_NS_ONTONET
                                                                         + _SHORT_IS_MANAGED_BY_CUSTOM);
 
     /**
-     * The OWL <b>object property</b> <tt>isManagedBy</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>isManagedBy</tt> (in IRI form).
      */
-    public static final UriRef IS_MANAGED_BY_URIREF = new UriRef(_NS_ONTONET + _SHORT_IS_MANAGED_BY);
+    public static final IRI IS_MANAGED_BY_URIREF = new IRI(_NS_ONTONET + _SHORT_IS_MANAGED_BY);
 
     /**
      * The OWL <b>object property</b> <tt>isCoreSpaceOf</tt>.
      */
-    public static final OWLObjectProperty IS_SPACE_CORE_OF = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty IS_SPACE_CORE_OF = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_IS_SPACE_CORE_OF));
 
     /**
-     * The OWL <b>object property</b> <tt>isCoreSpaceOf</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>isCoreSpaceOf</tt> (in IRI form).
      */
-    public static final UriRef IS_SPACE_CORE_OF_URIREF = new UriRef(_NS_ONTONET + _SHORT_IS_SPACE_CORE_OF);
+    public static final IRI IS_SPACE_CORE_OF_URIREF = new IRI(_NS_ONTONET + _SHORT_IS_SPACE_CORE_OF);
 
     /**
      * The OWL <b>object property</b> <tt>isCustomSpaceOf</tt>.
      */
-    public static final OWLObjectProperty IS_SPACE_CUSTOM_OF = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty IS_SPACE_CUSTOM_OF = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_IS_SPACE_CUSTOM_OF));
 
     /**
-     * The OWL <b>object property</b> <tt>isCustomSpaceOf</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>isCustomSpaceOf</tt> (in IRI form).
      */
-    public static final UriRef IS_SPACE_CUSTOM_OF_URIREF = new UriRef(_NS_ONTONET + _SHORT_IS_SPACE_CUSTOM_OF);
+    public static final IRI IS_SPACE_CUSTOM_OF_URIREF = new IRI(_NS_ONTONET + _SHORT_IS_SPACE_CUSTOM_OF);
 
     /**
      * The OWL <b>object property</b> <tt>manages</tt>.
      */
-    public static final OWLObjectProperty MANAGES = __df.getOWLObjectProperty(IRI.create(_NS_ONTONET
+    public static final OWLObjectProperty MANAGES = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET
                                                                                          + _SHORT_MANAGES));
 
     /**
      * The OWL <b>object property</b> <tt>managesInCore</tt>.
      */
-    public static final OWLObjectProperty MANAGES_IN_CORE = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty MANAGES_IN_CORE = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_MANAGES_IN_CORE));
 
     /**
-     * The OWL <b>object property</b> <tt>managesInCore</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>managesInCore</tt> (in IRI form).
      */
-    public static final UriRef MANAGES_IN_CORE_URIREF = new UriRef(_NS_ONTONET + _SHORT_MANAGES_IN_CORE);
+    public static final IRI MANAGES_IN_CORE_URIREF = new IRI(_NS_ONTONET + _SHORT_MANAGES_IN_CORE);
     /**
      * The OWL <b>object property</b> <tt>managesInCustom</tt>.
      */
-    public static final OWLObjectProperty MANAGES_IN_CUSTOM = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty MANAGES_IN_CUSTOM = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_MANAGES_IN_CUSTOM));
 
     /**
-     * The OWL <b>object property</b> <tt>managesInCustom</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>managesInCustom</tt> (in IRI form).
      */
-    public static final UriRef MANAGES_IN_CUSTOM_URIREF = new UriRef(_NS_ONTONET + _SHORT_MANAGES_IN_CUSTOM);
+    public static final IRI MANAGES_IN_CUSTOM_URIREF = new IRI(_NS_ONTONET + _SHORT_MANAGES_IN_CUSTOM);
 
     /**
-     * The OWL <b>object property</b> <tt>manages</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>manages</tt> (in IRI form).
      */
-    public static final UriRef MANAGES_URIREF = new UriRef(_NS_ONTONET + _SHORT_MANAGES);
+    public static final IRI MANAGES_URIREF = new IRI(_NS_ONTONET + _SHORT_MANAGES);
 
     /**
      * The OWL <b>object property</b> <tt>mapsToGraph</tt>.
      */
-    public static final OWLObjectProperty MAPS_TO_GRAPH = __df.getOWLObjectProperty(IRI
+    public static final OWLObjectProperty MAPS_TO_GRAPH = __df.getOWLObjectProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_MAPS_TO_GRAPH));
 
     /**
-     * The OWL <b>object property</b> <tt>mapsToGraph</tt> (in UriRef form).
+     * The OWL <b>object property</b> <tt>mapsToGraph</tt> (in IRI form).
      */
-    public static final UriRef MAPS_TO_GRAPH_URIREF = new UriRef(_NS_ONTONET + _SHORT_MAPS_TO_GRAPH);
+    public static final IRI MAPS_TO_GRAPH_URIREF = new IRI(_NS_ONTONET + _SHORT_MAPS_TO_GRAPH);
 
     /**
      * The OWL <b>class</b> <tt>PrimaryEntry</tt>.
      */
-    public static final OWLClass PRIMARY_ENTRY = __df.getOWLClass(IRI.create(_NS_ONTONET
+    public static final OWLClass PRIMARY_ENTRY = __df.getOWLClass(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET
                                                                              + _SHORT_PRIMARY_ENTRY));
 
     /**
-     * The OWL <b>class</b> <tt>PrimaryEntry</tt> (in UriRef form).
+     * The OWL <b>class</b> <tt>PrimaryEntry</tt> (in IRI form).
      */
-    public static final UriRef PRIMARY_ENTRY_URIREF = new UriRef(_NS_ONTONET + _SHORT_PRIMARY_ENTRY);
+    public static final IRI PRIMARY_ENTRY_URIREF = new IRI(_NS_ONTONET + _SHORT_PRIMARY_ENTRY);
 
     /**
      * The OWL <b>datatype property</b> <tt>retrievedFrom</tt>.
      */
-    public static final OWLDataProperty RETRIEVED_FROM = __df.getOWLDataProperty(IRI
+    public static final OWLDataProperty RETRIEVED_FROM = __df.getOWLDataProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_RETRIEVED_FROM));
 
     /**
-     * The OWL <b>datatype property</b> <tt>retrievedFrom</tt> (in UriRef form).
+     * The OWL <b>datatype property</b> <tt>retrievedFrom</tt> (in IRI form).
      */
-    public static final UriRef RETRIEVED_FROM_URIREF = new UriRef(_NS_ONTONET + _SHORT_RETRIEVED_FROM);
+    public static final IRI RETRIEVED_FROM_URIREF = new IRI(_NS_ONTONET + _SHORT_RETRIEVED_FROM);
 
     /**
      * The OWL <b>class</b> <tt>Scope</tt>.
      */
-    public static final OWLClass SCOPE = __df.getOWLClass(IRI.create(_NS_ONTONET + _SHORT_SCOPE));
+    public static final OWLClass SCOPE = __df.getOWLClass(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET + _SHORT_SCOPE));
 
     /**
-     * The OWL <b>class</b> <tt>Scope</tt> (in UriRef form).
+     * The OWL <b>class</b> <tt>Scope</tt> (in IRI form).
      */
-    public static final UriRef SCOPE_URIREF = new UriRef(_NS_ONTONET + _SHORT_SCOPE);
+    public static final IRI SCOPE_URIREF = new IRI(_NS_ONTONET + _SHORT_SCOPE);
 
     /**
      * The OWL <b>class</b> <tt>Session</tt>.
      */
-    public static final OWLClass SESSION = __df.getOWLClass(IRI.create(_NS_ONTONET + _SHORT_SESSION));
+    public static final OWLClass SESSION = __df.getOWLClass(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET + _SHORT_SESSION));
 
     /**
-     * The OWL <b>class</b> <tt>Session</tt> (in UriRef form).
+     * The OWL <b>class</b> <tt>Session</tt> (in IRI form).
      */
-    public static final UriRef SESSION_URIREF = new UriRef(_NS_ONTONET + _SHORT_SESSION);
+    public static final IRI SESSION_URIREF = new IRI(_NS_ONTONET + _SHORT_SESSION);
 
     /**
      * The OWL <b>datatype property</b> <tt>hasSizeInAxioms</tt>.
      */
-    public static final OWLDataProperty SIZE_IN_AXIOMS = __df.getOWLDataProperty(IRI
+    public static final OWLDataProperty SIZE_IN_AXIOMS = __df.getOWLDataProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_SIZE_IN_AXIOMS));
 
     /**
-     * The OWL <b>datatype property</b> <tt>hasSizeInAxioms</tt> (in UriRef form).
+     * The OWL <b>datatype property</b> <tt>hasSizeInAxioms</tt> (in IRI form).
      */
-    public static final UriRef SIZE_IN_AXIOMS_URIREF = new UriRef(_NS_ONTONET + _SHORT_SIZE_IN_AXIOMS);
+    public static final IRI SIZE_IN_AXIOMS_URIREF = new IRI(_NS_ONTONET + _SHORT_SIZE_IN_AXIOMS);
 
     /**
      * The OWL <b>datatype property</b> <tt>hasSizeInTriples</tt>.
      */
-    public static final OWLDataProperty SIZE_IN_TRIPLES = __df.getOWLDataProperty(IRI
+    public static final OWLDataProperty SIZE_IN_TRIPLES = __df.getOWLDataProperty(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_SIZE_IN_TRIPLES));
 
     /**
-     * The OWL <b>datatype property</b> <tt>hasSizeInTriples</tt> (in UriRef form).
+     * The OWL <b>datatype property</b> <tt>hasSizeInTriples</tt> (in IRI form).
      */
-    public static final UriRef SIZE_IN_TRIPLES_URIREF = new UriRef(_NS_ONTONET + _SHORT_SIZE_IN_TRIPLES);
+    public static final IRI SIZE_IN_TRIPLES_URIREF = new IRI(_NS_ONTONET + _SHORT_SIZE_IN_TRIPLES);
 
     /**
      * The OWL <b>class</b> <tt>Space</tt>.
      */
-    public static final OWLClass SPACE = __df.getOWLClass(IRI.create(_NS_ONTONET + _SHORT_SPACE));
+    public static final OWLClass SPACE = __df.getOWLClass(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET + _SHORT_SPACE));
 
     /**
-     * The OWL <b>class</b> <tt>Space</tt> (in UriRef form).
+     * The OWL <b>class</b> <tt>Space</tt> (in IRI form).
      */
-    public static final UriRef SPACE_URIREF = new UriRef(_NS_ONTONET + _SHORT_SPACE);
+    public static final IRI SPACE_URIREF = new IRI(_NS_ONTONET + _SHORT_SPACE);
 
     /**
      * The OWL <b>class</b> <tt>Status</tt>.
      */
-    public static final OWLClass STATUS = __df.getOWLClass(IRI.create(_NS_ONTONET + _SHORT_STATUS));
+    public static final OWLClass STATUS = __df.getOWLClass(org.semanticweb.owlapi.model.IRI.create(_NS_ONTONET + _SHORT_STATUS));
 
     /**
      * The OWL <b>individual</b> <tt>Status.ACTIVE</tt>.
      */
-    public static final OWLIndividual STATUS_ACTIVE = __df.getOWLNamedIndividual(IRI
+    public static final OWLIndividual STATUS_ACTIVE = __df.getOWLNamedIndividual(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_STATUS_ACTIVE));
 
     /**
-     * The OWL <b>individual</b> <tt>Status.ACTIVE</tt> (in UriRef form).
+     * The OWL <b>individual</b> <tt>Status.ACTIVE</tt> (in IRI form).
      */
-    public static final UriRef STATUS_ACTIVE_URIREF = new UriRef(_NS_ONTONET + _SHORT_STATUS_ACTIVE);
+    public static final IRI STATUS_ACTIVE_URIREF = new IRI(_NS_ONTONET + _SHORT_STATUS_ACTIVE);
 
     /**
      * The OWL <b>individual</b> <tt>Status.INACTIVE</tt>.
      */
-    public static final OWLIndividual STATUS_INACTIVE = __df.getOWLNamedIndividual(IRI
+    public static final OWLIndividual STATUS_INACTIVE = __df.getOWLNamedIndividual(org.semanticweb.owlapi.model.IRI
             .create(_NS_ONTONET + _SHORT_STATUS_INACTIVE));
 
     /**
-     * The OWL <b>individual</b> <tt>Status.INACTIVE</tt> (in UriRef form).
+     * The OWL <b>individual</b> <tt>Status.INACTIVE</tt> (in IRI form).
      */
-    public static final UriRef STATUS_INACTIVE_URIREF = new UriRef(_NS_ONTONET + _SHORT_STATUS_INACTIVE);
+    public static final IRI STATUS_INACTIVE_URIREF = new IRI(_NS_ONTONET + _SHORT_STATUS_INACTIVE);
 
     /**
-     * The OWL <b>class</b> <tt>Status</tt> (in UriRef form).
+     * The OWL <b>class</b> <tt>Status</tt> (in IRI form).
      */
-    public static final UriRef STATUS_URIREF = new UriRef(_NS_ONTONET + _SHORT_STATUS);
+    public static final IRI STATUS_URIREF = new IRI(_NS_ONTONET + _SHORT_STATUS);
 
 }
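
Note: after this change Vocabulary.java deliberately mixes two IRI types: the
Clerezza org.apache.clerezza.commons.rdf.IRI (imported) and the OWL API
org.semanticweb.owlapi.model.IRI (kept fully qualified). A minimal sketch of
how the two forms relate, using only calls that appear in this patch; the
helper class itself is hypothetical and not part of the commit:

    import org.apache.clerezza.commons.rdf.IRI;

    // Hypothetical bridge between the two IRI types used in this file.
    final class IriBridge {
        // Clerezza IRI -> OWL API IRI, via the Unicode string form.
        static org.semanticweb.owlapi.model.IRI toOwlApi(IRI iri) {
            return org.semanticweb.owlapi.model.IRI.create(iri.getUnicodeString());
        }
        // OWL API IRI -> Clerezza IRI.
        static IRI toClerezza(org.semanticweb.owlapi.model.IRI iri) {
            return new IRI(iri.toString());
        }
    }
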
diff --git a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/io/Origin.java b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/io/Origin.java
index 4706518..27d3d0e 100644
--- a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/io/Origin.java
+++ b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/io/Origin.java
@@ -16,9 +16,8 @@
  */
 package org.apache.stanbol.ontologymanager.servicesapi.io;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcProvider;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntologyID;
 
 /**
@@ -27,7 +26,7 @@
  * <ul>
  * <li> {@link IRI}, which is interpreted as the physical location of the resource.
  * <li> {@link OWLOntologyID}, which is interpreted as the public key of an ontology already stored by Stanbol.
- * <li> {@link UriRef}, which is interpreted as the name of a graph to be retrieved from an underlying Clerezza
+ * <li> {@link IRI}, which is interpreted as the name of a graph to be retrieved from an underlying Clerezza
  * store (typically a {@link TcProvider}).
  * </ul>
  * 
@@ -45,8 +44,8 @@
      *            the physical location of the resource
      * @return the origin that wraps this IRI.
      */
-    public static Origin<IRI> create(IRI physicalURL) {
-        return new Origin<IRI>(physicalURL);
+    public static Origin<org.semanticweb.owlapi.model.IRI> create(org.semanticweb.owlapi.model.IRI physicalURL) {
+        return new Origin<org.semanticweb.owlapi.model.IRI>(physicalURL);
     }
 
     /**
@@ -69,8 +68,8 @@
      *            the graph name
      * @return the origin that wraps this graph name.
      */
-    public static Origin<UriRef> create(UriRef graphName) {
-        return new Origin<UriRef>(graphName);
+    public static Origin<IRI> create(IRI graphName) {
+        return new Origin<IRI>(graphName);
     }
 
     private R ref;
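
Note: with both Origin.create() overloads now taking a type named IRI, callers
disambiguate by the qualified name. A usage sketch (the URIs are placeholders,
and the statements assume a surrounding method body):

    // Physical location: OWL API IRI.
    Origin<org.semanticweb.owlapi.model.IRI> physical =
            Origin.create(org.semanticweb.owlapi.model.IRI.create("http://example.org/onto.owl"));
    // Name of a graph in the underlying Clerezza store: commons-rdf IRI.
    Origin<org.apache.clerezza.commons.rdf.IRI> graphName =
            Origin.create(new org.apache.clerezza.commons.rdf.IRI("urn:example:graph"));
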
diff --git a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/ontology/OntologyProvider.java b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/ontology/OntologyProvider.java
index 32ecfdf..6d4cf59 100644
--- a/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/ontology/OntologyProvider.java
+++ b/ontologymanager/generic/servicesapi/src/main/java/org/apache/stanbol/ontologymanager/servicesapi/ontology/OntologyProvider.java
@@ -20,9 +20,8 @@
 import java.io.InputStream;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedFormatException;
 import org.apache.stanbol.ontologymanager.ontonet.api.OntologyNetworkConfiguration;
@@ -144,13 +144,13 @@
 
     /**
      * Returns the graph that stores all the information on stored ontologies. Whether the returned triple
-     * collection is a {@link Graph} or a {@link MGraph} depends on the provider's policy on allowing external
+     * collection is an {@link ImmutableGraph} or a {@link Graph} depends on the provider's policy on allowing external
      * modifications to the meta-level graph or not.
      * 
      * @param returnType
      * @return
      */
-    <O extends TripleCollection> O getMetaGraph(Class<O> returnType);
+    <O extends Graph> O getMetaGraph(Class<O> returnType);
 
     @Deprecated
     OWLOntologyID getOntologyId(String storageKey);
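
Note: a sketch of how a caller picks the meta-graph view under the new
Graph-based bound (provider stands for any OntologyProvider implementation;
whether the mutable or the read-only view is actually available remains the
provider's policy, as the javadoc above states):

    Graph mutableMeta = provider.getMetaGraph(Graph.class);
    ImmutableGraph frozenMeta = provider.getMetaGraph(ImmutableGraph.class);
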
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/MGraphMultiplexer.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/GraphMultiplexer.java
similarity index 81%
rename from ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/MGraphMultiplexer.java
rename to ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/GraphMultiplexer.java
index cc97e44..d0dda3f 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/MGraphMultiplexer.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/GraphMultiplexer.java
@@ -39,15 +39,14 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.stanbol.ontologymanager.core.scope.ScopeManagerImpl;
@@ -62,7 +61,6 @@
 import org.apache.stanbol.ontologymanager.servicesapi.session.SessionEvent;
 import org.apache.stanbol.ontologymanager.servicesapi.session.SessionManager;
 import org.apache.stanbol.ontologymanager.servicesapi.util.OntologyUtils;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntologyID;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -73,7 +71,7 @@
  * @author alexdma
  * 
  */
-public class MGraphMultiplexer implements Multiplexer {
+public class GraphMultiplexer implements Multiplexer {
 
     private class InvalidMetaGraphStateException extends RuntimeException {
 
@@ -95,9 +93,9 @@
 
     private Logger log = LoggerFactory.getLogger(getClass());
 
-    private MGraph meta;
+    private Graph meta;
 
-    public MGraphMultiplexer(MGraph metaGraph) {
+    public GraphMultiplexer(Graph metaGraph) {
         this.meta = metaGraph;
     }
 
@@ -109,66 +107,66 @@
      *            the ontology
      * @return
      */
-    protected OWLOntologyID buildPublicKey(final UriRef resource) {
+    protected OWLOntologyID buildPublicKey(final IRI resource) {
         // TODO desanitize?
-        IRI oiri = null, viri = null;
+        org.semanticweb.owlapi.model.IRI oiri = null, viri = null;
         Iterator<Triple> it = meta.filter(resource, HAS_ONTOLOGY_IRI_URIREF, null);
         if (it.hasNext()) {
-            Resource obj = it.next().getObject();
-            if (obj instanceof UriRef) oiri = IRI.create(((UriRef) obj).getUnicodeString());
-            else if (obj instanceof Literal) oiri = IRI.create(((Literal) obj).getLexicalForm());
+            RDFTerm obj = it.next().getObject();
+            if (obj instanceof IRI) oiri = org.semanticweb.owlapi.model.IRI.create(((IRI) obj).getUnicodeString());
+            else if (obj instanceof Literal) oiri = org.semanticweb.owlapi.model.IRI.create(((Literal) obj).getLexicalForm());
         } else {
             // Anonymous ontology? Decode the resource itself (which is not null)
             return OntologyUtils.decode(resource.getUnicodeString());
         }
         it = meta.filter(resource, HAS_VERSION_IRI_URIREF, null);
         if (it.hasNext()) {
-            Resource obj = it.next().getObject();
-            if (obj instanceof UriRef) viri = IRI.create(((UriRef) obj).getUnicodeString());
-            else if (obj instanceof Literal) viri = IRI.create(((Literal) obj).getLexicalForm());
+            RDFTerm obj = it.next().getObject();
+            if (obj instanceof IRI) viri = org.semanticweb.owlapi.model.IRI.create(((IRI) obj).getUnicodeString());
+            else if (obj instanceof Literal) viri = org.semanticweb.owlapi.model.IRI.create(((Literal) obj).getLexicalForm());
         }
         if (viri == null) return new OWLOntologyID(oiri);
         else return new OWLOntologyID(oiri, viri);
     }
 
     /**
-     * Creates an {@link UriRef} out of an {@link OWLOntologyID}, so it can be used as an identifier. This
-     * does NOT necessarily correspond to the UriRef that identifies the stored graph. In order to obtain
+     * Creates an {@link IRI} out of an {@link OWLOntologyID}, so it can be used as an identifier. This
+     * does NOT necessarily correspond to the IRI that identifies the stored graph. In order to obtain
      * that, check the objects of any MAPS_TO_GRAPH assertions.
      * 
      * @param publicKey
      * @return
      */
-    protected UriRef buildResource(final OWLOntologyID publicKey) {
+    protected IRI buildResource(final OWLOntologyID publicKey) {
         if (publicKey == null) throw new IllegalArgumentException(
-                "Cannot build a UriRef resource on a null public key!");
-        // The UriRef is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional?)
+                "Cannot build a IRI resource on a null public key!");
+        // The IRI is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional?)
         // XXX should versionIRI also include the version IRI set by owners? Currently not
 
         // Remember not to sanitize logical identifiers.
-        IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
+        org.semanticweb.owlapi.model.IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
         if (ontologyIri == null) throw new IllegalArgumentException(
-                "Cannot build a UriRef resource on an anonymous public key!");
+                "Cannot build a IRI resource on an anonymous public key!");
         log.debug("Searching for a meta graph entry for public key:");
         log.debug(" -- {}", publicKey);
-        UriRef match = null;
+        IRI match = null;
         LiteralFactory lf = LiteralFactory.getInstance();
-        TypedLiteral oiri = lf.createTypedLiteral(new UriRef(ontologyIri.toString()));
-        TypedLiteral viri = versionIri == null ? null : lf.createTypedLiteral(new UriRef(versionIri
+        Literal oiri = lf.createTypedLiteral(new IRI(ontologyIri.toString()));
+        Literal viri = versionIri == null ? null : lf.createTypedLiteral(new IRI(versionIri
                 .toString()));
         for (Iterator<Triple> it = meta.filter(null, HAS_ONTOLOGY_IRI_URIREF, oiri); it.hasNext();) {
-            Resource subj = it.next().getSubject();
+            RDFTerm subj = it.next().getSubject();
             log.debug(" -- Ontology IRI match found. Scanning");
-            log.debug(" -- Resource : {}", subj);
-            if (!(subj instanceof UriRef)) {
+            log.debug(" -- RDFTerm : {}", subj);
+            if (!(subj instanceof IRI)) {
                 log.debug(" ---- (uncomparable: skipping...)");
                 continue;
             }
             if (viri != null) {
                 // Must find matching versionIRI
-                if (meta.contains(new TripleImpl((UriRef) subj, HAS_VERSION_IRI_URIREF, viri))) {
+                if (meta.contains(new TripleImpl((IRI) subj, HAS_VERSION_IRI_URIREF, viri))) {
                     log.debug(" ---- Version IRI match!");
-                    match = (UriRef) subj;
+                    match = (IRI) subj;
                     break; // Found
                 } else {
                     log.debug(" ---- Expected version IRI match not found.");
@@ -177,23 +175,23 @@
 
             } else {
                 // Must find unversioned resource
-                if (meta.filter((UriRef) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
+                if (meta.filter((IRI) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
                     log.debug(" ---- Unexpected version IRI found. Skipping.");
                     continue;
                 } else {
                     log.debug(" ---- Unversioned match!");
-                    match = (UriRef) subj;
+                    match = (IRI) subj;
                     break; // Found
                 }
             }
         }
-        log.debug("Matching UriRef in graph : {}", match);
-        if (match == null) return new UriRef(OntologyUtils.encode(publicKey));
+        log.debug("Matching IRI in graph : {}", match);
+        if (match == null) return new IRI(OntologyUtils.encode(publicKey));
         else return match;
 
     }
 
-    private void checkHandle(UriRef candidate, Set<OntologyCollector> handles) {
+    private void checkHandle(IRI candidate, Set<OntologyCollector> handles) {
 
         /*
          * We have to do it like this because we cannot make this class a Component and reference ONManager
@@ -208,7 +206,7 @@
         // TODO check when not explicitly typed.
         SpaceType spaceType;
         if (meta.contains(new TripleImpl(candidate, RDF.type, SPACE_URIREF))) {
-            Resource rScope;
+            RDFTerm rScope;
             Iterator<Triple> parentSeeker = meta.filter(candidate, IS_SPACE_CORE_OF_URIREF, null);
             if (parentSeeker.hasNext()) {
                 rScope = parentSeeker.next().getObject();
@@ -233,9 +231,9 @@
                     }
                 }
             }
-            if (!(rScope instanceof UriRef)) throw new InvalidMetaGraphStateException(
+            if (!(rScope instanceof IRI)) throw new InvalidMetaGraphStateException(
                     rScope + " is not a legal scope identifier.");
-            String scopeId = ((UriRef) rScope).getUnicodeString().substring(prefix_scope.length());
+            String scopeId = ((IRI) rScope).getUnicodeString().substring(prefix_scope.length());
             Scope scope = scopeManager.getScope(scopeId);
             switch (spaceType) {
                 case CORE:
@@ -261,7 +259,7 @@
             Set<OWLOntologyID> aliases = listAliases(dependent);
             aliases.add(dependent);
             for (OWLOntologyID depalias : aliases) {
-                UriRef dep = buildResource(depalias);
+                IRI dep = buildResource(depalias);
                 Iterator<Triple> it = meta.filter(dep, DEPENDS_ON_URIREF, null);
                 while (it.hasNext()) {
                     Triple t = it.next();
@@ -288,19 +286,19 @@
             Set<OWLOntologyID> aliases = listAliases(dependent);
             aliases.add(dependent);
             for (OWLOntologyID depalias : aliases) {
-                UriRef dep = buildResource(depalias);
+                IRI dep = buildResource(depalias);
                 Iterator<Triple> it = meta.filter(dep, DEPENDS_ON_URIREF, null);
                 while (it.hasNext()) {
-                    Resource obj = it.next().getObject();
+                    RDFTerm obj = it.next().getObject();
                     log.debug(" ... found {} (inverse).", obj);
-                    if (obj instanceof UriRef) dependencies.add(buildPublicKey((UriRef) obj));
+                    if (obj instanceof IRI) dependencies.add(buildPublicKey((IRI) obj));
                     else log.warn(" ... Unexpected literal value!");
                 }
                 it = meta.filter(null, HAS_DEPENDENT_URIREF, dep);
                 while (it.hasNext()) {
-                    Resource sub = it.next().getSubject();
+                    RDFTerm sub = it.next().getSubject();
                     log.debug(" ... found {} (inverse).", sub);
-                    if (sub instanceof UriRef) dependencies.add(buildPublicKey((UriRef) sub));
+                    if (sub instanceof IRI) dependencies.add(buildPublicKey((IRI) sub));
                     else log.warn(" ... Unexpected literal value!");
                 }
             }
@@ -311,21 +309,21 @@
     @Override
     public Set<OWLOntologyID> getDependents(OWLOntologyID dependency) {
         Set<OWLOntologyID> dependents = new HashSet<OWLOntologyID>();
-        UriRef dep = buildResource(dependency);
+        IRI dep = buildResource(dependency);
         log.debug("Getting depents for {}", dependency);
         synchronized (meta) {
             Iterator<Triple> it = meta.filter(null, DEPENDS_ON_URIREF, dep);
             while (it.hasNext()) {
-                Resource sub = it.next().getSubject();
+                RDFTerm sub = it.next().getSubject();
                 log.debug(" ... found {} (inverse).", sub);
-                if (sub instanceof UriRef) dependents.add(buildPublicKey((UriRef) sub));
+                if (sub instanceof IRI) dependents.add(buildPublicKey((IRI) sub));
                 else log.warn(" ... Unexpected literal value!");
             }
             it = meta.filter(dep, HAS_DEPENDENT_URIREF, null);
             while (it.hasNext()) {
-                Resource obj = it.next().getObject();
+                RDFTerm obj = it.next().getObject();
                 log.debug(" ... found {} (inverse).", obj);
-                if (obj instanceof UriRef) dependents.add(buildPublicKey((UriRef) obj));
+                if (obj instanceof IRI) dependents.add(buildPublicKey((IRI) obj));
                 else log.warn(" ... Unexpected literal value!");
             }
         }
@@ -338,18 +336,18 @@
         Set<OWLOntologyID> aliases = listAliases(publicKey);
         aliases.add(publicKey);
         for (OWLOntologyID alias : aliases) {
-            UriRef ontologyId = buildResource(alias);
+            IRI ontologyId = buildResource(alias);
 
             for (Iterator<Triple> it = meta.filter(null, MANAGES_URIREF, ontologyId); it.hasNext();) {
-                NonLiteral sub = it.next().getSubject();
-                if (sub instanceof UriRef) checkHandle((UriRef) sub, handles);
+                BlankNodeOrIRI sub = it.next().getSubject();
+                if (sub instanceof IRI) checkHandle((IRI) sub, handles);
                 else throw new InvalidMetaGraphStateException(
                         sub + " is not a valid ontology collector identifer.");
             }
 
             for (Iterator<Triple> it = meta.filter(ontologyId, IS_MANAGED_BY_URIREF, null); it.hasNext();) {
-                Resource obj = it.next().getObject();
-                if (obj instanceof UriRef) checkHandle((UriRef) obj, handles);
+                RDFTerm obj = it.next().getObject();
+                if (obj instanceof IRI) checkHandle((IRI) obj, handles);
                 else throw new InvalidMetaGraphStateException(
                         obj + " is not a valid ontology collector identifer.");
             }
@@ -358,26 +356,26 @@
         // throw new UnsupportedOperationException("Not implemented yet.");
     }
 
-    private UriRef getIRIforScope(String scopeId) {
+    private IRI getIRIforScope(String scopeId) {
         // Use the Stanbol-internal namespace, so that the whole configuration can be ported.
-        return new UriRef(_NS_STANBOL_INTERNAL + Scope.shortName + "/" + scopeId);
+        return new IRI(_NS_STANBOL_INTERNAL + Scope.shortName + "/" + scopeId);
     }
 
-    private UriRef getIRIforSession(Session session) {
+    private IRI getIRIforSession(Session session) {
         // Use the Stanbol-internal namespace, so that the whole configuration can be ported.
-        return new UriRef(_NS_STANBOL_INTERNAL + Session.shortName + "/" + session.getID());
+        return new IRI(_NS_STANBOL_INTERNAL + Session.shortName + "/" + session.getID());
     }
 
-    private UriRef getIRIforSpace(OntologySpace space) {
+    private IRI getIRIforSpace(OntologySpace space) {
         // Use the Stanbol-internal namespace, so that the whole configuration can be ported.
-        return new UriRef(_NS_STANBOL_INTERNAL + OntologySpace.shortName + "/" + space.getID());
+        return new IRI(_NS_STANBOL_INTERNAL + OntologySpace.shortName + "/" + space.getID());
     }
 
     @Override
     public OWLOntologyID getPublicKey(String stringForm) {
         if (stringForm == null || stringForm.trim().isEmpty()) throw new IllegalArgumentException(
                 "String form must not be null or empty.");
-        return buildPublicKey(new UriRef(stringForm));
+        return buildPublicKey(new IRI(stringForm));
     }
 
     @Override
@@ -385,18 +383,18 @@
         Set<OWLOntologyID> result = new HashSet<OWLOntologyID>();
         Iterator<Triple> it = meta.filter(null, RDF.type, ENTRY_URIREF);
         while (it.hasNext()) {
-            Resource obj = it.next().getSubject();
-            if (obj instanceof UriRef) result.add(buildPublicKey((UriRef) obj));
+            RDFTerm obj = it.next().getSubject();
+            if (obj instanceof IRI) result.add(buildPublicKey((IRI) obj));
         }
         return result;
     }
 
     @Override
     public int getSize(OWLOntologyID publicKey) {
-        UriRef subj = buildResource(publicKey);
+        IRI subj = buildResource(publicKey);
         Iterator<Triple> it = meta.filter(subj, SIZE_IN_TRIPLES_URIREF, null);
         if (it.hasNext()) {
-            Resource obj = it.next().getObject();
+            RDFTerm obj = it.next().getObject();
             if (obj instanceof Literal) {
                 String s = ((Literal) obj).getLexicalForm();
                 try {
@@ -417,16 +415,16 @@
         if (publicKey == null || publicKey.isAnonymous()) throw new IllegalArgumentException(
                 "Cannot locate aliases for null or anonymous public keys.");
         Set<OWLOntologyID> aliases = new HashSet<OWLOntologyID>();
-        UriRef ont = buildResource(publicKey);
+        IRI ont = buildResource(publicKey);
         // Forwards
         for (Iterator<Triple> it = meta.filter(ont, OWL.sameAs, null); it.hasNext();) {
-            Resource r = it.next().getObject();
-            if (r instanceof UriRef) aliases.add(buildPublicKey((UriRef) r));
+            RDFTerm r = it.next().getObject();
+            if (r instanceof IRI) aliases.add(buildPublicKey((IRI) r));
         }
         // Backwards
         for (Iterator<Triple> it = meta.filter(null, OWL.sameAs, ont); it.hasNext();) {
-            Resource r = it.next().getSubject();
-            if (r instanceof UriRef) aliases.add(buildPublicKey((UriRef) r));
+            RDFTerm r = it.next().getSubject();
+            if (r instanceof IRI) aliases.add(buildPublicKey((IRI) r));
         }
         return aliases;
     }
@@ -443,9 +441,9 @@
         if (collector instanceof Scope) colltype = Scope.shortName + "/"; // Cannot be
         else if (collector instanceof OntologySpace) colltype = OntologySpace.shortName + "/";
         else if (collector instanceof Session) colltype = Session.shortName + "/";
-        UriRef c = new UriRef(_NS_STANBOL_INTERNAL + colltype + collector.getID());
-        UriRef u =
-        // new UriRef(prefix + "::" + keymap.buildResource(addedOntology).getUnicodeString());
+        IRI c = new IRI(_NS_STANBOL_INTERNAL + colltype + collector.getID());
+        IRI u =
+        // new IRI(prefix + "::" + keymap.buildResource(addedOntology).getUnicodeString());
         // keymap.getMapping(addedOntology);
         buildResource(addedOntology);
 
@@ -464,7 +462,7 @@
         if (!hasValues) log.debug("-- <none>");
 
         // Add both inverse triples. This graph has to be traversed efficiently, no need for reasoners.
-        UriRef predicate1 = null, predicate2 = null;
+        IRI predicate1 = null, predicate2 = null;
         if (collector instanceof OntologySpace) {
             predicate1 = MANAGES_URIREF;
             predicate2 = IS_MANAGED_BY_URIREF;
@@ -503,17 +501,17 @@
         if (collector instanceof Scope) colltype = Scope.shortName + "/"; // Cannot be
         else if (collector instanceof OntologySpace) colltype = OntologySpace.shortName + "/";
         else if (collector instanceof Session) colltype = Session.shortName + "/";
-        UriRef c = new UriRef(_NS_STANBOL_INTERNAL + colltype + collector.getID());
+        IRI c = new IRI(_NS_STANBOL_INTERNAL + colltype + collector.getID());
         Set<OWLOntologyID> aliases = listAliases(removedOntology);
         aliases.add(removedOntology);
         boolean badState = true;
         for (OWLOntologyID alias : aliases) {
-            UriRef u = buildResource(alias);
+            IRI u = buildResource(alias);
             // XXX condense the following code
 
             log.debug("Checking ({},{}) pattern", c, u);
             for (Iterator<Triple> it = meta.filter(c, null, u); it.hasNext();) {
-                UriRef property = it.next().getPredicate();
+                IRI property = it.next().getPredicate();
                 if (collector instanceof OntologySpace || collector instanceof Session) {
                     if (property.equals(MANAGES_URIREF)) badState = false;
                 }
@@ -521,7 +519,7 @@
 
             log.debug("Checking ({},{}) pattern", u, c);
             for (Iterator<Triple> it = meta.filter(u, null, c); it.hasNext();) {
-                UriRef property = it.next().getPredicate();
+                IRI property = it.next().getPredicate();
                 if (collector instanceof OntologySpace || collector instanceof Session) {
                     if (property.equals(IS_MANAGED_BY_URIREF)) badState = false;
                 }
@@ -547,12 +545,12 @@
         log.debug("Removing dependency.");
         log.debug(" ... dependent : {}", dependent);
         log.debug(" ... dependency : {}", dependency);
-        UriRef depy = buildResource(dependency);
+        IRI depy = buildResource(dependency);
         synchronized (meta) {
             Set<OWLOntologyID> aliases = listAliases(dependent);
             aliases.add(dependent);
             for (OWLOntologyID depalias : aliases) {
-                UriRef dep = buildResource(depalias);
+                IRI dep = buildResource(depalias);
                 Triple t = new TripleImpl(dep, DEPENDS_ON_URIREF, depy);
                 boolean found = false;
                 if (meta.contains(t)) {
@@ -575,10 +573,10 @@
 
     @Override
     public void scopeAppended(Session session, String scopeId) {
-        final UriRef sessionur = getIRIforSession(session), scopeur = getIRIforScope(scopeId);
+        final IRI sessionur = getIRIforSession(session), scopeur = getIRIforScope(scopeId);
         if (sessionur == null || scopeur == null) throw new IllegalArgumentException(
-                "UriRefs for scope and session cannot be null.");
-        if (meta instanceof MGraph) synchronized (meta) {
+                "IRIs for scope and session cannot be null.");
+        if (meta instanceof Graph) synchronized (meta) {
             meta.add(new TripleImpl(sessionur, HAS_APPENDED_URIREF, scopeur));
             meta.add(new TripleImpl(scopeur, APPENDED_TO_URIREF, sessionur));
         }
@@ -592,10 +590,10 @@
 
     @Override
     public void scopeDetached(Session session, String scopeId) {
-        final UriRef sessionur = getIRIforSession(session), scopeur = getIRIforScope(scopeId);
+        final IRI sessionur = getIRIforSession(session), scopeur = getIRIforScope(scopeId);
         if (sessionur == null || scopeur == null) throw new IllegalArgumentException(
-                "UriRefs for scope and session cannot be null.");
-        if (meta instanceof MGraph) synchronized (meta) {
+                "IRIs for scope and session cannot be null.");
+        if (meta instanceof Graph) synchronized (meta) {
             // TripleImpl implements equals() and hashCode() ...
             meta.remove(new TripleImpl(sessionur, HAS_APPENDED_URIREF, scopeur));
             meta.remove(new TripleImpl(scopeur, APPENDED_TO_URIREF, sessionur));
@@ -633,7 +631,7 @@
         log.debug("Setting dependency.");
         log.debug(" ... dependent : {}", dependent);
         log.debug(" ... dependency : {}", dependency);
-        UriRef dep = buildResource(dependent), depy = buildResource(dependency);
+        IRI dep = buildResource(dependent), depy = buildResource(dependency);
         // TODO check for the actual resource!
         synchronized (meta) {
             meta.add(new TripleImpl(dep, DEPENDS_ON_URIREF, depy));
@@ -648,9 +646,9 @@
      *            the scope whose information needs to be updated.
      */
     private void updateScopeRegistration(Scope scope) {
-        final UriRef scopeur = getIRIforScope(scope.getID());
-        final UriRef coreur = getIRIforSpace(scope.getCoreSpace());
-        final UriRef custur = getIRIforSpace(scope.getCustomSpace());
+        final IRI scopeur = getIRIforScope(scope.getID());
+        final IRI coreur = getIRIforSpace(scope.getCoreSpace());
+        final IRI custur = getIRIforSpace(scope.getCustomSpace());
         // If this method was called after a scope rebuild, the following will have little to no effect.
         synchronized (meta) {
             // Spaces are created along with the scope, so it is safe to add their triples.
@@ -675,9 +673,9 @@
     private void updateScopeUnregistration(Scope scope) {
         long before = System.currentTimeMillis();
         boolean removable = false, conflict = false;
-        final UriRef scopeur = getIRIforScope(scope.getID());
-        final UriRef coreur = getIRIforSpace(scope.getCoreSpace());
-        final UriRef custur = getIRIforSpace(scope.getCustomSpace());
+        final IRI scopeur = getIRIforScope(scope.getID());
+        final IRI coreur = getIRIforSpace(scope.getCoreSpace());
+        final IRI custur = getIRIforSpace(scope.getCustomSpace());
         Set<Triple> removeUs = new HashSet<Triple>();
         for (Iterator<Triple> it = meta.filter(scopeur, null, null); it.hasNext();) {
             Triple t = it.next();
@@ -715,7 +713,7 @@
     }
 
     private void updateSessionRegistration(Session session) {
-        final UriRef sesur = getIRIforSession(session);
+        final IRI sesur = getIRIforSession(session);
         // If this method was called after a session rebuild, the following will have little to no effect.
         synchronized (meta) {
             // The only essential triple to add is typing
@@ -727,7 +725,7 @@
     private void updateSessionUnregistration(Session session) {
         long before = System.currentTimeMillis();
         boolean removable = false, conflict = false;
-        final UriRef sessionur = getIRIforSession(session);
+        final IRI sessionur = getIRIforSession(session);
         Set<Triple> removeUs = new HashSet<Triple>();
         for (Iterator<Triple> it = meta.filter(sessionur, null, null); it.hasNext();) {
             Triple t = it.next();
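
Note: the recurring pattern in GraphMultiplexer after this port: filter()
yields Triples whose terms are typed as RDFTerm and are narrowed to IRI with
instanceof before use. A condensed sketch, reusing names from the code above:

    Iterator<Triple> entries = meta.filter(null, RDF.type, ENTRY_URIREF);
    while (entries.hasNext()) {
        RDFTerm subj = entries.next().getSubject();
        // Only named resources can be decoded into ontology public keys.
        if (subj instanceof IRI) {
            OWLOntologyID key = buildPublicKey((IRI) subj);
            // ... collect or log the key
        }
    }
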
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/AbstractOntologyCollectorImpl.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/AbstractOntologyCollectorImpl.java
index 8bed032..3848311 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/AbstractOntologyCollectorImpl.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/AbstractOntologyCollectorImpl.java
@@ -27,19 +27,18 @@
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.owl.util.URIUtils;
 import org.apache.stanbol.ontologymanager.servicesapi.collector.Lockable;
 import org.apache.stanbol.ontologymanager.servicesapi.collector.MissingOntologyException;
@@ -57,7 +57,6 @@
 import org.apache.stanbol.ontologymanager.sources.owlapi.RootOntologySource;
 import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.model.AddImport;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLDataFactory;
 import org.semanticweb.owlapi.model.OWLImportsDeclaration;
 import org.semanticweb.owlapi.model.OWLOntology;
@@ -109,17 +108,17 @@
      */
     protected Set<OWLOntologyID> managedOntologies;
 
-    protected IRI namespace = null;
+    protected org.semanticweb.owlapi.model.IRI namespace = null;
 
     protected OntologyProvider<?> ontologyProvider;
 
     protected Set<Class<?>> supportedTypes;
 
-    public AbstractOntologyCollectorImpl(String id, IRI namespace, OntologyProvider<?> ontologyProvider) {
+    public AbstractOntologyCollectorImpl(String id, org.semanticweb.owlapi.model.IRI namespace, OntologyProvider<?> ontologyProvider) {
         // Supports OWL API and Clerezza
         supportedTypes = new HashSet<Class<?>>();
         supportedTypes.add(OWLOntology.class);
-        supportedTypes.add(TripleCollection.class);
+        supportedTypes.add(Graph.class);
         setID(id);
         setDefaultNamespace(namespace);
         this.ontologyProvider = ontologyProvider;
@@ -140,8 +139,8 @@
             long before = System.currentTimeMillis();
             Object o = ontologySource.getRootOntology();
             // // FIXME restore ownership management, but maybe not by directly setting the versionIRI
-            // if (ontologyProvider.hasOntology(id.getOntologyIRI())) if (o instanceof MGraph)
-            // claimOwnership((MGraph) o);
+            // if (ontologyProvider.hasOntology(id.getOntologyIRI())) if (o instanceof Graph)
+            // claimOwnership((Graph) o);
             // else if (o instanceof OWLOntology) claimOwnership((OWLOntology) o);
 
             // Check the origin anyhow, as it may be useful for setting aliases with physical locations etc.
@@ -161,15 +160,15 @@
             Origin<?> origin = ontologySource.getOrigin();
             Object ref = origin.getReference();
             log.debug("Origin wraps a {}", ref.getClass().getCanonicalName());
-            if (ref instanceof IRI) try {
+            if (ref instanceof org.semanticweb.owlapi.model.IRI) try {
                 log.debug("Deferring addition to physical IRI {} (if available).", ref);
-                key = addOntology(new RootOntologySource((IRI) ref));
+                key = addOntology(new RootOntologySource((org.semanticweb.owlapi.model.IRI) ref));
             } catch (OWLOntologyCreationException e) {
                 throw new RuntimeException(e);
             }
-            else if (ref instanceof UriRef) {
+            else if (ref instanceof IRI) {
                 log.debug("Deferring addition to stored Clerezza graph {} (if available).", ref);
-                key = addOntology(new GraphSource((UriRef) ref));
+                key = addOntology(new GraphSource((IRI) ref));
             } else if (ref instanceof OWLOntologyID) {
                 OWLOntologyID idref = (OWLOntologyID) ref;
                 log.debug("Deferring addition to stored ontology with public key {} (if available).", ref);
@@ -219,15 +218,15 @@
 
     @SuppressWarnings("unchecked")
     @Override
-    public <O> O export(Class<O> returnType, boolean merge, IRI universalPrefix) {
+    public <O> O export(Class<O> returnType, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
         if (OWLOntology.class.isAssignableFrom(returnType)) {
             return (O) exportToOWLOntology(merge, universalPrefix);
         }
-        if (TripleCollection.class.isAssignableFrom(returnType)) {
-            TripleCollection root = exportToMGraph(merge, universalPrefix);
+        if (Graph.class.isAssignableFrom(returnType)) {
+            Graph root = exportToGraph(merge, universalPrefix);
             // A Clerezza graph has to be cast properly.
-            if (returnType == Graph.class) root = ((MGraph) root).getGraph();
-            else if (returnType == MGraph.class) {}
+            if (returnType == ImmutableGraph.class) root = ((Graph) root).getImmutableGraph();
+            else if (returnType == Graph.class) {}
             return (O) root;
         }
         throw new UnsupportedOperationException("Cannot export ontology collector " + getID() + " to a "
@@ -241,15 +240,15 @@
      * @param merge
      * @return
      */
-    protected MGraph exportToMGraph(boolean merge, IRI prefix) {
+    protected Graph exportToGraph(boolean merge, org.semanticweb.owlapi.model.IRI prefix) {
         // if (merge) throw new UnsupportedOperationException(
         // "Merge not implemented yet for Clerezza triple collections.");
 
         long before = System.currentTimeMillis();
 
         // No need to store, give it a name, or anything.
-        MGraph root = new SimpleMGraph();
-        UriRef iri = new UriRef(prefix + _id);
+        Graph root = new SimpleGraph();
+        IRI iri = new IRI(prefix + _id);
         // Add the import declarations for directly managed ontologies.
         if (root != null) {
             // Set the ontology ID
@@ -258,21 +257,21 @@
             if (merge) {
                 log.warn("Merging of Clerezza triple collections is only implemented one level down. Import statements will be preserved for further levels.");
                 Iterator<Triple> it;
-                Set<Resource> importTargets = new HashSet<Resource>();
+                Set<RDFTerm> importTargets = new HashSet<RDFTerm>();
                 for (OWLOntologyID ontologyId : managedOntologies) {
-                    Graph g = getOntology(ontologyId, Graph.class, false);
+                    ImmutableGraph g = getOntology(ontologyId, ImmutableGraph.class, false);
                     root.addAll(g);
 
                     it = g.filter(null, OWL.imports, null);
                     while (it.hasNext()) {
-                        IRI tgt;
-                        Resource r = it.next().getObject();
+                        org.semanticweb.owlapi.model.IRI tgt;
+                        RDFTerm r = it.next().getObject();
                         try {
-                            if (r instanceof UriRef) tgt = IRI.create(((UriRef) r).getUnicodeString());
-                            else if (r instanceof Literal) tgt = IRI.create(((Literal) r).getLexicalForm());
-                            else tgt = IRI.create(r.toString());
+                            if (r instanceof IRI) tgt = org.semanticweb.owlapi.model.IRI.create(((IRI) r).getUnicodeString());
+                            else if (r instanceof Literal) tgt = org.semanticweb.owlapi.model.IRI.create(((Literal) r).getLexicalForm());
+                            else tgt = org.semanticweb.owlapi.model.IRI.create(r.toString());
                             tgt = URIUtils.sanitize(tgt);
-                            importTargets.add(new UriRef(tgt.toString()));
+                            importTargets.add(new IRI(tgt.toString()));
                         } catch (Exception ex) {
                             log.error("FAILED to obtain import target from resource {}", r);
                             continue;
@@ -282,7 +281,7 @@
 
                     it = g.filter(null, RDF.type, OWL.Ontology);
                     while (it.hasNext()) {
-                        NonLiteral ontology = it.next().getSubject();
+                        BlankNodeOrIRI ontology = it.next().getSubject();
                         log.debug("Removing all triples related to {} from {}", ontology, iri);
                         Iterator<Triple> it2 = g.filter(ontology, null, null);
                         while (it2.hasNext())
@@ -293,7 +292,7 @@
                      * Reinstate import statements, though. If imported ontologies were not merged earlier, we
                      * are not doing it now anyway.
                      */
-                    for (Resource target : importTargets)
+                    for (RDFTerm target : importTargets)
                         root.add(new TripleImpl(iri, OWL.imports, target));
                 }
 
@@ -306,12 +305,12 @@
 
                 // The key set of managedOntologies contains the ontology IRIs, not their storage keys.
                 for (OWLOntologyID ontologyId : managedOntologies) {
-                    IRI physIRI =
+                    org.semanticweb.owlapi.model.IRI physIRI =
                     // ontologyId.getVersionIRI() == null ? URIUtils.sanitize(IRI
                     // .create(base + ontologyId.getOntologyIRI())) : URIUtils.sanitize(IRI
                     // .create(base + ontologyId.getVersionIRI()));
-                    IRI.create(base + OntologyUtils.encode(ontologyId));
-                    root.add(new TripleImpl(iri, OWL.imports, new UriRef(physIRI.toString())));
+                    org.semanticweb.owlapi.model.IRI.create(base + OntologyUtils.encode(ontologyId));
+                    root.add(new TripleImpl(iri, OWL.imports, new IRI(physIRI.toString())));
                 }
             }
 
@@ -322,13 +321,13 @@
         return root;
     }
 
-    private TripleCollection getMergedTc() {
-        TripleCollection result = new SimpleMGraph(); // Takes less memory than the Indexed one
+    private Graph getMergedTc() {
+        Graph result = new SimpleGraph(); // Takes less memory than the Indexed one
 
         for (OWLOntologyID key : listManagedOntologies()) {
             // TODO when implemented, switch to true.
-            TripleCollection managed = getOntology(key, TripleCollection.class, false);
-            Set<Resource> exclusions = new HashSet<Resource>();
+            Graph managed = getOntology(key, Graph.class, false);
+            Set<RDFTerm> exclusions = new HashSet<RDFTerm>();
             Iterator<Triple> it = managed.filter(null, RDF.type, OWL.Ontology);
             while (it.hasNext())
                 exclusions.add(it.next().getSubject());
@@ -350,14 +349,14 @@
      * @param merge
      * @return
      */
-    protected OWLOntology exportToOWLOntology(boolean merge, IRI prefix) {
+    protected OWLOntology exportToOWLOntology(boolean merge, org.semanticweb.owlapi.model.IRI prefix) {
 
         long before = System.currentTimeMillis();
 
         // Create a new ontology
         OWLOntology root;
         OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager();
-        IRI iri = IRI.create(prefix + _id);
+        org.semanticweb.owlapi.model.IRI iri = org.semanticweb.owlapi.model.IRI.create(prefix + _id);
         try {
             root = ontologyManager.createOntology(iri);
         } catch (OWLOntologyAlreadyExistsException e) {
@@ -418,7 +417,7 @@
                 // The key set of managedOntologies contains the ontology IRIs, not their storage keys.
                 for (OWLOntologyID ontologyId : managedOntologies) {
                     // XXX some day the versionIRI will be the only physical reference for the ontology
-                    IRI physIRI = IRI.create(base + OntologyUtils.encode(ontologyId));
+                    org.semanticweb.owlapi.model.IRI physIRI = org.semanticweb.owlapi.model.IRI.create(base + OntologyUtils.encode(ontologyId));
                     changes.add(new AddImport(root, df.getOWLImportsDeclaration(physIRI)));
                 }
                 ontologyManager.applyChanges(changes);
@@ -458,7 +457,7 @@
     }
 
     @Override
-    public IRI getDefaultNamespace() {
+    public org.semanticweb.owlapi.model.IRI getDefaultNamespace() {
         return this.namespace;
     }
 
@@ -478,27 +477,27 @@
     }
 
     @Override
-    public IRI getNamespace() {
+    public org.semanticweb.owlapi.model.IRI getNamespace() {
         return getDefaultNamespace();
     }
 
     @Override
-    public <O> O getOntology(IRI ontologyIri, Class<O> returnType) {
+    public <O> O getOntology(org.semanticweb.owlapi.model.IRI ontologyIri, Class<O> returnType) {
         return getOntology(new OWLOntologyID(ontologyIri), returnType);
     }
 
     @Override
-    public <O> O getOntology(IRI ontologyIri, Class<O> returnType, boolean merge) {
+    public <O> O getOntology(org.semanticweb.owlapi.model.IRI ontologyIri, Class<O> returnType, boolean merge) {
         return getOntology(new OWLOntologyID(ontologyIri), returnType, merge);
     }
 
     @Override
-    public <O> O getOntology(IRI ontologyIri, Class<O> returnType, boolean merge, IRI universalPrefix) {
+    public <O> O getOntology(org.semanticweb.owlapi.model.IRI ontologyIri, Class<O> returnType, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
         return getOntology(new OWLOntologyID(ontologyIri), returnType, merge, universalPrefix);
     }
 
     @Override
-    public <O> O getOntology(IRI ontologyIri, Class<O> returnType, IRI universalPrefix) {
+    public <O> O getOntology(org.semanticweb.owlapi.model.IRI ontologyIri, Class<O> returnType, org.semanticweb.owlapi.model.IRI universalPrefix) {
         return getOntology(new OWLOntologyID(ontologyIri), returnType, universalPrefix);
     }
 
@@ -514,15 +513,15 @@
 
     @SuppressWarnings("unchecked")
     @Override
-    public <O> O getOntology(OWLOntologyID ontologyId, Class<O> returnType, boolean merge, IRI universalPrefix) {
+    public <O> O getOntology(OWLOntologyID ontologyId, Class<O> returnType, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
         if (OWLOntology.class.isAssignableFrom(returnType)) return (O) getOntologyAsOWLOntology(ontologyId,
             merge, universalPrefix);
-        if (TripleCollection.class.isAssignableFrom(returnType)) {
-            TripleCollection root = getOntologyAsMGraph(ontologyId, merge, universalPrefix);
+        if (Graph.class.isAssignableFrom(returnType)) {
+            Graph root = getOntologyAsGraph(ontologyId, merge, universalPrefix);
             // A Clerezza graph has to be cast properly.
-            if (returnType == Graph.class) root = ((MGraph) root).getGraph();
-            else if (returnType == MGraph.class) {}
-            // We don't know of other TripleCollection subclasses: just try to cast the MGraph.
+            if (returnType == ImmutableGraph.class) root = ((Graph) root).getImmutableGraph();
+            else if (returnType == Graph.class) {}
+            // We don't know of other Graph subclasses: just try to cast the Graph.
             return (O) root;
         }
         throw new UnsupportedOperationException("Cannot export ontology collector " + getID() + " to a "
@@ -530,11 +529,11 @@
     }
 
     @Override
-    public <O> O getOntology(OWLOntologyID ontologyId, Class<O> returnType, IRI universalPrefix) {
+    public <O> O getOntology(OWLOntologyID ontologyId, Class<O> returnType, org.semanticweb.owlapi.model.IRI universalPrefix) {
         return getOntology(ontologyId, returnType, false, universalPrefix);
     }
 
-    protected MGraph getOntologyAsMGraph(OWLOntologyID ontologyId, boolean merge, IRI universalPrefix) {
+    protected Graph getOntologyAsGraph(OWLOntologyID ontologyId, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
         if (merge) throw new UnsupportedOperationException(
                 "Merge not implemented yet for Clerezza triple collections.");
         /*
@@ -543,7 +542,7 @@
          * imported ontologies as *not* managed.
          */
         // if (!merge) { // TODO
-        MGraph o = new IndexedMGraph(ontologyProvider.getStoredOntology(ontologyId, MGraph.class, merge));
+        Graph o = new IndexedGraph(ontologyProvider.getStoredOntology(ontologyId, Graph.class, merge));
 
         // Now rewrite import statements
 
@@ -563,11 +562,11 @@
                 replaceUs.add(it.next());
 
             for (Triple t : replaceUs) {
-                String s = ((UriRef) (t.getObject())).getUnicodeString();
+                String s = ((IRI) (t.getObject())).getUnicodeString();
                 // FIXME note the different import targets in the OWLOntology and TripleCollection objects!
                 // s = s.substring(s.indexOf("::") + 2, s.length());
-                boolean managed = managedOntologies.contains(IRI.create(s));
-                UriRef target = new UriRef((managed ? universalPrefix + "/" + tid + "/"
+                boolean managed = managedOntologies.contains(org.semanticweb.owlapi.model.IRI.create(s));
+                IRI target = new IRI((managed ? universalPrefix + "/" + tid + "/"
                         : URIUtils.upOne(universalPrefix) + "/")
                                            + s);
                 o.remove(t);
@@ -585,7 +584,7 @@
 
     protected OWLOntology getOntologyAsOWLOntology(OWLOntologyID ontologyId,
                                                    boolean merge,
-                                                   IRI universalPrefix) {
+                                                   org.semanticweb.owlapi.model.IRI universalPrefix) {
         // if (merge) throw new UnsupportedOperationException("Merge not implemented yet for OWLOntology.");
 
         // Remove the check below. It might be an unmanaged dependency (TODO remove from collector and
@@ -638,7 +637,7 @@
                 String tid = getID();
                 if (backwardPathLength > 0) tid = tid.split("/")[0];
 
-                IRI target = IRI.create((managed ? universalPrefix + "/" + tid + "/" : URIUtils
+                org.semanticweb.owlapi.model.IRI target = org.semanticweb.owlapi.model.IRI.create((managed ? universalPrefix + "/" + tid + "/" : URIUtils
                         .upOne(universalPrefix) + "/")
                                         + s);
                 changes.add(new AddImport(o, df.getOWLImportsDeclaration(target)));
@@ -660,7 +659,7 @@
     }
 
     @Override
-    public boolean hasOntology(IRI ontologyIri) {
+    public boolean hasOntology(org.semanticweb.owlapi.model.IRI ontologyIri) {
         return hasOntology(new OWLOntologyID(ontologyIri));
     }
 
@@ -684,7 +683,7 @@
     }
 
     @Override
-    public void removeOntology(IRI ontologyId) throws OntologyCollectorModificationException {
+    public void removeOntology(org.semanticweb.owlapi.model.IRI ontologyId) throws OntologyCollectorModificationException {
         removeOntology(new OWLOntologyID(ontologyId));
     }
 
@@ -724,7 +723,7 @@
      *            will be logged.
      */
     @Override
-    public void setDefaultNamespace(IRI namespace) {
+    public void setDefaultNamespace(org.semanticweb.owlapi.model.IRI namespace) {
         if (namespace == null) throw new IllegalArgumentException(
                 "Stanbol ontology namespace cannot be null.");
         if (namespace.toURI().getQuery() != null) throw new IllegalArgumentException(
@@ -736,7 +735,7 @@
         if (!namespace.toString().endsWith("/")) {
             log.warn("Namespace {} does not end with a slash ('/') character. It be added automatically.",
                 namespace);
-            namespace = IRI.create(namespace + "/");
+            namespace = org.semanticweb.owlapi.model.IRI.create(namespace + "/");
         }
         this.namespace = namespace;
     }
@@ -744,7 +743,7 @@
     protected abstract void setID(String id);
 
     @Override
-    public void setNamespace(IRI namespace) {
+    public void setNamespace(org.semanticweb.owlapi.model.IRI namespace) {
         setDefaultNamespace(namespace);
     }
 
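
A side effect of the port in AbstractOntologyCollectorImpl is a name clash: Clerezza's org.apache.clerezza.commons.rdf.IRI collides with the OWL API's org.semanticweb.owlapi.model.IRI, which is why the OWL API import is dropped and its IRI is written fully qualified throughout. Conversion between the two goes through their string forms, as in this sketch (the example IRI is illustrative):

    import org.apache.clerezza.commons.rdf.IRI;

    public class IriBridgeSketch {
        public static void main(String[] args) {
            // Clerezza 1.0 RDF term.
            IRI clerezzaIri = new IRI("http://example.org/onto/demo"); // hypothetical
            // OWL API IRI, fully qualified to sidestep the import clash.
            org.semanticweb.owlapi.model.IRI owlIri =
                    org.semanticweb.owlapi.model.IRI.create(clerezzaIri.getUnicodeString());
            // And back again.
            IRI roundTrip = new IRI(owlIri.toString());
            System.out.println(roundTrip.getUnicodeString());
        }
    }
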
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/CustomSpaceImpl.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/CustomSpaceImpl.java
index 2425aa8..d138ee8 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/CustomSpaceImpl.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/CustomSpaceImpl.java
@@ -20,18 +20,17 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.stanbol.ontologymanager.servicesapi.ontology.OntologyProvider;
 import org.apache.stanbol.ontologymanager.servicesapi.scope.OntologySpace;
 import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.model.AddImport;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyID;
 
@@ -49,7 +48,7 @@
         return (scopeID != null ? scopeID : "") + "/" + SUFFIX;
     }
 
-    public CustomSpaceImpl(String scopeID, IRI namespace, OntologyProvider<?> ontologyProvider) {
+    public CustomSpaceImpl(String scopeID, org.semanticweb.owlapi.model.IRI namespace, OntologyProvider<?> ontologyProvider) {
         super(buildId(scopeID), namespace, SpaceType.CUSTOM, ontologyProvider);
     }
 
@@ -68,13 +67,13 @@
     }
 
     @Override
-    protected MGraph getOntologyAsMGraph(OWLOntologyID ontologyId, boolean merge, IRI universalPrefix) {
-        MGraph o = super.getOntologyAsMGraph(ontologyId, merge, universalPrefix);
+    protected Graph getOntologyAsGraph(OWLOntologyID ontologyId, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
+        Graph o = super.getOntologyAsGraph(ontologyId, merge, universalPrefix);
         switch (getConnectivityPolicy()) {
             case LOOSE:
                 break;
             case TIGHT:
-                Set<NonLiteral> onts = new HashSet<NonLiteral>(); // Expected to be a singleton
+                Set<BlankNodeOrIRI> onts = new HashSet<BlankNodeOrIRI>(); // Expected to be a singleton
                 synchronized (o) {
                     Iterator<Triple> it = o.filter(null, RDF.type, OWL.Ontology);
                     while (it.hasNext())
@@ -83,8 +82,8 @@
                 String s = getID();
                 s = s.substring(0, s.indexOf(SUFFIX)); // strip "custom"
                 s += SpaceType.CORE.getIRISuffix(); // concatenate "core"
-                UriRef target = new UriRef(universalPrefix + s);
-                for (NonLiteral subject : onts)
+                IRI target = new IRI(universalPrefix + s);
+                for (BlankNodeOrIRI subject : onts)
                     o.add(new TripleImpl(subject, OWL.imports, target));
                 break;
             default:
@@ -97,7 +96,7 @@
     @Override
     protected OWLOntology getOntologyAsOWLOntology(OWLOntologyID ontologyId,
                                                    boolean merge,
-                                                   IRI universalPrefix) {
+                                                   org.semanticweb.owlapi.model.IRI universalPrefix) {
         OWLOntology o = super.getOntologyAsOWLOntology(ontologyId, merge, universalPrefix);
         switch (getConnectivityPolicy()) {
             case LOOSE:
@@ -106,7 +105,7 @@
                 String s = getID();
                 s = s.substring(0, s.indexOf(SUFFIX)); // strip "custom"
                 s += SpaceType.CORE.getIRISuffix(); // concatenate "core"
-                IRI target = IRI.create(universalPrefix + s);
+                org.semanticweb.owlapi.model.IRI target = org.semanticweb.owlapi.model.IRI.create(universalPrefix + s);
                 o.getOWLOntologyManager().applyChange(
                     new AddImport(o, OWLManager.getOWLDataFactory().getOWLImportsDeclaration(target)));
                 break;
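
In the 1.0 API the mutable/immutable split moves from MGraph/Graph to Graph/ImmutableGraph, so the export and getOntology methods above snapshot with getImmutableGraph() where they previously called getGraph(). A reduced sketch of that dispatch, assuming a mutable Graph is already in hand:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;

    public class ExportCastSketch {
        // Mirrors the cast logic in export(...): hand back the live mutable
        // Graph, or an immutable snapshot of it.
        static Object export(Class<?> returnType, Graph root) {
            if (returnType == ImmutableGraph.class) return root.getImmutableGraph();
            if (returnType == Graph.class) return root;
            throw new UnsupportedOperationException("Cannot export to a " + returnType);
        }

        public static void main(String[] args) {
            Graph g = new SimpleGraph();
            ImmutableGraph snapshot = (ImmutableGraph) export(ImmutableGraph.class, g);
            System.out.println(snapshot.size()); // 0: the snapshot no longer tracks g
        }
    }
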
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/ScopeImpl.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/ScopeImpl.java
index bc38c83..be4e5af 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/ScopeImpl.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/ScopeImpl.java
@@ -23,15 +23,14 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.stanbol.ontologymanager.servicesapi.collector.OntologyCollector;
@@ -44,7 +44,6 @@
 import org.apache.stanbol.ontologymanager.servicesapi.scope.Scope;
 import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.model.AddImport;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLDataFactory;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyChange;
@@ -89,10 +88,10 @@
 
     private Logger log = LoggerFactory.getLogger(getClass());
 
-    protected IRI namespace = null;
+    protected org.semanticweb.owlapi.model.IRI namespace = null;
 
     public ScopeImpl(String id,
-                     IRI namespace,
+                     org.semanticweb.owlapi.model.IRI namespace,
                      OntologySpaceFactory factory,
                      OntologyInputSource<?>... coreOntologies) {
         setID(id);
@@ -158,33 +157,33 @@
 
     @SuppressWarnings("unchecked")
     @Override
-    public <O> O export(Class<O> returnType, boolean merge, IRI universalPrefix) {
+    public <O> O export(Class<O> returnType, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
         if (OWLOntology.class.isAssignableFrom(returnType)) {
             return (O) exportToOWLOntology(merge, universalPrefix);
         }
-        if (TripleCollection.class.isAssignableFrom(returnType)) {
-            TripleCollection root = exportToMGraph(merge, universalPrefix);
+        if (Graph.class.isAssignableFrom(returnType)) {
+            Graph root = exportToGraph(merge, universalPrefix);
             // A Clerezza graph has to be cast properly.
-            if (returnType == Graph.class) root = ((MGraph) root).getGraph();
-            else if (returnType == MGraph.class) {}
+            if (returnType == ImmutableGraph.class) root = ((Graph) root).getImmutableGraph();
+            else if (returnType == Graph.class) {}
             return (O) root;
         }
         throw new UnsupportedOperationException("Cannot export scope " + getID() + " to a " + returnType);
     }
 
     /**
-     * Get a Clerezza {@link MGraph} representation of the scope.
+     * Get a Clerezza {@link Graph} representation of the scope.
      * 
      * @param merge
      *            if true the core and custom spaces will be recursively merged with the scope graph,
      *            otherwise owl:imports statements will be added.
      * @return the RDF representation of the scope as a modifiable graph.
      */
-    protected MGraph exportToMGraph(boolean merge, IRI universalPrefix) {
+    protected Graph exportToGraph(boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
 
         // No need to store, give it a name, or anything.
-        MGraph root = new SimpleMGraph();
-        UriRef iri = new UriRef(universalPrefix + getID());
+        Graph root = new SimpleGraph();
+        IRI iri = new IRI(universalPrefix + getID());
 
         if (root != null) {
             // Set the ontology ID
@@ -192,13 +191,13 @@
 
             if (merge) {
 
-                Graph custom, core;
+                ImmutableGraph custom, core;
 
                 // Get the subjects of "bad" triples (those with subjects of type owl:Ontology).
                 Iterator<Triple> it;
-                Set<NonLiteral> ontologies = new HashSet<NonLiteral>();
-                Set<Resource> importTargets = new HashSet<Resource>();
-                custom = this.getCustomSpace().export(Graph.class, merge);
+                Set<BlankNodeOrIRI> ontologies = new HashSet<BlankNodeOrIRI>();
+                Set<RDFTerm> importTargets = new HashSet<RDFTerm>();
+                custom = this.getCustomSpace().export(ImmutableGraph.class, merge);
                 // root.addAll(space);
                 it = custom.filter(null, RDF.type, OWL.Ontology);
                 while (it.hasNext())
@@ -206,7 +205,7 @@
                 it = custom.filter(null, OWL.imports, null);
                 while (it.hasNext())
                     importTargets.add(it.next().getObject());
-                core = this.getCoreSpace().export(Graph.class, merge);
+                core = this.getCoreSpace().export(ImmutableGraph.class, merge);
                 // root.addAll(space);
                 it = core.filter(null, RDF.type, OWL.Ontology);
                 while (it.hasNext())
@@ -218,7 +217,7 @@
                 // Make sure the scope itself is not in the "bad" subjects.
                 ontologies.remove(iri);
 
-                for (NonLiteral nl : ontologies)
+                for (BlankNodeOrIRI nl : ontologies)
                     log.debug("{} -related triples will not be added to {}", nl, iri);
 
                 // Merge the two spaces, skipping the "bad" triples.
@@ -233,14 +232,14 @@
                  * Reinstate import statements, though. If imported ontologies were not merged earlier, we are
                  * not doing it now anyway.
                  */
-                for (Resource target : importTargets)
+                for (RDFTerm target : importTargets)
                     root.add(new TripleImpl(iri, OWL.imports, target));
 
             } else {
-                UriRef physIRI = new UriRef(universalPrefix.toString() + this.getID() + "/"
+                IRI physIRI = new IRI(universalPrefix.toString() + this.getID() + "/"
                                             + SpaceType.CUSTOM.getIRISuffix());
                 root.add(new TripleImpl(iri, OWL.imports, physIRI));
-                physIRI = new UriRef(universalPrefix.toString() + this.getID() + "/"
+                physIRI = new IRI(universalPrefix.toString() + this.getID() + "/"
                                      + SpaceType.CORE.getIRISuffix());
                 root.add(new TripleImpl(iri, OWL.imports, physIRI));
             }
@@ -257,7 +256,7 @@
      *            otherwise owl:imports statements will be added.
      * @return the OWL representation of the scope.
      */
-    protected OWLOntology exportToOWLOntology(boolean merge, IRI universalPrefix) {
+    protected OWLOntology exportToOWLOntology(boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
         // if (merge) throw new UnsupportedOperationException(
         // "Ontology merging only implemented for managed ontologies, not for collectors. "
         // + "Please set merge parameter to false.");
@@ -285,25 +284,25 @@
                 OWLOntologyMerger merger = new OWLOntologyMerger(provider);
                 try {
                     ont = merger.createMergedOntology(OWLManager.createOWLOntologyManager(),
-                        IRI.create(getDefaultNamespace() + getID()));
+                        org.semanticweb.owlapi.model.IRI.create(getDefaultNamespace() + getID()));
                 } catch (OWLOntologyCreationException e) {
                     log.error("Failed to merge imports for ontology.", e);
                     ont = null;
                 }
             } else {
                 // The root ontology ID is in the form [namespace][scopeId]
-                ont = mgr.createOntology(IRI.create(universalPrefix + getID()));
+                ont = mgr.createOntology(org.semanticweb.owlapi.model.IRI.create(universalPrefix + getID()));
                 List<OWLOntologyChange> additions = new LinkedList<OWLOntologyChange>();
                 // Add the import statement for the custom space, if existing and not empty
                 OntologySpace spc = getCustomSpace();
                 if (spc != null && spc.listManagedOntologies().size() > 0) {
-                    IRI spaceIri = IRI.create(universalPrefix + spc.getID());
+                    org.semanticweb.owlapi.model.IRI spaceIri = org.semanticweb.owlapi.model.IRI.create(universalPrefix + spc.getID());
                     additions.add(new AddImport(ont, df.getOWLImportsDeclaration(spaceIri)));
                 }
                 // Add the import statement for the core space, if existing and not empty
                 spc = getCoreSpace();
                 if (spc != null && spc.listManagedOntologies().size() > 0) {
-                    IRI spaceIri = IRI.create(universalPrefix + spc.getID());
+                    org.semanticweb.owlapi.model.IRI spaceIri = org.semanticweb.owlapi.model.IRI.create(universalPrefix + spc.getID());
                     additions.add(new AddImport(ont, df.getOWLImportsDeclaration(spaceIri)));
                 }
                 mgr.applyChanges(additions);
@@ -341,7 +340,7 @@
     }
 
     @Override
-    public IRI getDefaultNamespace() {
+    public org.semanticweb.owlapi.model.IRI getDefaultNamespace() {
         return this.namespace;
     }
 
@@ -351,7 +350,7 @@
     }
 
     @Override
-    public IRI getNamespace() {
+    public org.semanticweb.owlapi.model.IRI getNamespace() {
         return getDefaultNamespace();
     }
 
@@ -405,7 +404,7 @@
      *            will be logged.
      */
     @Override
-    public void setDefaultNamespace(IRI namespace) {
+    public void setDefaultNamespace(org.semanticweb.owlapi.model.IRI namespace) {
         if (namespace == null) throw new IllegalArgumentException("Namespace cannot be null.");
         if (namespace.toURI().getQuery() != null) throw new IllegalArgumentException(
                 "URI Query is not allowed in OntoNet namespaces.");
@@ -416,7 +415,7 @@
         if (!namespace.toString().endsWith("/")) {
             log.warn("Namespace {} does not end with slash character ('/'). It will be added automatically.",
                 namespace);
-            namespace = IRI.create(namespace + "/");
+            namespace = org.semanticweb.owlapi.model.IRI.create(namespace + "/");
         }
         this.namespace = namespace;
     }
@@ -432,7 +431,7 @@
     }
 
     @Override
-    public void setNamespace(IRI namespace) {
+    public void setNamespace(org.semanticweb.owlapi.model.IRI namespace) {
         setDefaultNamespace(namespace);
     }
 
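
exportToGraph above (formerly exportToMGraph) types the scope node as owl:Ontology and, when merge is false, links the custom and core spaces via owl:imports instead of copying their triples. A reduced sketch of that non-merge branch (the namespace and scope ID are made up for illustration):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.ontologies.OWL;
    import org.apache.clerezza.rdf.ontologies.RDF;

    public class ScopeExportSketch {
        public static void main(String[] args) {
            String universalPrefix = "http://example.org/ontonet/ontology/"; // hypothetical
            String scopeId = "demoScope";                                    // hypothetical
            Graph root = new SimpleGraph();
            IRI iri = new IRI(universalPrefix + scopeId);
            // Set the ontology ID.
            root.add(new TripleImpl(iri, RDF.type, OWL.Ontology));
            // Non-merge branch: point at the spaces instead of copying their triples.
            root.add(new TripleImpl(iri, OWL.imports, new IRI(universalPrefix + scopeId + "/custom")));
            root.add(new TripleImpl(iri, OWL.imports, new IRI(universalPrefix + scopeId + "/core")));
        }
    }
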
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/SessionImpl.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/SessionImpl.java
index 73687c4..f9a5256 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/SessionImpl.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/impl/SessionImpl.java
@@ -22,10 +22,9 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.stanbol.ontologymanager.core.scope.ScopeManagerImpl;
 import org.apache.stanbol.ontologymanager.servicesapi.ontology.OntologyProvider;
@@ -38,7 +37,6 @@
 import org.apache.stanbol.ontologymanager.servicesapi.session.SessionListener;
 import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.model.AddImport;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLDataFactory;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyChange;
@@ -74,7 +72,7 @@
      * @param sessionID
      *            the IRI to be set as unique identifier for this session
      */
-    public SessionImpl(String sessionID, IRI namespace, OntologyProvider<?> ontologyProvider) {
+    public SessionImpl(String sessionID, org.semanticweb.owlapi.model.IRI namespace, OntologyProvider<?> ontologyProvider) {
         super(sessionID, namespace, ontologyProvider);
         backwardPathLength = 0;
         // setNamespace(namespace);
@@ -93,17 +91,17 @@
         fireScopeAppended(scopeId);
     }
 
-    private void attachScopeImportsClerezza(TripleCollection target, IRI prefix) {
-        UriRef iri = new UriRef(prefix + _id);
+    private void attachScopeImportsClerezza(Graph target, org.semanticweb.owlapi.model.IRI prefix) {
+        IRI iri = new IRI(prefix + _id);
         String scopePrefix = prefix.toString();
         scopePrefix = scopePrefix.substring(0, scopePrefix.lastIndexOf("/" + shortName + "/")) + "/ontology/";
         for (String scopeID : attachedScopes) {
-            UriRef physIRI = new UriRef(scopePrefix + scopeID);
+            IRI physIRI = new IRI(scopePrefix + scopeID);
             target.add(new TripleImpl(iri, OWL.imports, physIRI));
         }
     }
 
-    private void attachScopeImportsOwlApi(OWLOntology target, IRI prefix) {
+    private void attachScopeImportsOwlApi(OWLOntology target, org.semanticweb.owlapi.model.IRI prefix) {
         if (!attachedScopes.isEmpty()) {
             String scopePrefix = prefix.toString();
             scopePrefix = scopePrefix.substring(0, scopePrefix.lastIndexOf("/" + shortName + "/"))
@@ -113,7 +111,7 @@
             OWLDataFactory df = ontologyManager.getOWLDataFactory();
             // Add import declarations for attached scopes.
             for (String scopeID : attachedScopes) {
-                IRI physIRI = IRI.create(scopePrefix + scopeID);
+                org.semanticweb.owlapi.model.IRI physIRI = org.semanticweb.owlapi.model.IRI.create(scopePrefix + scopeID);
                 changes.add(new AddImport(target, df.getOWLImportsDeclaration(physIRI)));
             }
             // Commit
@@ -161,8 +159,8 @@
     }
 
     @Override
-    protected MGraph exportToMGraph(boolean merge, IRI universalPrefix) {
-        MGraph mg = super.exportToMGraph(merge, universalPrefix);
+    protected Graph exportToGraph(boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
+        Graph mg = super.exportToGraph(merge, universalPrefix);
         attachScopeImportsClerezza(mg, universalPrefix);
         return mg;
     }
@@ -171,10 +169,10 @@
      * TODO support merging for attached scopes as well?
      */
     @Override
-    protected OWLOntology exportToOWLOntology(boolean merge, IRI universalPrefix) {
+    protected OWLOntology exportToOWLOntology(boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
         OWLOntology o = super.exportToOWLOntology(merge, universalPrefix);
 
-        IRI iri = o.getOntologyID().getOntologyIRI();
+        org.semanticweb.owlapi.model.IRI iri = o.getOntologyID().getOntologyIRI();
 
         if (merge) { // Re-merge
             ScopeManager onm = ScopeManagerImpl.get(); // FIXME try to avoid this.
@@ -242,8 +240,8 @@
     }
 
     @Override
-    protected MGraph getOntologyAsMGraph(OWLOntologyID ontologyId, boolean merge, IRI universalPrefix) {
-        MGraph o = super.getOntologyAsMGraph(ontologyId, merge, universalPrefix);
+    protected Graph getOntologyAsGraph(OWLOntologyID ontologyId, boolean merge, org.semanticweb.owlapi.model.IRI universalPrefix) {
+        Graph o = super.getOntologyAsGraph(ontologyId, merge, universalPrefix);
         switch (getConnectivityPolicy()) {
             case LOOSE:
                 break;
@@ -259,7 +257,7 @@
     @Override
     protected OWLOntology getOntologyAsOWLOntology(OWLOntologyID ontologyId,
                                                    boolean merge,
-                                                   IRI universalPrefix) {
+                                                   org.semanticweb.owlapi.model.IRI universalPrefix) {
         OWLOntology o = super.getOntologyAsOWLOntology(ontologyId, merge, universalPrefix);
         switch (getConnectivityPolicy()) {
             case LOOSE:
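
attachScopeImportsClerezza above rewrites the session namespace into the scope namespace and then adds one owl:imports triple per attached scope. A sketch of the same steps, with hypothetical stand-ins for the session's fields and shortName assumed to be "session":

    import java.util.Arrays;
    import java.util.List;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.ontologies.OWL;

    public class SessionImportsSketch {
        public static void main(String[] args) {
            // Hypothetical stand-ins for the session's fields.
            String prefix = "http://example.org/ontonet/session/";
            String sessionId = "session123";
            List<String> attachedScopes = Arrays.asList("scopeA", "scopeB");

            Graph target = new SimpleGraph();
            IRI iri = new IRI(prefix + sessionId);
            // Rewrite .../session/ into .../ontology/, as attachScopeImportsClerezza does.
            String scopePrefix = prefix.substring(0, prefix.lastIndexOf("/session/")) + "/ontology/";
            for (String scopeId : attachedScopes) {
                target.add(new TripleImpl(iri, OWL.imports, new IRI(scopePrefix + scopeId)));
            }
        }
    }
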
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOWLUtils.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOWLUtils.java
index 97def67..88f00d6 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOWLUtils.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOWLUtils.java
@@ -16,10 +16,10 @@
  */
 package org.apache.stanbol.ontologymanager.multiplexer.clerezza.ontology;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.slf4j.Logger;
@@ -40,14 +40,14 @@
 
     private static Logger log = LoggerFactory.getLogger(ClerezzaOWLUtils.class);
 
-    public static MGraph createOntology(String id, TcManager tcm) {
-        UriRef name = new UriRef(id);
-        MGraph ont = tcm.createMGraph(name);
+    public static Graph createOntology(String id, TcManager tcm) {
+        IRI name = new IRI(id);
+        Graph ont = tcm.createGraph(name);
         ont.add(new TripleImpl(name, RDF.type, OWL.Ontology));
         return ont;
     }
 
-    public static MGraph createOntology(String id) {
+    public static Graph createOntology(String id) {
         return createOntology(id, TcManager.getInstance());
     }
 
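
ClerezzaOWLUtils.createOntology now goes through TcManager.createGraph(IRI) where it previously used createMGraph(UriRef). A usage sketch under those assumptions (the graph name is illustrative, and TcManager.getInstance() presumes a provider is available outside OSGi):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.rdf.core.access.TcManager;
    import org.apache.clerezza.rdf.ontologies.OWL;
    import org.apache.clerezza.rdf.ontologies.RDF;

    public class CreateOntologySketch {
        public static void main(String[] args) {
            TcManager tcm = TcManager.getInstance();
            IRI name = new IRI("http://example.org/ontologies/demo"); // hypothetical id
            Graph ont = tcm.createGraph(name);                        // named, managed graph
            ont.add(new TripleImpl(name, RDF.type, OWL.Ontology));    // self-typing triple
            System.out.println("Created " + name.getUnicodeString() + " with " + ont.size() + " triple(s).");
        }
    }
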
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOntologyProvider.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOntologyProvider.java
index 89add74..869e386 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOntologyProvider.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/ClerezzaOntologyProvider.java
@@ -53,22 +53,18 @@
 import java.util.SortedSet;
 import java.util.Stack;
 import java.util.TreeSet;
-import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.NonLiteral;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.BlankNodeOrIRI;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException;
-import org.apache.clerezza.rdf.core.access.LockableMGraph;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.access.TcProvider;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.rdf.core.LiteralFactory;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedFormatException;
 import org.apache.clerezza.rdf.ontologies.OWL;
@@ -83,21 +79,20 @@
 import org.apache.felix.scr.annotations.ReferencePolicy;
 import org.apache.felix.scr.annotations.ReferenceStrategy;
 import org.apache.felix.scr.annotations.Service;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.commons.owl.OWLOntologyManagerFactory;
 import org.apache.stanbol.commons.owl.PhonyIRIMapper;
 import org.apache.stanbol.commons.owl.transformation.OWLAPIToClerezzaConverter;
 import org.apache.stanbol.commons.owl.util.OWLUtils;
 import org.apache.stanbol.commons.owl.util.URIUtils;
 import org.apache.stanbol.commons.stanboltools.offline.OfflineMode;
-import org.apache.stanbol.ontologymanager.multiplexer.clerezza.collector.MGraphMultiplexer;
+import org.apache.stanbol.ontologymanager.multiplexer.clerezza.collector.GraphMultiplexer;
 import org.apache.stanbol.ontologymanager.ontonet.api.OntologyNetworkConfiguration;
 import org.apache.stanbol.ontologymanager.servicesapi.OfflineConfiguration;
 import org.apache.stanbol.ontologymanager.servicesapi.collector.ImportManagementPolicy;
 import org.apache.stanbol.ontologymanager.servicesapi.io.Origin;
 import org.apache.stanbol.ontologymanager.servicesapi.ontology.Multiplexer;
 import org.apache.stanbol.ontologymanager.servicesapi.ontology.OntologyHandleException;
-import org.apache.stanbol.ontologymanager.servicesapi.ontology.OntologyLoadingException;
 import org.apache.stanbol.ontologymanager.servicesapi.ontology.OntologyProvider;
 import org.apache.stanbol.ontologymanager.servicesapi.ontology.OrphanOntologyKeyException;
 import org.apache.stanbol.ontologymanager.servicesapi.scope.Scope;
@@ -106,7 +101,6 @@
 import org.osgi.service.component.ComponentContext;
 import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.model.AddImport;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLDataFactory;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyChange;
@@ -145,32 +139,32 @@
      */
     private class OntologyToTcMapper {
 
-        private MGraph graph;
+        private Graph graph;
 
         OntologyToTcMapper() {
             if (store == null) throw new IllegalArgumentException("TcProvider cannot be null");
-            UriRef graphId = new UriRef(metaGraphId);
+            IRI graphId = new IRI(metaGraphId);
             try {
-                graph = store.createMGraph(graphId);
+                graph = store.createGraph(graphId);
             } catch (EntityAlreadyExistsException e) {
-                graph = store.getMGraph(graphId);
+                graph = store.getGraph(graphId);
             }
         }
 
-        void addMapping(OWLOntologyID ontologyReference, UriRef graphName) {
+        void addMapping(OWLOntologyID ontologyReference, IRI graphName) {
             if (ontologyReference == null || ontologyReference.isAnonymous()) throw new IllegalArgumentException(
                     "An anonymous ontology cannot be mapped. A non-anonymous ontology ID must be forged in these cases.");
             Triple tType, tMaps, tHasOiri = null, tHasViri = null;
-            IRI ontologyIRI = ontologyReference.getOntologyIRI(), versionIri = ontologyReference
+            org.semanticweb.owlapi.model.IRI ontologyIRI = ontologyReference.getOntologyIRI(), versionIri = ontologyReference
                     .getVersionIRI();
-            UriRef entry = buildResource(ontologyReference);
+            IRI entry = buildResource(ontologyReference);
             tType = new TripleImpl(entry, RDF.type, ENTRY_URIREF);
             tMaps = new TripleImpl(entry, MAPS_TO_GRAPH_URIREF, graphName);
             LiteralFactory lf = LiteralFactory.getInstance();
-            tHasOiri = new TripleImpl(entry, HAS_ONTOLOGY_IRI_URIREF, lf.createTypedLiteral(new UriRef(
+            tHasOiri = new TripleImpl(entry, HAS_ONTOLOGY_IRI_URIREF, lf.createTypedLiteral(new IRI(
                     ontologyIRI.toString())));
             if (versionIri != null) tHasViri = new TripleImpl(entry, HAS_VERSION_IRI_URIREF,
-                    lf.createTypedLiteral(new UriRef(versionIri.toString())));
+                    lf.createTypedLiteral(new IRI(versionIri.toString())));
             synchronized (graph) {
                 graph.add(tType);
                 graph.add(tMaps);
@@ -179,73 +173,76 @@
             }
         }
 
-        OWLOntologyID buildPublicKey(final UriRef resource) {
+        OWLOntologyID buildPublicKey(final IRI resource) {
             // TODO desanitize?
             LiteralFactory lf = LiteralFactory.getInstance();
-            IRI oiri = null, viri = null;
+            org.semanticweb.owlapi.model.IRI oiri = null, viri = null;
             Iterator<Triple> it = graph.filter(resource, HAS_ONTOLOGY_IRI_URIREF, null);
             if (it.hasNext()) {
-                UriRef s = null;
-                Resource obj = it.next().getObject();
-                if (obj instanceof UriRef) s = ((UriRef) obj);
-                else if (obj instanceof TypedLiteral) s = lf.createObject(UriRef.class, (TypedLiteral) obj);
-                oiri = IRI.create(s.getUnicodeString());
+                IRI s = null;
+                RDFTerm obj = it.next().getObject();
+                if (obj instanceof IRI) s = ((IRI) obj);
+                else if (obj instanceof Literal) s = lf.createObject(IRI.class, (Literal) obj);
+                oiri = org.semanticweb.owlapi.model.IRI.create(s.getUnicodeString());
             } else {
                 // Anonymous ontology? Decode the resource itself (which is not null)
                 return OntologyUtils.decode(resource.getUnicodeString());
             }
             it = graph.filter(resource, HAS_VERSION_IRI_URIREF, null);
             if (it.hasNext()) {
-                UriRef s = null;
-                Resource obj = it.next().getObject();
-                if (obj instanceof UriRef) s = ((UriRef) obj);
-                else if (obj instanceof TypedLiteral) s = lf.createObject(UriRef.class, (TypedLiteral) obj);
-                viri = IRI.create(s.getUnicodeString());
+                IRI s = null;
+                RDFTerm obj = it.next().getObject();
+                if (obj instanceof IRI) s = ((IRI) obj);
+                else if (obj instanceof Literal) s = lf.createObject(IRI.class, (Literal) obj);
+                viri = org.semanticweb.owlapi.model.IRI.create(s.getUnicodeString());
             }
             if (viri == null) return new OWLOntologyID(oiri);
             else return new OWLOntologyID(oiri, viri);
         }
 
         /**
-         * Creates an {@link UriRef} out of an {@link OWLOntologyID}, so it can be used as a storage key for
+         * Creates an {@link IRI} out of an {@link OWLOntologyID}, so it can be used as a storage key for
          * the graph.
          * 
          * @param ontologyReference
          * @return
          */
-        UriRef buildResource(OWLOntologyID publicKey) {
+        IRI buildResource(OWLOntologyID publicKey) {
             /*
-             * The UriRef is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional e.g.
+             * The IRI is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional e.g.
              * the string form of OWLOntologyID objects?)
              */
-            TripleCollection meta = getMetaGraph(TripleCollection.class);
+            Graph meta = getMetaGraph(Graph.class);
             if (publicKey == null) throw new IllegalArgumentException(
-                    "Cannot build a UriRef resource on a null public key!");
+                    "Cannot build an IRI resource on a null public key!");
 
             // XXX should versionIRI also include the version IRI set by owners? Currently not
 
             // Remember not to sanitize logical identifiers.
-            IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
+            org.semanticweb.owlapi.model.IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
             if (ontologyIri == null) throw new IllegalArgumentException(
-                    "Cannot build a UriRef resource on an anonymous public key!");
-            UriRef match = null;
+                    "Cannot build an IRI resource on an anonymous public key!");
+
+            log.debug("Searching for a meta graph entry for public key:");
+            log.debug(" -- {}", publicKey);
+            IRI match = null;
             LiteralFactory lf = LiteralFactory.getInstance();
-            TypedLiteral oiri = lf.createTypedLiteral(new UriRef(ontologyIri.toString()));
-            TypedLiteral viri = versionIri == null ? null : lf.createTypedLiteral(new UriRef(versionIri
+            Literal oiri = lf.createTypedLiteral(new IRI(ontologyIri.toString()));
+            Literal viri = versionIri == null ? null : lf.createTypedLiteral(new IRI(versionIri
                     .toString()));
             for (Iterator<Triple> it = meta.filter(null, HAS_ONTOLOGY_IRI_URIREF, oiri); it.hasNext();) {
-                Resource subj = it.next().getSubject();
+                RDFTerm subj = it.next().getSubject();
                 log.debug(" -- Ontology IRI match found. Scanning");
-                log.debug(" -- Resource : {}", subj);
-                if (!(subj instanceof UriRef)) {
+                log.debug(" -- RDFTerm : {}", subj);
+                if (!(subj instanceof IRI)) {
                     log.debug(" ---- (uncomparable: skipping...)");
                     continue;
                 }
                 if (viri != null) {
                     // Must find matching versionIRI
-                    if (meta.contains(new TripleImpl((UriRef) subj, HAS_VERSION_IRI_URIREF, viri))) {
+                    if (meta.contains(new TripleImpl((IRI) subj, HAS_VERSION_IRI_URIREF, viri))) {
                         log.debug(" ---- Version IRI match!");
-                        match = (UriRef) subj;
+                        match = (IRI) subj;
                         break; // Found
                     } else {
                         log.debug(" ---- Expected version IRI match not found.");
@@ -254,88 +251,86 @@
 
                 } else {
                     // Must find unversioned resource
-                    if (meta.filter((UriRef) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
+                    if (meta.filter((IRI) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
                         log.debug(" ---- Unexpected version IRI found. Skipping.");
                         continue;
                     } else {
                         log.debug(" ---- Unversioned match!");
-                        match = (UriRef) subj;
+                        match = (IRI) subj;
                         break; // Found
                     }
                 }
             }
+            log.debug("Matching IRI in graph : {}", match);
             if (match == null) {
-                return new UriRef(OntologyUtils.encode(publicKey));
+                return new IRI(OntologyUtils.encode(publicKey));
             } else {
                 return match;
             }
         }
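A hedged illustration of the ontologyIRI[:::versionIRI] convention documented above, assuming OntologyUtils.encode uses the ':::' separator and no prior metadata entry matches the key:

    OWLOntologyID key = new OWLOntologyID(
        org.semanticweb.owlapi.model.IRI.create("http://example.org/onto"),
        org.semanticweb.owlapi.model.IRI.create("http://example.org/onto/1.0"));
    IRI stored = keymap.buildResource(key);
    // expected: new IRI("http://example.org/onto:::http://example.org/onto/1.0")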
 
-        UriRef getMapping(OWLOntologyID reference) {
-            Set<UriRef> aliases = new HashSet<UriRef>();
+        IRI getMapping(OWLOntologyID reference) {
+            Set<IRI> aliases = new HashSet<IRI>();
             aliases.add(buildResource(reference));
             for (OWLOntologyID alias : listAliases(reference))
                 aliases.add(buildResource(alias));
-            for (UriRef alias : aliases) {
+            for (IRI alias : aliases) {
                 // Logical mappings first.
                 Iterator<Triple> it = graph.filter(alias, MAPS_TO_GRAPH_URIREF, null);
                 while (it.hasNext()) {
-                    Resource obj = it.next().getObject();
-                    if (obj instanceof UriRef) return (UriRef) obj;
+                    RDFTerm obj = it.next().getObject();
+                    if (obj instanceof IRI) return (IRI) obj;
                 }
                 Literal litloc = LiteralFactory.getInstance().createTypedLiteral(
-                    new UriRef(alias.getUnicodeString()));
+                    new IRI(alias.getUnicodeString()));
                 // Logical mappings failed, try physical mappings.
                 it = graph.filter(null, RETRIEVED_FROM_URIREF, litloc);
                 while (it.hasNext()) {
-                    Resource obj = it.next().getSubject();
-                    if (obj instanceof UriRef) return (UriRef) obj;
+                    RDFTerm obj = it.next().getSubject();
+                    if (obj instanceof IRI) return (IRI) obj;
                 }
             }
             return null;
         }
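getMapping tries logical (MAPS_TO_GRAPH) mappings before physical (RETRIEVED_FROM) ones, across the whole alias closure. A minimal usage sketch (the public key is illustrative):

    IRI graphName = keymap.getMapping(new OWLOntologyID(
        org.semanticweb.owlapi.model.IRI.create("http://example.org/onto")));
    if (graphName == null) {
        // no logical or physical mapping exists for this key or any of its aliases
    }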
 
-        OWLOntologyID getReverseMapping(UriRef graphName) {
+        OWLOntologyID getReverseMapping(IRI graphName) {
             // Logical mappings first.
-
-            log.info("GRAPH NAME {}", graphName);
-
             Iterator<Triple> it = graph.filter(null, MAPS_TO_GRAPH_URIREF, graphName);
             while (it.hasNext()) {
-                Resource obj = it.next().getSubject();
-                if (obj instanceof UriRef) return buildPublicKey((UriRef) obj);
+                RDFTerm obj = it.next().getSubject();
+                if (obj instanceof IRI) return buildPublicKey((IRI) obj);
             }
             Literal litloc = LiteralFactory.getInstance().createTypedLiteral(
-                new UriRef(graphName.getUnicodeString()));
+                new IRI(graphName.getUnicodeString()));
             // Logical mappings failed, try physical mappings.
             it = graph.filter(null, RETRIEVED_FROM_URIREF, litloc);
             while (it.hasNext()) {
-                Resource subj = it.next().getSubject();
-                if (subj instanceof UriRef) return buildPublicKey((UriRef) subj);
+                RDFTerm subj = it.next().getSubject();
+                if (subj instanceof IRI) return buildPublicKey((IRI) subj);
 
             }
             return null;
         }
 
-        Set<OWLOntologyID> getVersions(IRI ontologyIri) {
+        Set<OWLOntologyID> getVersions(org.semanticweb.owlapi.model.IRI ontologyIri) {
             if (ontologyIri == null) throw new IllegalArgumentException("Cannot get versions for a null IRI.");
             Set<OWLOntologyID> keys = new HashSet<OWLOntologyID>();
             LiteralFactory lf = LiteralFactory.getInstance();
-            TypedLiteral iri = lf.createTypedLiteral(new UriRef(ontologyIri.toString()));
+            Literal iri = lf.createTypedLiteral(new IRI(ontologyIri.toString()));
             // Exclude aliases.
             for (Iterator<Triple> it = graph.filter(null, HAS_ONTOLOGY_IRI_URIREF, iri); it.hasNext();) {
-                Resource sub = it.next().getSubject();
-                if (sub instanceof UriRef) keys.add(buildPublicKey((UriRef) sub));
+                RDFTerm sub = it.next().getSubject();
+                if (sub instanceof IRI) keys.add(buildPublicKey((IRI) sub));
             }
             // Also check for physical locations
             for (Iterator<Triple> it = graph.filter(null, RETRIEVED_FROM_URIREF, iri); it.hasNext();) {
-                Resource sub = it.next().getSubject();
-                if (sub instanceof UriRef) keys.add(buildPublicKey((UriRef) sub));
+                RDFTerm sub = it.next().getSubject();
+                if (sub instanceof IRI) keys.add(buildPublicKey((IRI) sub));
             }
             return keys;
         }
 
-        void mapLocator(IRI locator, UriRef graphName) {
+        void mapLocator(org.semanticweb.owlapi.model.IRI locator, IRI graphName) {
             if (graphName == null) throw new IllegalArgumentException("A null graph name is not allowed.");
             // Null locator is a legal argument, will remove all locator mappings from the supplied graph
             Set<Triple> remove = new HashSet<Triple>();
@@ -347,7 +342,7 @@
             graph.removeAll(remove);
             if (locator != null) {
                 Literal litloc = LiteralFactory.getInstance().createTypedLiteral(
-                    new UriRef(locator.toString()));
+                    new IRI(locator.toString()));
                 graph.add(new TripleImpl(graphName, RETRIEVED_FROM_URIREF, litloc));
             }
         }
@@ -357,7 +352,7 @@
             Set<OWLOntologyID> aliases = listAliases(publicKey);
             aliases.add(publicKey);
             for (OWLOntologyID alias : aliases) {
-                UriRef ontologyId = buildResource(alias);
+                IRI ontologyId = buildResource(alias);
                 // Also removes aliases and dependencies.
                 // XXX Too extreme?
                 for (Iterator<Triple> it = graph.filter(ontologyId, null, null); it.hasNext();)
@@ -370,14 +365,12 @@
 
         void removeMapping(OWLOntologyID ontologyReference) {
             Iterator<Triple> it = graph.filter(buildResource(ontologyReference), MAPS_TO_GRAPH_URIREF, null);
-            // To avoid concurrent modification exceptions
-            Collection<Triple> removeUs = new HashSet<Triple>();
+            // Removing triples while iterating may raise a ConcurrentModificationException in some Graph implementations; revisit if it surfaces here.
             while (it.hasNext())
-                removeUs.add(it.next());
-            graph.removeAll(removeUs);
+                graph.remove(it.next());
         }
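Should the in-place removal above actually raise a ConcurrentModificationException, the collect-then-remove idiom from the previous revision remains the safe fallback:

    Iterator<Triple> it = graph.filter(buildResource(ontologyReference), MAPS_TO_GRAPH_URIREF, null);
    Collection<Triple> removeUs = new HashSet<Triple>();
    while (it.hasNext()) removeUs.add(it.next());
    graph.removeAll(removeUs);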
 
-        void setMapping(OWLOntologyID ontologyReference, UriRef graphName) {
+        void setMapping(OWLOntologyID ontologyReference, IRI graphName) {
             removeMapping(ontologyReference);
             addMapping(ontologyReference, graphName);
         }
@@ -392,8 +385,6 @@
 
     private static final boolean _RESOLVE_IMPORTS_DEFAULT = true;
 
-    private static final boolean _MISSING_IMPORTS_FAIL_DEFAULT = true;
-
     protected Multiplexer descriptor = null;
 
     @Property(name = OntologyProvider.IMPORT_POLICY, options = {
@@ -444,9 +435,6 @@
     @Property(name = OntologyProvider.RESOLVE_IMPORTS, boolValue = _RESOLVE_IMPORTS_DEFAULT)
     protected boolean resolveImports = _RESOLVE_IMPORTS_DEFAULT;
 
-    @Property(name = OntologyProvider.MISSING_IMPORTS_FAIL, boolValue = _MISSING_IMPORTS_FAIL_DEFAULT)
-    protected boolean failMissingImports = _MISSING_IMPORTS_FAIL_DEFAULT;
-
     /*
      * Do not use SCR reference here: this might be different from the registered WeightedTcProvider services
      * : when supplied, it overrides TcManager
@@ -467,7 +455,7 @@
      * rule store if running outside an OSGI environment.
      */
     public ClerezzaOntologyProvider() {
-        supported = new Class<?>[] {MGraph.class, TripleCollection.class, OWLOntology.class};
+        supported = new Class<?>[] {Graph.class, OWLOntology.class};
     }
 
     public ClerezzaOntologyProvider(TcProvider store, OfflineConfiguration offline, Parser parser) {
@@ -506,7 +494,7 @@
 
         // This call will also create the metadata graph.
         keymap = new OntologyToTcMapper();
-        descriptor = new MGraphMultiplexer(keymap.graph);
+        descriptor = new GraphMultiplexer(keymap.graph);
 
         // Parse configuration.
         prefix = (String) (configuration.get(OntologyProvider.GRAPH_PREFIX));
@@ -517,11 +505,6 @@
         } catch (Exception ex) {
             resolveImports = _RESOLVE_IMPORTS_DEFAULT; // Should be already assigned though
         }
-        try {
-            failMissingImports = (Boolean) (configuration.get(OntologyProvider.MISSING_IMPORTS_FAIL));
-        } catch (Exception ex) {
-            failMissingImports = _MISSING_IMPORTS_FAIL_DEFAULT; // Should be already assigned though
-        }
 
         Object importPolicy = configuration.get(OntologyProvider.IMPORT_POLICY);
         if (importPolicy == null) {
@@ -531,15 +514,15 @@
         }
 
         // TODO replace with DataFileProvider ?
-        final IRI[] offlineResources;
+        final org.semanticweb.owlapi.model.IRI[] offlineResources;
         if (this.offlineConfig != null) {
-            List<IRI> paths = offlineConfig.getOntologySourceLocations();
-            if (paths != null) offlineResources = paths.toArray(new IRI[0]);
+            List<org.semanticweb.owlapi.model.IRI> paths = offlineConfig.getOntologySourceLocations();
+            if (paths != null) offlineResources = paths.toArray(new org.semanticweb.owlapi.model.IRI[0]);
             // There are no offline paths.
-            else offlineResources = new IRI[0];
+            else offlineResources = new org.semanticweb.owlapi.model.IRI[0];
         }
         // There's no offline configuration at all.
-        else offlineResources = new IRI[0];
+        else offlineResources = new org.semanticweb.owlapi.model.IRI[0];
         this.mappers = OWLOntologyManagerFactory.getMappers(offlineResources);
 
     }
@@ -625,7 +608,7 @@
      */
     private void fillImportsReverse(OWLOntologyID importing,
                                     List<OWLOntologyID> reverseImports,
-                                    List<OWLOntologyID> level1Imports) throws OntologyHandleException {
+                                    List<OWLOntologyID> level1Imports) {
         log.debug("Filling reverse imports for {}", importing);
 
         // Add the importing ontology first
@@ -633,43 +616,25 @@
         if (level1Imports != null) level1Imports.add(importing);
 
         // Get the graph and explore its imports
-        TripleCollection graph // store.getTriples(importing);
-        = getStoredOntology(/* getPublicKey */(importing), MGraph.class, false);
+        // Was store.getTriples(importing); TODO should this call getPublicKey(importing)?
+        Graph graph = getStoredOntology(importing, Graph.class, false);
         Iterator<Triple> it = graph.filter(null, RDF.type, OWL.Ontology);
         if (!it.hasNext()) return;
-        log.debug("Import list follows:");
         Iterator<Triple> it2 = graph.filter(it.next().getSubject(), OWL.imports, null);
         while (it2.hasNext()) {
             // obj is the *original* import target
-            Resource obj = it2.next().getObject();
-            log.debug(" * {}", obj);
-            if (obj instanceof UriRef) {
+            RDFTerm obj = it2.next().getObject();
+            if (obj instanceof IRI) {
                 // Right now getKey() is returning the "private" storage ID
-                String key = getKey(IRI.create(((UriRef) obj).getUnicodeString()));
-                log.debug("   ... with key {}", key);
-                if (key == null) {
-                    if (failMissingImports) throw new OntologyHandleException(
-                            "Failed to retrieve storage key for ontology "
-                                    + obj
-                                    + ". To prevent these exceptions from being thrown, please unset property "
-                                    + "'org.apache.stanbol.ontologymanager.ontonet.failOnMissingImports'");
-                    else {
-                        log.warn("null key for {}!", obj);
-                        log.warn("Will ignore since 'failOnMissingImports' is unset.");
-                        continue;
-                    }
-                }
+                String key = getKey(org.semanticweb.owlapi.model.IRI.create(((IRI) obj).getUnicodeString()));
                 // TODO this will not be needed when getKey() and getPublicKey() return the proper public key.
-                OWLOntologyID oid = keymap.getReverseMapping(new UriRef(key));
+                OWLOntologyID oid = keymap.getReverseMapping(new IRI(key));
                 // Check used for breaking cycles in the import graph.
                 // (Unoptimized, should not use contains() for stacks.)
                 if (!reverseImports.contains(oid)) {
                     if (level1Imports != null) level1Imports.add(oid);
                     fillImportsReverse(oid, reverseImports, null);
                 }
-            } else {
-                log.warn("Unexpected type for resource {}.", obj);
-                log.warn(" ... Expected {}, found {}", UriRef.class, obj.getClass());
             }
         }
     }
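The cycle check above uses contains() on a List, which is O(n) per lookup. A hedged sketch of the usual remedy, mirroring the ordered list with a set (names are illustrative, recursion elided):

    List<OWLOntologyID> reverseImports = new ArrayList<OWLOntologyID>();
    Set<OWLOntologyID> seen = new HashSet<OWLOntologyID>(); // mirrors reverseImports
    OWLOntologyID oid = keymap.getReverseMapping(new IRI("urn:example:imported")); // illustrative
    if (seen.add(oid)) { // add() returns false when oid was already visited
        reverseImports.add(oid);
        // recurse into oid's own imports here, as fillImportsReverse does
    }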
@@ -689,7 +654,7 @@
 
     @Override
     @Deprecated
-    public String getKey(IRI ontologyIri) {
+    public String getKey(org.semanticweb.owlapi.model.IRI ontologyIri) {
         // ontologyIri = URIUtils.sanitizeID(ontologyIri);
         return getPublicKey(new OWLOntologyID(ontologyIri));
     }
@@ -702,48 +667,48 @@
 
     @SuppressWarnings("unchecked")
     @Override
-    public <O extends TripleCollection> O getMetaGraph(Class<O> returnType) {
-        if (!TripleCollection.class.isAssignableFrom(returnType)) throw new IllegalArgumentException(
-                "Only subtypes of " + TripleCollection.class + " are allowed.");
-        return (O) store.getTriples(new UriRef(metaGraphId));
+    public <O extends Graph> O getMetaGraph(Class<O> returnType) {
+        if (!Graph.class.isAssignableFrom(returnType)) throw new IllegalArgumentException(
+                "Only subtypes of " + Graph.class + " are allowed.");
+        return (O) store.getGraph(new IRI(metaGraphId));
     }
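Usage sketch for the generic accessor above; the caller names the Graph subtype it wants back (RDF.type and SCOPE_URIREF come from this class's existing imports):

    Graph meta = getMetaGraph(Graph.class);
    for (Iterator<Triple> it = meta.filter(null, RDF.type, SCOPE_URIREF); it.hasNext();) {
        Triple scopeEntry = it.next(); // one entry per registered scope
    }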
 
     @Override
     @Deprecated
     public OWLOntologyID getOntologyId(String storageKey) {
-        return keymap.getReverseMapping(new UriRef(storageKey));
+        return keymap.getReverseMapping(new IRI(storageKey));
     }
 
     public OntologyNetworkConfiguration getOntologyNetworkConfiguration() {
         Map<String,Collection<OWLOntologyID>> coreOntologies = new HashMap<String,Collection<OWLOntologyID>>(), customOntologies = new HashMap<String,Collection<OWLOntologyID>>();
         Map<String,Collection<String>> attachedScopes = new HashMap<String,Collection<String>>();
-        final TripleCollection meta = store.getTriples(new UriRef(metaGraphId));
+        final Graph meta = store.getGraph(new IRI(metaGraphId));
 
         // Scopes first
         for (Iterator<Triple> it = meta.filter(null, RDF.type, SCOPE_URIREF); it.hasNext();) { // for each
                                                                                                // scope
             Triple ta = it.next();
-            NonLiteral sub = ta.getSubject();
-            if (sub instanceof UriRef) {
-                String s = ((UriRef) sub).getUnicodeString(), prefix = _NS_STANBOL_INTERNAL + Scope.shortName
+            BlankNodeOrIRI sub = ta.getSubject();
+            if (sub instanceof IRI) {
+                String s = ((IRI) sub).getUnicodeString(), prefix = _NS_STANBOL_INTERNAL + Scope.shortName
                                                                        + "/";
                 if (s.startsWith(prefix)) {
                     String scopeId = s.substring(prefix.length());
                     log.info("Rebuilding scope \"{}\".", scopeId);
                     coreOntologies.put(scopeId, new TreeSet<OWLOntologyID>());
                     customOntologies.put(scopeId, new TreeSet<OWLOntologyID>());
-                    UriRef core_ur = null, custom_ur = null;
-                    Resource r;
+                    IRI core_ur = null, custom_ur = null;
+                    RDFTerm r;
                     // Check core space
                     Iterator<Triple> it2 = meta.filter(sub, HAS_SPACE_CORE_URIREF, null);
                     if (it2.hasNext()) {
                         r = it2.next().getObject();
-                        if (r instanceof UriRef) core_ur = (UriRef) r;
+                        if (r instanceof IRI) core_ur = (IRI) r;
                     } else {
                         it2 = meta.filter(null, IS_SPACE_CORE_OF_URIREF, sub);
                         if (it2.hasNext()) {
                             r = it2.next().getSubject();
-                            if (r instanceof UriRef) core_ur = (UriRef) r;
+                            if (r instanceof IRI) core_ur = (IRI) r;
                         }
                     }
 
@@ -751,12 +716,12 @@
                     it2 = meta.filter(sub, HAS_SPACE_CUSTOM_URIREF, null);
                     if (it2.hasNext()) {
                         r = it2.next().getObject();
-                        if (r instanceof UriRef) custom_ur = (UriRef) r;
+                        if (r instanceof IRI) custom_ur = (IRI) r;
                     } else {
                         it2 = meta.filter(null, IS_SPACE_CUSTOM_OF_URIREF, sub);
                         if (it2.hasNext()) {
                             r = it2.next().getSubject();
-                            if (r instanceof UriRef) custom_ur = (UriRef) r;
+                            if (r instanceof IRI) custom_ur = (IRI) r;
                         }
                     }
 
@@ -764,23 +729,23 @@
                     if (core_ur != null) {
                         for (it2 = meta.filter(core_ur, null, null); it2.hasNext();) {
                             Triple t = it2.next();
-                            UriRef predicate = t.getPredicate();
+                            IRI predicate = t.getPredicate();
                             if (predicate.equals(MANAGES_URIREF)) {
-                                if (t.getObject() instanceof UriRef) coreOntologies.get(scopeId).add(
-                                    keymap.buildPublicKey((UriRef) t.getObject()) // FIXME must be very
+                                if (t.getObject() instanceof IRI) coreOntologies.get(scopeId).add(
+                                    keymap.buildPublicKey((IRI) t.getObject()) // FIXME must be very
                                                                                   // temporary!
-                                        // ((UriRef) t.getObject()).getUnicodeString()
+                                        // ((IRI) t.getObject()).getUnicodeString()
                                         );
                             }
                         }
                         for (it2 = meta.filter(null, null, core_ur); it2.hasNext();) {
                             Triple t = it2.next();
-                            UriRef predicate = t.getPredicate();
+                            IRI predicate = t.getPredicate();
                             if (predicate.equals(IS_MANAGED_BY_URIREF)) {
-                                if (t.getSubject() instanceof UriRef) coreOntologies.get(scopeId).add(
-                                    keymap.buildPublicKey((UriRef) t.getSubject()) // FIXME must be very
+                                if (t.getSubject() instanceof IRI) coreOntologies.get(scopeId).add(
+                                    keymap.buildPublicKey((IRI) t.getSubject()) // FIXME must be very
                                                                                    // temporary!
-                                        // ((UriRef) t.getSubject()).getUnicodeString()
+                                        // ((IRI) t.getSubject()).getUnicodeString()
                                         );
                             }
                         }
@@ -788,23 +753,23 @@
                     if (custom_ur != null) {
                         for (it2 = meta.filter(custom_ur, null, null); it2.hasNext();) {
                             Triple t = it2.next();
-                            UriRef predicate = t.getPredicate();
+                            IRI predicate = t.getPredicate();
                             if (predicate.equals(MANAGES_URIREF)) {
-                                if (t.getObject() instanceof UriRef) customOntologies.get(scopeId).add(
-                                    keymap.buildPublicKey((UriRef) t.getObject()) // FIXME must be very
+                                if (t.getObject() instanceof IRI) customOntologies.get(scopeId).add(
+                                    keymap.buildPublicKey((IRI) t.getObject()) // FIXME must be very
                                                                                   // temporary!
-                                        // ((UriRef) t.getObject()).getUnicodeString()
+                                        // ((IRI) t.getObject()).getUnicodeString()
                                         );
                             }
                         }
                         for (it2 = meta.filter(null, null, custom_ur); it2.hasNext();) {
                             Triple t = it2.next();
-                            UriRef predicate = t.getPredicate();
+                            IRI predicate = t.getPredicate();
                             if (predicate.equals(IS_MANAGED_BY_URIREF)) {
-                                if (t.getSubject() instanceof UriRef) customOntologies.get(scopeId).add(
-                                    keymap.buildPublicKey((UriRef) t.getSubject()) // FIXME must be very
+                                if (t.getSubject() instanceof IRI) customOntologies.get(scopeId).add(
+                                    keymap.buildPublicKey((IRI) t.getSubject()) // FIXME must be very
                                                                                    // temporary!
-                                        // ((UriRef) t.getSubject()).getUnicodeString()
+                                        // ((IRI) t.getSubject()).getUnicodeString()
                                         );
                             }
                         }
@@ -819,10 +784,10 @@
         for (Iterator<Triple> it = meta.filter(null, RDF.type, SESSION_URIREF); it.hasNext();) { // for each
                                                                                                  // scope
             Triple ta = it.next();
-            NonLiteral sub = ta.getSubject();
-            if (sub instanceof UriRef) {
-                UriRef ses_ur = (UriRef) sub;
-                String s = ((UriRef) sub).getUnicodeString();
+            BlankNodeOrIRI sub = ta.getSubject();
+            if (sub instanceof IRI) {
+                IRI ses_ur = (IRI) sub;
+                String s = ((IRI) sub).getUnicodeString();
                 String prefix = _NS_STANBOL_INTERNAL + Session.shortName + "/";
                 if (s.startsWith(prefix)) {
                     String sessionId = s.substring(prefix.length());
@@ -832,27 +797,27 @@
                     // retrieve the ontologies
                     if (ses_ur != null) {
                         for (Iterator<Triple> it2 = meta.filter(ses_ur, MANAGES_URIREF, null); it2.hasNext();) {
-                            Resource obj = it2.next().getObject();
-                            if (obj instanceof UriRef) sessionOntologies.get(sessionId).add(
-                                keymap.buildPublicKey((UriRef) obj) // FIXME must be very temporary!
-                                    // ((UriRef) obj).getUnicodeString()
+                            RDFTerm obj = it2.next().getObject();
+                            if (obj instanceof IRI) sessionOntologies.get(sessionId).add(
+                                keymap.buildPublicKey((IRI) obj) // FIXME must be very temporary!
+                                    // ((IRI) obj).getUnicodeString()
                                     );
 
                         }
                         for (Iterator<Triple> it2 = meta.filter(null, IS_MANAGED_BY_URIREF, ses_ur); it2
                                 .hasNext();) {
-                            Resource subj = it2.next().getSubject();
-                            if (subj instanceof UriRef) sessionOntologies.get(sessionId).add(
-                                keymap.buildPublicKey((UriRef) subj) // FIXME must be very temporary!
-                                    // ((UriRef) subj).getUnicodeString()
+                            RDFTerm subj = it2.next().getSubject();
+                            if (subj instanceof IRI) sessionOntologies.get(sessionId).add(
+                                keymap.buildPublicKey((IRI) subj) // FIXME must be very temporary!
+                                    // ((IRI) subj).getUnicodeString()
                                     );
 
                         }
                         for (Iterator<Triple> it2 = meta.filter(null, APPENDED_TO_URIREF, ses_ur); it2
                                 .hasNext();) {
-                            Resource subj = it2.next().getSubject();
-                            if (subj instanceof UriRef) {
-                                String s1 = ((UriRef) subj).getUnicodeString();
+                            RDFTerm subj = it2.next().getSubject();
+                            if (subj instanceof IRI) {
+                                String s1 = ((IRI) subj).getUnicodeString();
                                 String prefix1 = _NS_STANBOL_INTERNAL + Scope.shortName + "/";
                                 if (s1.startsWith(prefix1)) {
                                     String scopeId = s1.substring(prefix1.length());
@@ -862,9 +827,9 @@
                         }
                         for (Iterator<Triple> it2 = meta.filter(ses_ur, HAS_APPENDED_URIREF, null); it2
                                 .hasNext();) {
-                            Resource obj = it2.next().getObject();
-                            if (obj instanceof UriRef) {
-                                String s1 = ((UriRef) obj).getUnicodeString();
+                            RDFTerm obj = it2.next().getObject();
+                            if (obj instanceof IRI) {
+                                String s1 = ((IRI) obj).getUnicodeString();
                                 String prefix1 = _NS_STANBOL_INTERNAL + Scope.shortName + "/";
                                 if (s1.startsWith(prefix1)) {
                                     String scopeId = s1.substring(prefix1.length());
@@ -889,7 +854,7 @@
     @Override
     @Deprecated
     public String getPublicKey(OWLOntologyID ontologyId) {
-        UriRef ur = keymap.getMapping(ontologyId);
+        IRI ur = keymap.getMapping(ontologyId);
         log.debug("key for {} is {}", ontologyId, ur);
         return (ur == null) ? null : ur.getUnicodeString();
     }
@@ -907,14 +872,14 @@
 
     @Override
     @Deprecated
-    public <O> O getStoredOntology(IRI reference, Class<O> returnType) {
+    public <O> O getStoredOntology(org.semanticweb.owlapi.model.IRI reference, Class<O> returnType) {
         // reference = URIUtils.sanitizeID(reference);
         return getStoredOntology(new OWLOntologyID(reference), returnType);
     }
 
     @Override
     @Deprecated
-    public <O> O getStoredOntology(IRI reference, Class<O> returnType, boolean merge) {
+    public <O> O getStoredOntology(org.semanticweb.owlapi.model.IRI reference, Class<O> returnType, boolean merge) {
         // reference = URIUtils.sanitizeID(reference);
         return getStoredOntology(new OWLOntologyID(reference), returnType, merge);
     }
@@ -948,7 +913,7 @@
     }
 
     /**
-     * In this implementation the identifier is the Graph Name (e.g. ontonet::blabla)
+     * In this implementation the identifier is the graph name (e.g. ontonet::blabla)
      */
     @SuppressWarnings("unchecked")
     @Override
@@ -972,20 +937,20 @@
                         + " is not allowed in this implementation. Only allowed return types are "
                         + supported);
 
-        TripleCollection tc = store.getTriples(new UriRef(identifier));
+        Graph tc = store.getGraph(new IRI(identifier));
         if (tc == null) return null;
         /*
          * The ontology provider itself does not wrap the returned object into an in-memory graph, therefore
          * any direct modifications will be propagated. Collectors should wrap them, though. To change this
          * behaviour, uncomment the line below.
          */
-        // tc = new SimpleMGraph(tc);
+        // tc = new SimpleGraph(tc);
 
-        if (TripleCollection.class.equals(returnType) || MGraph.class.isAssignableFrom(returnType)) {
+        if (Graph.class.isAssignableFrom(returnType)) {
             return returnType.cast(tc);
         } else if (OWLOntology.class.isAssignableFrom(returnType)) {
             try {
-                return (O) toOWLOntology(new UriRef(identifier), forceMerge);
+                return (O) toOWLOntology(new IRI(identifier), forceMerge);
             } catch (OWLOntologyCreationException e) {
                 log.error(
                     "Failed to return stored ontology " + identifier + " as type "
@@ -1002,7 +967,7 @@
     }
 
     @Override
-    public boolean hasOntology(IRI ontologyIri) {
+    public boolean hasOntology(org.semanticweb.owlapi.model.IRI ontologyIri) {
         // ontologyIri = URIUtils.sanitizeID(ontologyIri);
         return hasOntology(new OWLOntologyID(ontologyIri));
     }
@@ -1019,9 +984,9 @@
         if (publicKey == null || publicKey.isAnonymous()) throw new IllegalArgumentException(
                 "Cannot check for an anonymous ontology.");
         if (!new MetaGraphManager(tcManager, keymap.graph).exists(publicKey)) return Status.NO_MATCH;
-        UriRef graphName = keymap.getMapping(publicKey);
+        IRI graphName = keymap.getMapping(publicKey);
         if (graphName == null) return Status.UNCHARTED;
-        if (store.listTripleCollections().contains(graphName)) return Status.MATCH;
+        if (store.listGraphs().contains(graphName)) return Status.MATCH;
         else return Status.ORPHAN;
     }
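Hedged summary of the four outcomes above, assuming the check is exposed as something like getStatus(publicKey) (the accessor name is an assumption; the Status constants are not):

    switch (getStatus(publicKey)) {
        case NO_MATCH:  /* no metadata entry for this key */           break;
        case UNCHARTED: /* metadata exists, but no graph mapping */    break;
        case MATCH:     /* the mapped graph is present in the store */ break;
        case ORPHAN:    /* the mapped graph is gone from the store */  break;
    }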
 
@@ -1053,18 +1018,18 @@
 
     protected void computeAliasClosure(OWLOntologyID publicKey, Set<OWLOntologyID> target) {
         target.add(publicKey);
-        TripleCollection meta = getMetaGraph(TripleCollection.class);
-        UriRef ont = keymap.buildResource(publicKey);
-        Set<Resource> resources = new HashSet<Resource>();
+        Graph meta = getMetaGraph(Graph.class);
+        IRI ont = keymap.buildResource(publicKey);
+        Set<RDFTerm> resources = new HashSet<RDFTerm>();
         // Forwards
         for (Iterator<Triple> it = meta.filter(ont, OWL.sameAs, null); it.hasNext();)
             resources.add(it.next().getObject());
         // Backwards
         for (Iterator<Triple> it = meta.filter(null, OWL.sameAs, ont); it.hasNext();)
             resources.add(it.next().getSubject());
-        for (Resource r : resources)
-            if (r instanceof UriRef) {
-                OWLOntologyID newKey = keymap.buildPublicKey((UriRef) r);
+        for (RDFTerm r : resources)
+            if (r instanceof IRI) {
+                OWLOntologyID newKey = keymap.buildPublicKey((IRI) r);
                 if (!target.contains(newKey)) computeAliasClosure(newKey, target);
             }
     }
@@ -1085,8 +1050,8 @@
     public SortedSet<OWLOntologyID> listOrphans() {
         SortedSet<OWLOntologyID> result = new TreeSet<OWLOntologyID>();
         for (OWLOntologyID key : descriptor.getPublicKeys()) {
-            UriRef graphName = keymap.getMapping(key);
-            if (graphName == null || !store.listTripleCollections().contains(graphName)) result.add(key);
+            IRI graphName = keymap.getMapping(key);
+            if (graphName == null || !store.listGraphs().contains(graphName)) result.add(key);
         }
         return result;
     }
@@ -1101,7 +1066,7 @@
     }
 
     @Override
-    public Set<OWLOntologyID> listVersions(IRI ontologyIri) {
+    public Set<OWLOntologyID> listVersions(org.semanticweb.owlapi.model.IRI ontologyIri) {
         return keymap.getVersions(ontologyIri);
     }
 
@@ -1117,20 +1082,20 @@
 
         // This method only tries the supplied format once.
         log.debug("Trying to parse data stream with format {}", formatIdentifier);
-        TripleCollection rdfData = parser.parse(data, formatIdentifier);
+        Graph rdfData = parser.parse(data, formatIdentifier);
         log.debug("SUCCESS format {}.", formatIdentifier);
         return loadInStore(rdfData, force, references);
     }
 
     @Override
-    public OWLOntologyID loadInStore(final IRI ontologyIri,
+    public OWLOntologyID loadInStore(final org.semanticweb.owlapi.model.IRI ontologyIri,
                                      String formatIdentifier,
                                      boolean force,
                                      Origin<?>... origins) throws IOException {
         log.debug("Loading {}", ontologyIri);
         if (ontologyIri == null) throw new IllegalArgumentException("Ontology IRI cannot be null.");
 
-        IRI location = null;
+        org.semanticweb.owlapi.model.IRI location = null;
         if (force) location = null;
         else for (OWLOntologyIRIMapper mapper : mappers) {
             location = mapper.getDocumentIRI(ontologyIri);
@@ -1162,8 +1127,6 @@
                 if (sup != null && !formats.contains(sup)) formats.add(sup);
         }
 
-        log.debug("Will try {} supported formats", formats.size());
-
         for (String currentFormat : formats) {
             try {
                 final URLConnection con = location.toURI().toURL().openConnection();
@@ -1183,22 +1146,15 @@
             } catch (UnsupportedFormatException e) {
                 log.debug("FAILURE format {} (unsupported). Trying next one.", currentFormat);
                 continue;
-            } catch (OntologyLoadingException e) {
-                throw new OntologyLoadingException(e);
             } catch (Exception e) {
-                // From here we should only be expecting parser-specific exceptions.
-                log.debug("FAILURE format {} (most likely a parse error). Will try next one.", currentFormat);
-                log.debug("Logged exception was a {} : {}", e.getClass(), e.getLocalizedMessage());
-                log.trace("Stack trace follows:", e);
+                log.debug("FAILURE format {} (parse error). Will try next one.", currentFormat);
                 continue;
             }
         }
 
         // No parser worked, return null.
         log.error("All parsers failed, giving up.");
-        log.error("Failing location was <{}>", location);
-        throw new OntologyLoadingException("Failed to parse an ontology from location <" + location + ">");
-        // return null;
+        return null;
     }
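With the loading exception gone, a null return now signals that every parser failed. A hedged usage sketch (the location is illustrative; a null format identifier makes the loop above try every supported format, and the call still declares IOException):

    OWLOntologyID key = loadInStore(
        org.semanticweb.owlapi.model.IRI.create("http://example.org/onto.rdf"), null, false);
    if (key == null) {
        // every registered parser failed on this location
    }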
 
     @Override
@@ -1208,17 +1164,17 @@
         checkReplaceability(origins);
         long before = System.currentTimeMillis();
 
-        TripleCollection targetGraph; // The final graph
-        TripleCollection rdfData; // The supplied ontology converted to TripleCollection
+        Graph targetGraph; // The final graph
+        Graph rdfData; // The supplied ontology converted to Graph
 
         if (ontology instanceof OWLOntology) {
             // This will be in memory!
-            rdfData = OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph((OWLOntology) ontology);
-        } else if (ontology instanceof TripleCollection) {
+            rdfData = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph((OWLOntology) ontology);
+        } else if (ontology instanceof Graph) {
             // This might be in memory or in persistent storage.
-            rdfData = (TripleCollection) ontology;
+            rdfData = (Graph) ontology;
         } else throw new UnsupportedOperationException(
-                "This ontology provider can only accept objects assignable to " + TripleCollection.class
+                "This ontology provider can only accept objects assignable to " + Graph.class
                         + " or " + OWLOntology.class);
 
         // XXX Force is ignored for the content, but the imports?
@@ -1229,9 +1185,9 @@
         /*
          * Compute aliases
          */
-        UriRef graphName = null;
+        IRI graphName = null;
         List<OWLOntologyID> overrides = new ArrayList<OWLOntologyID>(); // Priority aliases.
-        List<IRI> sources = new ArrayList<IRI>(); // Second-choice aliases.
+        List<org.semanticweb.owlapi.model.IRI> sources = new ArrayList<org.semanticweb.owlapi.model.IRI>(); // Second-choice aliases.
 
         // Scan origins ONCE.
         for (int i = 0; i < origins.length; i++) {
@@ -1259,13 +1215,13 @@
                     overrides.add(key);
                     log.debug(" ... assigned as a priority alias for {}", primaryKey);
                 }
-            } else if (ref instanceof IRI) {
-                sources.add((IRI) ref);
+            } else if (ref instanceof org.semanticweb.owlapi.model.IRI) {
+                sources.add((org.semanticweb.owlapi.model.IRI) ref);
                 log.debug(" ... assigned as a secondary alias (source) for {}", primaryKey);
-            } else if (ref instanceof UriRef) {
-                if (graphName != null) log.warn("Graph name already assigned as {}. Skipping.", graphName);
+            } else if (ref instanceof IRI) {
+                if (graphName != null) log.warn("Graph name already assigned as {}. Skipping.", graphName);
                 else {
-                    graphName = (UriRef) ref;
+                    graphName = (IRI) ref;
                     log.debug(" ... assigned as a graph name for {}", primaryKey);
                 }
             } else {
@@ -1280,34 +1236,34 @@
 
         if (primaryKey == null) // No overrides, no extracted ID.
         {
-            IRI z;
+            org.semanticweb.owlapi.model.IRI z;
             // The first IRI found becomes the primary key.
             if (!sources.isEmpty()) z = sources.iterator().next();
             else // Try the graph name
-            if (graphName != null) z = IRI.create(graphName.getUnicodeString());
+            if (graphName != null) z = org.semanticweb.owlapi.model.IRI.create(graphName.getUnicodeString());
             else // Extrema ratio : compute a timestamped primary key.
-            z = IRI.create(getClass().getCanonicalName() + "-time:" + System.currentTimeMillis());
+            z = org.semanticweb.owlapi.model.IRI.create(getClass().getCanonicalName() + "-time:" + System.currentTimeMillis());
             primaryKey = new OWLOntologyID(z);
         }
 
         // Check if it is possible to avoid reloading the ontology content from its source.
         boolean mustLoad = true;
-        if (!force && graphName != null && store.listTripleCollections().contains(graphName)) {
+        if (!force && graphName != null && store.listGraphs().contains(graphName)) {
             boolean condition = true; // Any failed check will abort the scan.
             // Check if the extracted ontology ID matches that of the supplied graph.
             // XXX note that anonymous ontologies should be considered a match... or should they not?
-            TripleCollection tc = store.getTriples(graphName);
+            Graph tc = store.getGraph(graphName);
             OWLOntologyID idFromStore = OWLUtils.extractOntologyID(tc);
             condition &= (extractedId == null && idFromStore == null) || extractedId.equals(idFromStore);
             // Finally, a size check
             // FIXME not a good policy for graphs that change without altering the size.
-            if (condition && rdfData instanceof TripleCollection) condition &= tc.size() == rdfData.size();
+            if (condition && rdfData instanceof Graph) condition &= tc.size() == rdfData.size();
             mustLoad &= !condition;
         }
 
         if (!mustLoad && graphName != null) {
-            log.debug("Graph with ID {} already in store. Default action is to skip storage.", graphName);
-            targetGraph = store.getTriples(graphName);
+            log.debug("Graph with ID {} already in store. Default action is to skip storage.", graphName);
+            targetGraph = store.getGraph(graphName);
         } else {
             String iri = null;
             if (primaryKey.getOntologyIRI() != null) iri = primaryKey.getOntologyIRI().toString();
@@ -1315,13 +1271,13 @@
             // s will become the graph name
             String s = (iri.startsWith(prefix + "::")) ? "" : (prefix + "::");
             s += iri;
-            graphName = new UriRef(URIUtils.sanitize(s));
+            graphName = new IRI(URIUtils.sanitize(s));
             log.debug("Storing ontology with graph ID {}", graphName);
             try {
-                targetGraph = store.createMGraph(graphName);
+                targetGraph = store.createGraph(graphName);
             } catch (EntityAlreadyExistsException e) {
-                if (graphName.equals(e.getEntityName())) targetGraph = store.getMGraph(graphName);
-                else targetGraph = store.createMGraph(graphName);
+                if (graphName.equals(e.getEntityName())) targetGraph = store.getGraph(graphName);
+                else targetGraph = store.createGraph(graphName);
             }
             targetGraph.addAll(rdfData);
         }
@@ -1338,11 +1294,11 @@
             // TODO map unversioned ID as well?
             Triple t = new TripleImpl(keymap.buildResource(primaryKey), SIZE_IN_TRIPLES_URIREF,
                     LiteralFactory.getInstance().createTypedLiteral(Integer.valueOf(rdfData.size())));
-            getMetaGraph(MGraph.class).add(t);
+            getMetaGraph(Graph.class).add(t);
         }
 
         // Add aliases.
-        for (IRI source : sources)
+        for (org.semanticweb.owlapi.model.IRI source : sources)
             if (source != null) overrides.add(new OWLOntologyID(source));
         for (OWLOntologyID alias : overrides)
             if (alias != null && !alias.equals(primaryKey)) {
@@ -1350,61 +1306,43 @@
                 mappedIds += " , " + alias;
             }
 
-        // Resolve imports.
         // Do this AFTER registering the ontology, otherwise import cycles will cause infinite loops.
         if (resolveImports) {
             // Scan resources of type owl:Ontology, but only get the first.
-            NonLiteral ontologySubject = null;
-            List<UriRef> importTargets = new LinkedList<UriRef>();
-            Lock l = null; // There could be locking iterators...
-            if (targetGraph instanceof LockableMGraph) {
-                l = ((LockableMGraph) targetGraph).getLock().readLock();
-                l.lock();
-            }
-            try {
-                Iterator<Triple> it = targetGraph.filter(null, RDF.type, OWL.Ontology);
-                if (it.hasNext()) ontologySubject = it.next().getSubject();
-                if (ontologySubject != null) {
-                    // Scan import statements for the one owl:Ontology considered.
-                    it = targetGraph.filter(ontologySubject, OWL.imports, null);
-                    while (it.hasNext()) {
-                        Resource obj = it.next().getObject();
-                        if (obj instanceof UriRef) importTargets.add((UriRef) obj);
-                    }
-                }
-            } finally {
-                if (l != null) l.unlock();
-            }
-            for (UriRef importTgt : importTargets)
-                try {
-                    log.info("Resolving import target {}", importTgt);
-                    OWLOntologyID id = new OWLOntologyID(IRI.create(importTgt.getUnicodeString()));
-                    if (keymap.getMapping(id) == null) { // Check if it's not there already.
-                        if (isOfflineMode()) throw new RuntimeException(
-                                "Cannot load imported ontology " + importTgt
-                                        + " while Stanbol is in offline mode.");
-                        // TODO manage origins for imported ontologies too?
-                        try {
-                            IRI irimp = IRI.create(importTgt.getUnicodeString());
-                            OWLOntologyID id2 = loadInStore(irimp, null, false);
+            Iterator<Triple> it = targetGraph.filter(null, RDF.type, OWL.Ontology);
+            if (it.hasNext()) {
+                // Scan import statements for the one owl:Ontology considered.
+                Iterator<Triple> it2 = targetGraph.filter(it.next().getSubject(), OWL.imports, null);
+                while (it2.hasNext()) {
+                    RDFTerm obj = it2.next().getObject();
+                    log.info("Resolving import target {}", obj);
+                    if (obj instanceof IRI) try {
+                        // TODO try locals first
+                        IRI target = (IRI) obj;
+                        OWLOntologyID id = new OWLOntologyID(org.semanticweb.owlapi.model.IRI.create(target.getUnicodeString()));
+                        if (keymap.getMapping(id) == null) { // Check if it's not there already.
+                            if (isOfflineMode()) throw new RuntimeException(
+                                    "Cannot load imported ontology " + obj
+                                            + " while Stanbol is in offline mode.");
+                            // TODO manage origins for imported ontologies too?
+                            OWLOntologyID id2 = loadInStore(
+                                org.semanticweb.owlapi.model.IRI.create(target.getUnicodeString()), null, false);
                             if (id2 != null) id = id2;
-                            log.info("<== SUCCESS");
-                        } catch (OntologyLoadingException e) {
-                            log.warn("<== FAIL");
-                            if (failMissingImports) throw e;
-                            else log.warn("Import from IRI <{}> failed, but will not abort due to permissive failed import handling set for this ontology provider.");
+                            log.info("Import {} resolved.", obj);
+                        } else {
+                            log.info("Requested import already stored. Setting dependency only.");
                         }
-                    } else {
-                        log.info("Requested import already stored. Setting dependency only.");
+                        descriptor.setDependency(primaryKey, id);
+                    } catch (UnsupportedFormatException e) {
+                        log.warn("Failed to parse format for resource " + obj, e);
+                        // / XXX configure to continue?
+                    } catch (IOException e) {
+                        log.warn("Failed to load ontology from resource " + obj, e);
+                        // / XXX configure to continue?
                     }
-                    descriptor.setDependency(primaryKey, id);
-                } catch (UnsupportedFormatException e) {
-                    log.warn("Failed to parse format for resource " + importTgt, e);
-                    // / XXX configure to continue?
-                } catch (IOException e) {
-                    log.warn("Failed to load ontology from resource " + importTgt, e);
-                    // / XXX configure to continue?
                 }
+            }
         }
 
         log.debug(" Ontology {}", mappedIds);
@@ -1421,14 +1359,14 @@
 
         if (descriptor.getDependents(publicKey).isEmpty() && descriptor.getHandles(publicKey).isEmpty()) {
 
-            UriRef graphName = keymap.getMapping(publicKey);
+            IRI graphName = keymap.getMapping(publicKey);
 
             // TODO propagate everything to the descriptor
             descriptor.clearDependencies(publicKey); // release dependencies
             keymap.registerOntologyDeletion(publicKey); // remove metadata
 
             // Now the actual deletion
-            store.deleteTripleCollection(graphName);
+            store.deleteGraph(graphName);
 
             return true;
         } else throw new OntologyHandleException("There are ontologies or collectors depending on "
@@ -1442,7 +1380,7 @@
     }
 
     @Override
-    public void setLocatorMapping(IRI locator, OWLOntologyID publicKey) {
+    public void setLocatorMapping(org.semanticweb.owlapi.model.IRI locator, OWLOntologyID publicKey) {
         if (publicKey == null || publicKey.isAnonymous()) throw new IllegalArgumentException(
                 "key must be non-null and non-anonymous.");
         log.info("Setting {} as the resource locator for ontology {}", locator, publicKey);
@@ -1450,17 +1388,17 @@
     }
 
     @Override
-    public void setLocatorMapping(IRI locator, String key) {
+    public void setLocatorMapping(org.semanticweb.owlapi.model.IRI locator, String key) {
         if (key == null || key.isEmpty()) throw new IllegalArgumentException(
                 "key must be non-null and non-empty.");
-        if (!store.listTripleCollections().contains(new UriRef(key))) throw new IllegalArgumentException(
+        if (!store.listGraphs().contains(new IRI(key))) throw new IllegalArgumentException(
                 "No ontology found with storage key " + key);
         if (locator == null) log
                 .warn(
                     "Setting null locator for {}. This will remove all physical mappings for the corresponding graph.",
                     key);
         else log.info("Setting {} as the resource locator for ontology {}", locator, key);
-        keymap.mapLocator(locator, new UriRef(key));
+        keymap.mapLocator(locator, new IRI(key));
     }
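Usage sketch for locator mappings (storage key and location are illustrative); per the warning above, a null locator clears all physical mappings for the graph:

    setLocatorMapping(
        org.semanticweb.owlapi.model.IRI.create("http://example.org/onto.rdf"),
        "ontonet::http://example.org/onto");
    setLocatorMapping(null, "ontonet::http://example.org/onto"); // removes physical mappings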
 
     protected void checkReplaceability(Origin<?>... origins) {
@@ -1480,32 +1418,28 @@
      * @return
      * @throws OWLOntologyCreationException
      */
-    protected OWLOntology toOWLOntology(UriRef graphName, boolean forceMerge) throws OWLOntologyCreationException {
+    protected OWLOntology toOWLOntology(IRI graphName, boolean forceMerge) throws OWLOntologyCreationException {
 
         log.debug("Exporting graph to OWLOntology");
-        log.debug(" -- Graph name : {}", graphName);
+        log.debug(" -- Graph name : {}", graphName);
         OWLOntologyManager mgr = OWLManager.createOWLOntologyManager();
         // Never try to import
-        mgr.addIRIMapper(new PhonyIRIMapper(Collections.<IRI> emptySet()));
+        mgr.addIRIMapper(new PhonyIRIMapper(Collections.<org.semanticweb.owlapi.model.IRI> emptySet()));
 
         Set<OWLOntologyID> loaded = new HashSet<OWLOntologyID>();
-        TripleCollection graph = store.getTriples(graphName);
-        UriRef ontologyId = null;
+        Graph graph = store.getGraph(graphName);
+        IRI ontologyId = null;
 
         // Get the id of this ontology.
         Iterator<Triple> itt = graph.filter(null, RDF.type, OWL.Ontology);
         if (itt.hasNext()) {
-            NonLiteral nl = itt.next().getSubject();
-            if (nl instanceof UriRef) ontologyId = (UriRef) nl;
+            BlankNodeOrIRI nl = itt.next().getSubject();
+            if (nl instanceof IRI) ontologyId = (IRI) nl;
         }
         List<OWLOntologyID> revImps = new Stack<OWLOntologyID>();
         List<OWLOntologyID> lvl1 = new Stack<OWLOntologyID>();
 
-        try {
-            fillImportsReverse(keymap.getReverseMapping(graphName), revImps, lvl1);
-        } catch (OntologyHandleException e) {
-            throw new OWLOntologyCreationException(e);
-        }
+        fillImportsReverse(keymap.getReverseMapping(graphName), revImps, lvl1);
 
         // If not set to merge (either by policy of by force), adopt the set import policy.
         if (!forceMerge && !ImportManagementPolicy.MERGE.equals(getImportManagementPolicy())) {
@@ -1545,20 +1479,20 @@
             }
 
             // FIXME when there's more than one ontology, this way of merging them seems inefficient...
-            TripleCollection tempGraph = new IndexedMGraph();
+            Graph tempGraph = new IndexedGraph();
             // The set of triples that will be excluded from the merge
             Set<Triple> exclusions = new HashSet<Triple>();
             // Examine all reverse imports
             for (OWLOntologyID ref : revImps)
                 if (!loaded.contains(ref)) {
                     // Get the triples
-                    TripleCollection imported =
+                    Graph imported =
                     // store.getTriples(ref);
-                    getStoredOntology(getKey(ref), MGraph.class, false);
+                    getStoredOntology(getKey(ref), Graph.class, false);
                     // For each owl:Ontology
                     Iterator<Triple> remove = imported.filter(null, RDF.type, OWL.Ontology);
                     while (remove.hasNext()) {
-                        NonLiteral subj = remove.next().getSubject();
+                        BlankNodeOrIRI subj = remove.next().getSubject();
                         /*
                          * If it's not the root ontology, trash all its triples. If the root ontology is
                          * anonymous, all ontology annotations are to be trashed without distinction.
diff --git a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/MetaGraphManager.java b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/MetaGraphManager.java
index bdd9e94..e706874 100644
--- a/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/MetaGraphManager.java
+++ b/ontologymanager/multiplexer/clerezza/src/main/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/ontology/MetaGraphManager.java
@@ -22,18 +22,17 @@
 
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.LiteralFactory;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TypedLiteral;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.rdf.core.LiteralFactory;
 import org.apache.clerezza.rdf.ontologies.OWL;
 import org.apache.clerezza.rdf.ontologies.RDF;
 import org.apache.stanbol.ontologymanager.servicesapi.util.OntologyUtils;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntologyID;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -47,47 +46,47 @@
  */
 class MetaGraphManager {
 
-    private MGraph graph;
+    private Graph graph;
 
     private Logger log = LoggerFactory.getLogger(getClass());
 
     private TcManager tcManager;
 
-    public MetaGraphManager(TcManager tcManager, MGraph graph) {
+    public MetaGraphManager(TcManager tcManager, Graph graph) {
         this.tcManager = tcManager;
         this.graph = graph;
     }
 
-    protected UriRef buildResource(final OWLOntologyID publicKey) {
+    protected IRI buildResource(final OWLOntologyID publicKey) {
         if (publicKey == null) throw new IllegalArgumentException(
-                "Cannot build a UriRef resource on a null public key!");
-        // The UriRef is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional?)
+                "Cannot build a IRI resource on a null public key!");
+        // The IRI is of the form ontologyIRI[:::versionIRI] (TODO use something less conventional?)
         // XXX should versionIRI also include the version IRI set by owners? Currently not
 
         // Remember not to sanitize logical identifiers.
-        IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
+        org.semanticweb.owlapi.model.IRI ontologyIri = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
         if (ontologyIri == null) throw new IllegalArgumentException(
-                "Cannot build a UriRef resource on an anonymous public key!");
+                "Cannot build a IRI resource on an anonymous public key!");
         log.debug("Searching for a meta graph entry for public key:");
         log.debug(" -- {}", publicKey);
-        UriRef match = null;
+        IRI match = null;
         LiteralFactory lf = LiteralFactory.getInstance();
-        TypedLiteral oiri = lf.createTypedLiteral(new UriRef(ontologyIri.toString()));
-        TypedLiteral viri = versionIri == null ? null : lf.createTypedLiteral(new UriRef(versionIri
+        Literal oiri = lf.createTypedLiteral(new IRI(ontologyIri.toString()));
+        Literal viri = versionIri == null ? null : lf.createTypedLiteral(new IRI(versionIri
                 .toString()));
         for (Iterator<Triple> it = graph.filter(null, HAS_ONTOLOGY_IRI_URIREF, oiri); it.hasNext();) {
-            Resource subj = it.next().getSubject();
+            RDFTerm subj = it.next().getSubject();
             log.debug(" -- Ontology IRI match found. Scanning");
-            log.debug(" -- Resource : {}", subj);
-            if (!(subj instanceof UriRef)) {
+            log.debug(" -- RDFTerm : {}", subj);
+            if (!(subj instanceof IRI)) {
                 log.debug(" ---- (uncomparable: skipping...)");
                 continue;
             }
             if (viri != null) {
                 // Must find matching versionIRI
-                if (graph.contains(new TripleImpl((UriRef) subj, HAS_VERSION_IRI_URIREF, viri))) {
+                if (graph.contains(new TripleImpl((IRI) subj, HAS_VERSION_IRI_URIREF, viri))) {
                     log.debug(" ---- Version IRI match!");
-                    match = (UriRef) subj;
+                    match = (IRI) subj;
                     break; // Found
                 } else {
                     log.debug(" ---- Expected version IRI match not found.");
@@ -96,32 +95,32 @@
 
             } else {
                 // Must find unversioned resource
-                if (graph.filter((UriRef) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
+                if (graph.filter((IRI) subj, HAS_VERSION_IRI_URIREF, null).hasNext()) {
                     log.debug(" ---- Unexpected version IRI found. Skipping.");
                     continue;
                 } else {
                     log.debug(" ---- Unversioned match!");
-                    match = (UriRef) subj;
+                    match = (IRI) subj;
                     break; // Found
                 }
             }
         }
-        log.debug("Matching UriRef in graph : {}", match);
-        if (match == null) return new UriRef(OntologyUtils.encode(publicKey));
+        log.debug("Matching IRI in graph : {}", match);
+        if (match == null) return new IRI(OntologyUtils.encode(publicKey));
         else return match;
 
     }
 
     public boolean exists(final OWLOntologyID publicKey) {
-        UriRef publicKeyUriRef = new UriRef(OntologyUtils.encode(publicKey));
-        if (graph.filter(publicKeyUriRef, RDF.type, ENTRY_URIREF).hasNext()) return true;
-        if (graph.filter(publicKeyUriRef, OWL.sameAs, null).hasNext()) return true;
+        IRI publicKeyIRI = new IRI(OntologyUtils.encode(publicKey));
+        if (graph.filter(publicKeyIRI, RDF.type, ENTRY_URIREF).hasNext()) return true;
+        if (graph.filter(publicKeyIRI, OWL.sameAs, null).hasNext()) return true;
         return false;
     }
 
     public void updateAddAlias(OWLOntologyID subject, OWLOntologyID object) {
         // For now add both owl:sameAs statements
-        UriRef suben = buildResource(subject), oben = buildResource(object);
+        IRI suben = buildResource(subject), oben = buildResource(object);
         synchronized (graph) {
             graph.add(new TripleImpl(suben, OWL.sameAs, oben));
             graph.add(new TripleImpl(oben, OWL.sameAs, suben));
@@ -133,14 +132,14 @@
         if (publicKey == null || publicKey.isAnonymous()) throw new IllegalArgumentException(
                 "An anonymous ontology cannot be mapped. A non-anonymous ontology ID must be forged in these cases.");
         Triple tType, tHasOiri = null, tHasViri = null;
-        IRI ontologyIRI = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
-        UriRef entry = buildResource(publicKey);
+        org.semanticweb.owlapi.model.IRI ontologyIRI = publicKey.getOntologyIRI(), versionIri = publicKey.getVersionIRI();
+        IRI entry = buildResource(publicKey);
         tType = new TripleImpl(entry, RDF.type, ENTRY_URIREF);
         LiteralFactory lf = LiteralFactory.getInstance();
-        tHasOiri = new TripleImpl(entry, HAS_ONTOLOGY_IRI_URIREF, lf.createTypedLiteral(new UriRef(
+        tHasOiri = new TripleImpl(entry, HAS_ONTOLOGY_IRI_URIREF, lf.createTypedLiteral(new IRI(
                 ontologyIRI.toString())));
         if (versionIri != null) tHasViri = new TripleImpl(entry, HAS_VERSION_IRI_URIREF,
-                lf.createTypedLiteral(new UriRef(versionIri.toString())));
+                lf.createTypedLiteral(new IRI(versionIri.toString())));
         synchronized (graph) {
             graph.add(tType);
             if (tHasViri != null) graph.add(tHasViri);
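
buildResource(...) above matches entries through typed literals that wrap graph names. A condensed sketch of that idiom; the predicate IRI below is illustrative, where the real code statically imports HAS_ONTOLOGY_IRI_URIREF:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.rdf.core.LiteralFactory;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;

    public class TypedLiteralLookup {
        // Illustrative predicate; MetaGraphManager uses HAS_ONTOLOGY_IRI_URIREF instead.
        static final IRI HAS_ONTOLOGY_IRI = new IRI("http://example.org/meta#hasOntologyIRI");

        public static void main(String[] args) {
            Graph meta = new IndexedGraph();
            IRI entry = new IRI("http://example.org/entry/1");
            // TypedLiteral is gone in 1.0; createTypedLiteral(...) yields a plain Literal
            Literal oiri = LiteralFactory.getInstance()
                    .createTypedLiteral(new IRI("http://example.org/onto"));
            meta.add(new TripleImpl(entry, HAS_ONTOLOGY_IRI, oiri));
            System.out.println(meta.contains(new TripleImpl(entry, HAS_ONTOLOGY_IRI, oiri))); // true
        }
    }
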
diff --git a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/TestOntologyNetworkPersistence.java b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/TestOntologyNetworkPersistence.java
index 4f6896e..82d45b7 100644
--- a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/TestOntologyNetworkPersistence.java
+++ b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/collector/TestOntologyNetworkPersistence.java
@@ -34,10 +34,10 @@
 import java.util.Dictionary;
 import java.util.Hashtable;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcProvider;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.simple.storage.SimpleTcProvider;
 import org.apache.stanbol.ontologymanager.core.OfflineConfigurationImpl;
@@ -51,7 +51,6 @@
 import org.apache.stanbol.ontologymanager.sources.clerezza.GraphContentInputSource;
 import org.junit.Before;
 import org.junit.Test;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyID;
 import org.slf4j.Logger;
@@ -82,10 +81,10 @@
     @Test
     public void canRetrieveOntologyImported() throws Exception {
         String pcomics = "http://stanbol.apache.org/ontologies/pcomics/";
-        OWLOntologyID foaf = new OWLOntologyID(IRI.create("http://xmlns.com/foaf/0.1/")), all = new OWLOntologyID(
-                IRI.create(pcomics + "characters_all.owl")), main = new OWLOntologyID(
-                IRI.create(pcomics + "maincharacters.owl")), minor = new OWLOntologyID(
-                IRI.create(pcomics + "minorcharacters.owl"));
+        OWLOntologyID foaf = new OWLOntologyID(org.semanticweb.owlapi.model.IRI.create("http://xmlns.com/foaf/0.1/")), all = new OWLOntologyID(
+                org.semanticweb.owlapi.model.IRI.create(pcomics + "characters_all.owl")), main = new OWLOntologyID(
+                org.semanticweb.owlapi.model.IRI.create(pcomics + "maincharacters.owl")), minor = new OWLOntologyID(
+                org.semanticweb.owlapi.model.IRI.create(pcomics + "minorcharacters.owl"));
         OWLOntology oAll, oMain, oMinor, oFoaf;
         final int total = 4;
 
@@ -133,7 +132,7 @@
     @Test
     public void canRetrieveOntologySingleton() throws Exception {
 
-        OWLOntologyID foaf = new OWLOntologyID(IRI.create("http://xmlns.com/foaf/0.1/"));
+        OWLOntologyID foaf = new OWLOntologyID(org.semanticweb.owlapi.model.IRI.create("http://xmlns.com/foaf/0.1/"));
         OWLOntology o1;
 
         // Get the fake FOAF and load it into the ontology provider
@@ -286,18 +285,18 @@
     public void updatesGraphOnSpaceModification() throws Exception {
 
         // Ensure the metadata graph is there.
-        TripleCollection meta = ontologyProvider.getMetaGraph(TripleCollection.class);
+        Graph meta = ontologyProvider.getMetaGraph(Graph.class);
         assertNotNull(meta);
 
         String scopeId = "updateTest";
         Scope scope = onm.createOntologyScope(scopeId, new GraphContentInputSource(getClass()
                 .getResourceAsStream("/ontologies/test1.owl")));
 
-        UriRef collector = new UriRef(_NS_STANBOL_INTERNAL + OntologySpace.shortName + "/"
+        IRI collector = new IRI(_NS_STANBOL_INTERNAL + OntologySpace.shortName + "/"
                                       + scope.getCoreSpace().getID());
-        UriRef test1id = new UriRef("http://stanbol.apache.org/ontologies/test1.owl"); // Has no versionIRI
+        IRI test1id = new IRI("http://stanbol.apache.org/ontologies/test1.owl"); // Has no versionIRI
         // Be strict: the whole property pair must be there.
-        UriRef predicate = MANAGES_URIREF;
+        IRI predicate = MANAGES_URIREF;
         assertTrue(meta.contains(new TripleImpl(collector, predicate, test1id)));
         predicate = IS_MANAGED_BY_URIREF;
         assertTrue(meta.contains(new TripleImpl(test1id, predicate, collector)));
@@ -306,7 +305,7 @@
 
         scope.getCoreSpace().addOntology(
             new GraphContentInputSource(getClass().getResourceAsStream("/ontologies/minorcharacters.owl")));
-        UriRef minorId = new UriRef("http://stanbol.apache.org/ontologies/pcomics/minorcharacters.owl");
+        IRI minorId = new IRI("http://stanbol.apache.org/ontologies/pcomics/minorcharacters.owl");
         predicate = MANAGES_URIREF;
         assertTrue(meta.contains(new TripleImpl(collector, predicate, minorId)));
         predicate = IS_MANAGED_BY_URIREF;
diff --git a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/io/TestStorage.java b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/io/TestStorage.java
index 4fbca80..d447029 100644
--- a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/io/TestStorage.java
+++ b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/io/TestStorage.java
@@ -28,9 +28,9 @@
 import java.util.Iterator;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.utils.GraphNode;
 import org.apache.stanbol.ontologymanager.multiplexer.clerezza.Constants;
 import org.apache.stanbol.ontologymanager.servicesapi.io.OntologyInputSource;
@@ -40,7 +40,6 @@
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLOntology;
 import org.semanticweb.owlapi.model.OWLOntologyID;
 import org.slf4j.Logger;
@@ -65,7 +64,7 @@
     @Test
     public void storageOnScopeCreation() throws Exception {
 
-        assertEquals(1, ontologyProvider.getStore().listTripleCollections().size());
+        assertEquals(1, ontologyProvider.getStore().listGraphs().size());
         // This one has an import that we want to hijack locally, so we use the ParentPathInputSource.
         OntologyInputSource<?> ois = new ParentPathInputSource(new File(getClass().getResource(
             "/ontologies/minorcharacters.owl").toURI()));
@@ -74,16 +73,16 @@
 
         Set<Triple> triples = new HashSet<Triple>();
 
-        for (UriRef iri : ontologyProvider.getStore().listTripleCollections()) {
+        for (IRI iri : ontologyProvider.getStore().listGraphs()) {
             log.info("{}", iri.toString());
-            UriRef entity = new UriRef(Constants.PEANUTS_MINOR_BASE + "#" + Constants.truffles);
-            Graph ctx = new GraphNode(entity, ontologyProvider.getStore().getTriples(iri)).getNodeContext();
+            IRI entity = new IRI(Constants.PEANUTS_MINOR_BASE + "#" + Constants.truffles);
+            ImmutableGraph ctx = new GraphNode(entity, ontologyProvider.getStore().getGraph(iri)).getNodeContext();
             Iterator<Triple> it = ctx.iterator();
             while (it.hasNext())
                 triples.add(it.next());
         }
 
-        assertFalse(ontologyProvider.getStore().listTripleCollections().isEmpty());
+        assertFalse(ontologyProvider.getStore().listGraphs().isEmpty());
         assertEquals(3, triples.size());
 
     }
@@ -95,7 +94,7 @@
     @Test
     public void storedOntologyOutlivesScope() throws Exception {
         String ephemeralScopeId = "CaducousScope";
-        OntologyInputSource<OWLOntology> ois = new RootOntologySource(IRI.create(getClass().getResource(
+        OntologyInputSource<OWLOntology> ois = new RootOntologySource(org.semanticweb.owlapi.model.IRI.create(getClass().getResource(
             "/ontologies/nonexistentcharacters.owl")));
         OWLOntologyID ontologyId = ois.getRootOntology().getOntologyID();
         Scope scope = onManager.createOntologyScope(ephemeralScopeId);
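
For the getNodeContext() call exercised in storageOnScopeCreation: under the 1.0 API it returns an ImmutableGraph. A minimal sketch against a throwaway SimpleGraph, with illustrative IRIs:

    import java.util.Iterator;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.utils.GraphNode;

    public class NodeContextDemo {
        public static void main(String[] args) {
            Graph g = new SimpleGraph();
            IRI entity = new IRI("http://example.org/pcomics#truffles"); // illustrative entity
            g.add(new TripleImpl(entity, new IRI("http://example.org/knows"),
                    new IRI("http://example.org/pcomics#snoopy")));

            // getNodeContext() returns an ImmutableGraph under the 1.0 API
            ImmutableGraph ctx = new GraphNode(entity, g).getNodeContext();
            for (Iterator<Triple> it = ctx.iterator(); it.hasNext();)
                System.out.println(it.next());
        }
    }
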
diff --git a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestAxiomInterpretation.java b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestAxiomInterpretation.java
index d2b0733..dd3d1ec 100644
--- a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestAxiomInterpretation.java
+++ b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestAxiomInterpretation.java
@@ -22,7 +22,7 @@
 
 import java.io.InputStream;
 
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.stanbol.ontologymanager.servicesapi.io.OntologyInputSource;
 import org.apache.stanbol.ontologymanager.servicesapi.scope.Scope;
@@ -51,7 +51,7 @@
         OntologyInputSource<?> custSrc = new GraphContentInputSource(content, SupportedFormat.TURTLE);
         scope.getCustomSpace().addOntology(custSrc);
 
-        Graph g = scope.export(Graph.class, true);
+        ImmutableGraph g = scope.export(ImmutableGraph.class, true);
 
         // for (Triple t : g)
         // System.out.println(t);
diff --git a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestClerezzaSpaces.java b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestClerezzaSpaces.java
index 34e6785..a6a1060 100644
--- a/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestClerezzaSpaces.java
+++ b/ontologymanager/multiplexer/clerezza/src/test/java/org/apache/stanbol/ontologymanager/multiplexer/clerezza/scope/TestClerezzaSpaces.java
@@ -30,8 +30,7 @@
 import java.io.InputStream;
 import java.util.Hashtable;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.stanbol.commons.owl.util.OWLUtils;
 import org.apache.stanbol.ontologymanager.core.OfflineConfigurationImpl;
@@ -70,13 +70,13 @@
 
     private static OntologySpaceFactory factory;
 
-    private static OntologyInputSource<TripleCollection> minorSrc, dropSrc, nonexSrc;
+    private static OntologyInputSource<Graph> minorSrc, dropSrc, nonexSrc;
 
     private static OntologyInputSource<OWLOntology> inMemorySrc;
 
     private static OfflineConfiguration offline;
 
-    private static OntologyInputSource<TripleCollection> getLocalSource(String resourcePath) {
+    private static OntologyInputSource<Graph> getLocalSource(String resourcePath) {
         InputStream is = TestOntologySpaces.class.getResourceAsStream(resourcePath);
         return new GraphSource(parser.parse(is, SupportedFormat.RDF_XML));
     }
@@ -87,8 +87,8 @@
         ScopeRegistry reg = new ScopeRegistryImpl();
 
         // This one is created from scratch
-        MGraph ont2 = ClerezzaOWLUtils.createOntology(baseIri2.toString());
-        minorSrc = new GraphSource(ont2.getGraph());
+        Graph ont2 = ClerezzaOWLUtils.createOntology(baseIri2.toString());
+        minorSrc = new GraphSource(ont2.getImmutableGraph());
         dropSrc = getLocalSource("/ontologies/droppedcharacters.owl");
         nonexSrc = getLocalSource("/ontologies/nonexistentcharacters.owl");
         inMemorySrc = new ParentPathInputSource(new File(TestClerezzaSpaces.class.getResource(
@@ -145,7 +145,7 @@
         OntologySpace space = factory.createCustomOntologySpace(scopeId, dropSrc);
         OWLOntologyID logicalId = null;
         Object o = dropSrc.getRootOntology();
-        if (o instanceof TripleCollection) logicalId = OWLUtils.extractOntologyID((TripleCollection) o);
+        if (o instanceof Graph) logicalId = OWLUtils.extractOntologyID((Graph) o);
         else if (o instanceof OWLOntology) logicalId = OWLUtils.extractOntologyID((OWLOntology) o);
         assertNotNull(logicalId);
         assertTrue(space.hasOntology(logicalId));
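
The instanceof dispatch above now routes Graph values to OWLUtils.extractOntologyID(...). A minimal sketch of that overload on a hand-built graph (the ontology IRI is illustrative):

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.rdf.ontologies.OWL;
    import org.apache.clerezza.rdf.ontologies.RDF;
    import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
    import org.apache.stanbol.commons.owl.util.OWLUtils;
    import org.semanticweb.owlapi.model.OWLOntologyID;

    public class ExtractIdDemo {
        public static void main(String[] args) {
            Graph g = new IndexedGraph();
            // A single owl:Ontology declaration is enough for a logical ID.
            g.add(new TripleImpl(new IRI("http://example.org/onto"), RDF.type, OWL.Ontology));
            OWLOntologyID id = OWLUtils.extractOntologyID(g); // overload now takes Graph
            System.out.println(id);
        }
    }
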
diff --git a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphContentInputSource.java b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphContentInputSource.java
index b607d38..b682d0c 100644
--- a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphContentInputSource.java
+++ b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphContentInputSource.java
@@ -18,14 +18,14 @@
 
 import java.io.InputStream;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.stanbol.ontologymanager.servicesapi.io.OntologyInputSource;
 
 public class GraphContentInputSource extends
         org.apache.stanbol.ontologymanager.sources.clerezza.GraphContentInputSource implements
-        OntologyInputSource<TripleCollection> {
+        OntologyInputSource<Graph> {
 
     public GraphContentInputSource(InputStream content) {
         super(content);
diff --git a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphSource.java b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphSource.java
index 2ff1051..287ae72 100644
--- a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphSource.java
+++ b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/ontonet/api/io/GraphSource.java
@@ -16,8 +16,8 @@
  */
 package org.apache.stanbol.ontologymanager.ontonet.api.io;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 
 @Deprecated
@@ -27,15 +27,15 @@
         super(graphId);
     }
 
-    public GraphSource(TripleCollection graph) {
+    public GraphSource(Graph graph) {
         super(graph);
     }
 
-    public GraphSource(UriRef graphId) {
+    public GraphSource(IRI graphId) {
         super(graphId);
     }
 
-    public GraphSource(UriRef graphId, TcProvider tcProvider) {
+    public GraphSource(IRI graphId, TcProvider tcProvider) {
         super(graphId, tcProvider);
     }
 
diff --git a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/AbstractClerezzaGraphInputSource.java b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/AbstractClerezzaGraphInputSource.java
index 7fe0df1..158e4b3 100644
--- a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/AbstractClerezzaGraphInputSource.java
+++ b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/AbstractClerezzaGraphInputSource.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.ontologymanager.sources.clerezza;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.stanbol.ontologymanager.servicesapi.io.AbstractGenericInputSource;
 import org.apache.stanbol.ontologymanager.servicesapi.io.OntologyInputSource;
@@ -24,7 +24,7 @@
 import org.slf4j.LoggerFactory;
 
 /**
- * Default implementation of an {@link OntologyInputSource} that returns {@link TripleCollection} objects as
+ * Default implementation of an {@link OntologyInputSource} that returns {@link Graph} objects as
  * ontologies.
  * 
  * Subclasses must implement the {@link #getImports(boolean)} method, as the availability of imported
@@ -34,12 +34,12 @@
  * @author alexdma
  * 
  */
-public abstract class AbstractClerezzaGraphInputSource extends AbstractGenericInputSource<TripleCollection> {
+public abstract class AbstractClerezzaGraphInputSource extends AbstractGenericInputSource<Graph> {
 
     protected Logger log = LoggerFactory.getLogger(getClass());
 
     @Override
-    protected void bindRootOntology(TripleCollection ontology) {
+    protected void bindRootOntology(Graph ontology) {
         super.bindRootOntology(ontology);
     }
 
diff --git a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphContentInputSource.java b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphContentInputSource.java
index 61ad50a..c5d11ec 100644
--- a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphContentInputSource.java
+++ b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphContentInputSource.java
@@ -22,14 +22,13 @@
 import java.util.Collections;
 import java.util.Iterator;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedFormatException;
-import org.apache.stanbol.commons.indexedgraph.IndexedMGraph;
+import org.apache.stanbol.commons.indexedgraph.IndexedGraph;
 import org.apache.stanbol.ontologymanager.servicesapi.io.Origin;
 import org.apache.stanbol.ontologymanager.servicesapi.ontology.OntologyLoadingException;
 import org.apache.stanbol.ontologymanager.servicesapi.util.OntologyUtils;
@@ -37,7 +37,7 @@
 import org.slf4j.LoggerFactory;
 
 /**
- * An ontology input source that returns a Clerezza {@link TripleCollection} ({@link Graph} or {@link MGraph})
+ * An ontology input source that returns a Clerezza {@link Graph}
  * after parsing its serialized content from an input stream.
  * 
  * @author alexdma
@@ -128,18 +128,18 @@
         else formats = Collections.singleton(formatIdentifier);
 
         // TODO guess/lookahead the ontology ID and use it in the graph name.
-        UriRef name = new UriRef( /* "ontonet" + "::" + */
+        IRI name = new IRI( /* "ontonet" + "::" + */
         getClass().getCanonicalName() + "-time:" + System.currentTimeMillis());
 
-        TripleCollection graph = null;
+        Graph graph = null;
         if (tcProvider != null && tcProvider != null) {
-            // Graph directly stored in the TcProvider prior to using the source
-            graph = tcProvider.createMGraph(name);
+            // Graph directly stored in the TcProvider prior to using the source
+            graph = tcProvider.createGraph(name);
             bindPhysicalOrigin(Origin.create(name));
             // XXX if addition fails, should rollback procedures also delete the graph?
         } else {
             // In memory graph, will most likely have to be copied afterwards.
-            graph = new IndexedMGraph();
+            graph = new IndexedGraph();
             bindPhysicalOrigin(null);
         }
 
@@ -149,9 +149,9 @@
             String f = itf.next();
             log.debug("Parsing with format {}", f);
             try {
-                parser.parse((MGraph) graph, content, f);
+                parser.parse(graph, content, f);
                 loaded = true;
-                log.info("Graph parsed, has {} triples", graph.size());
+                log.info("ImmutableGraph parsed, has {} triples", graph.size());
             } catch (UnsupportedFormatException e) {
                 log.debug("Parsing format {} failed.", f);
             } catch (Exception e) {
@@ -173,7 +173,7 @@
         } else {
             // Rollback graph creation, if any
             if (tcProvider != null && tcProvider != null) {
-                tcProvider.deleteTripleCollection(name);
+                tcProvider.deleteGraph(name);
                 log.error("Parsing failed. Deleting triple collection {}", name);
             }
             throw new OntologyLoadingException("Parsing failed. Giving up.");
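
The parse-then-rollback flow above can be exercised standalone. A sketch assuming a SimpleTcProvider, an arbitrary graph name, and an inline Turtle snippet:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.core.access.TcProvider;
    import org.apache.clerezza.rdf.core.serializedform.Parser;
    import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
    import org.apache.clerezza.rdf.simple.storage.SimpleTcProvider;

    public class ParseIntoProvider {
        public static void main(String[] args) {
            String turtle = "<http://example.org/s> a <http://www.w3.org/2002/07/owl#Ontology> .";
            InputStream content = new ByteArrayInputStream(turtle.getBytes());

            TcProvider tcp = new SimpleTcProvider();
            IRI name = new IRI("urn:example:parsed"); // arbitrary graph name
            Graph graph = tcp.createGraph(name);      // was createMGraph(name)
            try {
                Parser.getInstance().parse(graph, content, SupportedFormat.TURTLE);
                System.out.println("Graph parsed, has " + graph.size() + " triples");
            } catch (Exception e) {
                tcp.deleteGraph(name); // was deleteTripleCollection(name)
                throw new RuntimeException("Parsing failed. Giving up.", e);
            }
        }
    }
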
diff --git a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphSource.java b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphSource.java
index 08876ca..510c25c 100644
--- a/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphSource.java
+++ b/ontologymanager/sources/clerezza/src/main/java/org/apache/stanbol/ontologymanager/sources/clerezza/GraphSource.java
@@ -16,17 +16,16 @@
  */
 package org.apache.stanbol.ontologymanager.sources.clerezza;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.stanbol.ontologymanager.servicesapi.io.OntologyInputSource;
 import org.apache.stanbol.ontologymanager.servicesapi.io.Origin;
 
 /**
- * An {@link OntologyInputSource} that gets ontologies from either a stored {@link TripleCollection}, or its
+ * An {@link OntologyInputSource} that gets ontologies from either a stored {@link Graph}, or its
  * identifier and an optionally supplied triple collection manager.
  * 
  * @author alexdma
@@ -36,7 +36,7 @@
 
     /**
      * Creates a new input source by querying the default triple collection manager for a graph named with the
-     * supplied <code>graphId</code>. A {@link UriRef} that represents the graph name will also be set as the
+     * supplied <code>graphId</code>. An {@link IRI} that represents the graph name will also be set as the
      * graph origin.
      * 
      * @param graphId
@@ -47,7 +47,7 @@
      *             if no such graph can be found.
      */
     public GraphSource(String graphId) {
-        this(new UriRef(graphId));
+        this(new IRI(graphId));
     }
 
     /**
@@ -56,12 +56,12 @@
      * @param graph
      *            the RDF graph
      * @throws IllegalArgumentException
-     *             if <code>graph</code> is neither a {@link Graph} nor a {@link MGraph}.
+     *             if <code>graph</code> is neither an {@link ImmutableGraph} nor a {@link Graph}.
      */
-    public GraphSource(TripleCollection graph) {
-        if (graph instanceof Graph) bindRootOntology(graph);
-        else if (graph instanceof MGraph) bindRootOntology(((MGraph) graph).getGraph());
-        else throw new IllegalArgumentException("GraphSource supports only Graph and MGraph types. "
+    public GraphSource(Graph graph) {
+        if (graph instanceof ImmutableGraph) bindRootOntology(graph);
+        else if (graph instanceof Graph) bindRootOntology(((Graph) graph).getImmutableGraph());
+        else throw new IllegalArgumentException("GraphSource supports only ImmutableGraph and Graph types. "
                                                 + graph.getClass() + " is not supported.");
         bindPhysicalOrigin(null);
     }
@@ -77,7 +77,7 @@
      * @throws org.apache.clerezza.rdf.core.access.NoSuchEntityException
      *             if no such graph can be found.
      */
-    public GraphSource(UriRef graphId) {
+    public GraphSource(IRI graphId) {
         this(graphId, TcManager.getInstance());
     }
 
@@ -92,8 +92,8 @@
      * @throws org.apache.clerezza.rdf.core.access.NoSuchEntityException
      *             if no such graph can be found in <code>tcProvider</code>.
      */
-    public GraphSource(UriRef graphId, TcProvider tcProvider) {
-        this(tcProvider.getTriples(graphId));
+    public GraphSource(IRI graphId, TcProvider tcProvider) {
+        this(tcProvider.getGraph(graphId));
         bindPhysicalOrigin(Origin.create(graphId));
     }
 
diff --git a/ontologymanager/sources/clerezza/src/test/java/org/apache/stanbol/ontologymanager/sources/clerezza/TestClerezzaInputSources.java b/ontologymanager/sources/clerezza/src/test/java/org/apache/stanbol/ontologymanager/sources/clerezza/TestClerezzaInputSources.java
index a54f65e..b79c077 100644
--- a/ontologymanager/sources/clerezza/src/test/java/org/apache/stanbol/ontologymanager/sources/clerezza/TestClerezzaInputSources.java
+++ b/ontologymanager/sources/clerezza/src/test/java/org/apache/stanbol/ontologymanager/sources/clerezza/TestClerezzaInputSources.java
@@ -24,8 +24,8 @@
 
 import java.io.InputStream;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.simple.storage.SimpleTcProvider;
@@ -57,7 +57,7 @@
 
     private Logger log = LoggerFactory.getLogger(getClass());
 
-    private OntologyInputSource<TripleCollection> src;
+    private OntologyInputSource<Graph> src;
 
     @Before
     public void bind() throws Exception {
@@ -68,7 +68,7 @@
         assertNotNull(src);
         if (usesTcProvider) assertNotNull(src.getOrigin());
         else assertNull(src.getOrigin());
-        TripleCollection o = src.getRootOntology();
+        Graph o = src.getRootOntology();
         assertNotNull(o);
         log.info("Ontology loaded, is a {}", o.getClass().getCanonicalName());
         assertSame(5, o.size()); // The owl:Ontology declaration and versionInfo also count as triples.
@@ -97,11 +97,11 @@
     public void fromInputStreamInSimpleTcProvider() throws Exception {
         InputStream in = getClass().getResourceAsStream(dummy_RdfXml);
         TcProvider tcp = new SimpleTcProvider();
-        assertSame(0, tcp.listTripleCollections().size());
-        int before = tcp.listTripleCollections().size();
+        assertSame(0, tcp.listGraphs().size());
+        int before = tcp.listGraphs().size();
         src = new GraphContentInputSource(in, tcp);
         checkOntology(true);
-        assertSame(before + 1, tcp.listTripleCollections().size());
+        assertSame(before + 1, tcp.listGraphs().size());
     }
 
     /*
@@ -111,10 +111,10 @@
     @Test
     public void fromInputStreamInTcManager() throws Exception {
         InputStream in = getClass().getResourceAsStream(dummy_RdfXml);
-        int before = tcManager.listTripleCollections().size();
+        int before = tcManager.listGraphs().size();
         src = new GraphContentInputSource(in, tcManager);
         checkOntology(true);
-        assertSame(before + 1, tcManager.listTripleCollections().size());
+        assertSame(before + 1, tcManager.listGraphs().size());
     }
 
     /*
@@ -145,54 +145,54 @@
     // @Test
     // public void testGraphContentSource() throws Exception {
     // // Make sure the tc manager has been reset
-    // assertEquals(1, tcManager.listTripleCollections().size());
+    // assertEquals(1, tcManager.listGraphs().size());
     //
     // OntologyProvider<TcProvider> provider = new ClerezzaOntologyProvider(tcManager,
     // new OfflineConfigurationImpl(new Hashtable<String,Object>()), parser);
-    // int tcs = tcManager.listTripleCollections().size();
+    // int tcs = tcManager.listGraphs().size();
     // InputStream content = TestClerezzaInputSources.class
     // .getResourceAsStream("/ontologies/droppedcharacters.owl");
     // OntologyInputSource<?> src = new GraphContentInputSource(content, SupportedFormat.RDF_XML,
     // ontologyProvider.getStore(), parser);
     //
-    // log.info("After input source creation, TcManager has {} graphs. ", tcManager.listTripleCollections()
+    // log.info("After input source creation, TcManager has {} graphs. ", tcManager.listGraphs()
     // .size());
-    // for (UriRef name : tcManager.listTripleCollections())
+    // for (IRI name : tcManager.listGraphs())
     // log.info("-- {} (a {})", name, tcManager.getTriples(name).getClass().getSimpleName());
-    // assertEquals(tcs + 1, tcManager.listTripleCollections().size());
+    // assertEquals(tcs + 1, tcManager.listGraphs().size());
     // Space spc = new CoreSpaceImpl(TestClerezzaInputSources.class.getSimpleName(),
     // IRI.create("http://stanbol.apache.org/ontologies/"), provider);
     // spc.addOntology(src);
-    // log.info("After addition to space, TcManager has {} graphs. ", tcManager.listTripleCollections()
+    // log.info("After addition to space, TcManager has {} graphs. ", tcManager.listGraphs()
     // .size());
     //
-    // for (UriRef name : tcManager.listTripleCollections())
+    // for (IRI name : tcManager.listGraphs())
     // log.info("-- {} (a {})", name, tcManager.getTriples(name).getClass().getSimpleName());
     // // Adding the ontology from the same storage should not create new graphs
-    // assertEquals(tcs + 1, tcManager.listTripleCollections().size());
+    // assertEquals(tcs + 1, tcManager.listGraphs().size());
     //
     // }
 
     @Test
     public void testGraphSource() throws Exception {
-        UriRef uri = new UriRef(Locations.CHAR_ACTIVE.toString());
+        IRI uri = new IRI(Locations.CHAR_ACTIVE.toString());
         InputStream inputStream = TestClerezzaInputSources.class
                 .getResourceAsStream("/ontologies/characters_all.owl");
-        parser.parse(tcManager.createMGraph(uri), inputStream, SupportedFormat.RDF_XML, uri);
-        uri = new UriRef(Locations.CHAR_MAIN.toString());
+        parser.parse(tcManager.createGraph(uri), inputStream, SupportedFormat.RDF_XML, uri);
+        uri = new IRI(Locations.CHAR_MAIN.toString());
         inputStream = TestClerezzaInputSources.class.getResourceAsStream("/ontologies/maincharacters.owl");
-        parser.parse(tcManager.createMGraph(uri), inputStream, SupportedFormat.RDF_XML, uri);
-        uri = new UriRef(Locations.CHAR_MINOR.toString());
+        parser.parse(tcManager.createGraph(uri), inputStream, SupportedFormat.RDF_XML, uri);
+        uri = new IRI(Locations.CHAR_MINOR.toString());
         inputStream = TestClerezzaInputSources.class.getResourceAsStream("/ontologies/minorcharacters.owl");
-        parser.parse(tcManager.createMGraph(uri), inputStream, SupportedFormat.RDF_XML, uri);
+        parser.parse(tcManager.createGraph(uri), inputStream, SupportedFormat.RDF_XML, uri);
 
-        src = new GraphSource(new UriRef(Locations.CHAR_ACTIVE.toString()));
+        src = new GraphSource(new IRI(Locations.CHAR_ACTIVE.toString()));
         assertNotNull(src);
         assertNotNull(src.getRootOntology());
-        // Set<TripleCollection> imported = gis.getImports(false);
+        // Set<Graph> imported = gis.getImports(false);
         // // Number of stored graphs minus the importing one minus the reserved graph = imported graphs
-        // assertEquals(tcManager.listTripleCollections().size() - 2, imported.size());
-        // for (TripleCollection g : imported)
+        // assertEquals(tcManager.listGraphs().size() - 2, imported.size());
+        // for (Graph g : imported)
         // assertNotNull(g);
     }
 
diff --git a/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/RootResource.java b/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/RootResource.java
index 82a5eb7..631a507 100644
--- a/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/RootResource.java
+++ b/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/RootResource.java
@@ -88,15 +88,13 @@
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.Response.Status;
 import javax.ws.rs.core.UriInfo;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 
 import org.apache.clerezza.jaxrs.utils.form.MultiPartBody;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedFormatException;
 import org.apache.clerezza.rdf.ontologies.OWL;
@@ -109,7 +107,7 @@
 import org.apache.stanbol.commons.owl.util.URIUtils;
 import org.apache.stanbol.commons.web.viewable.Viewable;
 //import org.apache.stanbol.commons.web.base.ContextHelper;
-import org.apache.stanbol.ontologymanager.multiplexer.clerezza.collector.MGraphMultiplexer;
+import org.apache.stanbol.ontologymanager.multiplexer.clerezza.collector.GraphMultiplexer;
 import org.apache.stanbol.ontologymanager.registry.api.RegistryContentException;
 import org.apache.stanbol.ontologymanager.registry.api.RegistryManager;
 import org.apache.stanbol.ontologymanager.registry.api.model.Library;
@@ -161,7 +159,7 @@
         if (descriptor == null) {
             if (ontologyProvider == null) throw new IllegalStateException(
                     "Tried to obtain a multiplexer before an ontology provider was ready. This shouldn't happen.");
-            descriptor = new MGraphMultiplexer(ontologyProvider.getMetaGraph(MGraph.class));
+            descriptor = new GraphMultiplexer(ontologyProvider.getMetaGraph(Graph.class));
         }
         return descriptor;
     }
@@ -330,23 +328,23 @@
         });
     }
 
-    private MGraph getGraph(String ontologyId, boolean merged, URI requestUri) {
+    private Graph getGraph(String ontologyId, boolean merged, URI requestUri) {
         long before = System.currentTimeMillis();
 
         OWLOntologyID key = OntologyUtils.decode(ontologyId);
 
         log.debug("Will try to retrieve ontology {} from provider.", key);
         /*
-         * Export directly to MGraph since the OWLOntologyWriter uses (de-)serializing converters for the
+         * Export directly to Graph since the OWLOntologyWriter uses (de-)serializing converters for the
          * other formats.
          * 
          * Use oTemp for the "real" graph and o for the graph that will be exported. This is due to the fact
          * that in o we want to change import statements, but we do not want these changes to be stored
          * permanently.
          */
-        MGraph o = null, oTemp = null;
+        Graph o = null, oTemp = null;
         try {
-            oTemp = ontologyProvider.getStoredOntology(key, MGraph.class, merged);
+            oTemp = ontologyProvider.getStoredOntology(key, Graph.class, merged);
         } catch (Exception ex) {
             log.warn("Retrieval of ontology with ID " + key + " failed.", ex);
         }
@@ -368,7 +366,7 @@
             if (smallest != null) {
                 log.debug("Selected library for ontology {} is {} .", iri, smallest);
                 try {
-                    oTemp = registryManager.getLibrary(smallest).getOntology(iri, MGraph.class);
+                    oTemp = registryManager.getLibrary(smallest).getOntology(iri, Graph.class);
                 } catch (RegistryContentException e) {
                     log.warn("The content of library " + smallest + " could not be accessed.", e);
                 }
@@ -376,9 +374,9 @@
         }
 
         // This is needed because we need to change import statements. No need to use a more efficient but
-        // resource-intensive IndexedMGraph, since both o and oTemp will be GC'ed after serialization.
+        // resource-intensive IndexedGraph, since both o and oTemp will be GC'ed after serialization.
         if (oTemp != null) {
-            o = new SimpleMGraph(oTemp);
+            o = new SimpleGraph(oTemp);
         }
 
         if (o == null) {
@@ -404,11 +402,11 @@
         }
         for (Triple t : oldImports) {
             // construct new statement
-            String s = ((UriRef) t.getObject()).getUnicodeString();
+            String s = ((org.apache.clerezza.commons.rdf.IRI) t.getObject()).getUnicodeString();
             if (s.contains("::")) {
                 s = s.substring(s.indexOf("::") + 2, s.length());
             }
-            UriRef target = new UriRef(base + "/" + s);
+            org.apache.clerezza.commons.rdf.IRI target = new org.apache.clerezza.commons.rdf.IRI(base + "/" + s);
             o.add(new TripleImpl(t.getSubject(), OWL.imports, target));
             // remove old statement
             o.remove(t);
@@ -417,13 +415,13 @@
         // Versioning.
         OWLOntologyID id = OWLUtils.extractOntologyID(o);
         if (id != null && !id.isAnonymous() && id.getVersionIRI() == null) {
-            UriRef viri = new UriRef(requestUri.toString());
+            org.apache.clerezza.commons.rdf.IRI viri = new org.apache.clerezza.commons.rdf.IRI(requestUri.toString());
             log.debug("Setting version IRI for export : {}", viri);
-            o.add(new TripleImpl(new UriRef(id.getOntologyIRI().toString()), new UriRef(
+            o.add(new TripleImpl(new org.apache.clerezza.commons.rdf.IRI(id.getOntologyIRI().toString()), new org.apache.clerezza.commons.rdf.IRI(
                     OWL2Constants.OWL_VERSION_IRI), viri));
         }
 
-        log.debug("Exported as Clerezza Graph in {} ms. Handing over to writer.", System.currentTimeMillis()
+        log.debug("Exported as Clerezza ImmutableGraph in {} ms. Handing over to writer.", System.currentTimeMillis()
                                                                                   - before);
         return o;
     }
@@ -469,12 +467,12 @@
                                 @Context UriInfo uriInfo,
                                 @Context HttpHeaders headers) {
         ResponseBuilder rb;
-        UriRef me = new UriRef(getPublicBaseUri() + "ontonet/" + ontologyId);
-        MGraph mGraph = new SimpleMGraph();
+        org.apache.clerezza.commons.rdf.IRI me = new org.apache.clerezza.commons.rdf.IRI(getPublicBaseUri() + "ontonet/" + ontologyId);
+        Graph mGraph = new SimpleGraph();
         for (String alias : getAliases(OntologyUtils.decode(ontologyId))) {
-            mGraph.add(new TripleImpl(new UriRef(getPublicBaseUri() + "ontonet/" + alias), OWL.sameAs, me));
+            mGraph.add(new TripleImpl(new org.apache.clerezza.commons.rdf.IRI(getPublicBaseUri() + "ontonet/" + alias), OWL.sameAs, me));
         }
-        rb = Response.ok(mGraph);
+        rb = Response.ok(mGraph);
         // addCORSOrigin(servletContext, rb, headers);
         return rb.build();
     }
@@ -482,7 +480,7 @@
     @GET
     @Produces({RDF_XML, TURTLE, X_TURTLE, APPLICATION_JSON, RDF_JSON})
     public Response getMetaGraph(@Context HttpHeaders headers) {
-        ResponseBuilder rb = Response.ok(ontologyProvider.getMetaGraph(Graph.class));
+        ResponseBuilder rb = Response.ok(ontologyProvider.getMetaGraph(ImmutableGraph.class));
         // addCORSOrigin(servletContext, rb, headers);
         return rb.build();
     }
@@ -526,7 +524,7 @@
         long before = System.currentTimeMillis();
         IRI iri = URIUtils.sanitize(IRI.create(ontologyId));
         log.debug("Will try to retrieve ontology {} from provider.", iri);
-        // TODO be selective: if the ontology is small enough, use OWLOntology otherwise export to Graph.
+        // TODO be selective: if the ontology is small enough, use OWLOntology otherwise export to ImmutableGraph.
         OWLOntology o = null;
         try {
             // XXX Guarantee that there MUST always be an entry for any decoded ontology ID submitted.
@@ -589,7 +587,7 @@
         }
 
         o.getOWLOntologyManager().applyChanges(changes);
-        log.debug("Exported as Clerezza Graph in {} ms. Handing over to writer.", System.currentTimeMillis()
+        log.debug("Exported as Clerezza ImmutableGraph in {} ms. Handing over to writer.", System.currentTimeMillis()
                                                                                   - before);
         return o;
     }
@@ -627,7 +625,7 @@
         if (ontologyProvider.listOrphans().contains(key)) {
             rb = Response.status(NO_CONTENT);
         } else {
-            TripleCollection o = getGraph(ontologyId, merged, uriInfo.getRequestUri());
+            Graph o = getGraph(ontologyId, merged, uriInfo.getRequestUri());
             rb = o == null ? Response.status(NOT_FOUND) : Response.ok(o);
         }
         // addCORSOrigin(servletContext, rb, headers);
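
The owl:imports rewriting loop in getGraph(...) follows a collect-then-mutate idiom, since mutating the graph while iterating filter(...) results is unsafe. A self-contained sketch with illustrative base and import IRIs:

    import java.util.HashSet;
    import java.util.Iterator;
    import java.util.Set;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Triple;
    import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.ontologies.OWL;

    public class RewriteImports {
        public static void main(String[] args) {
            Graph o = new SimpleGraph();
            IRI ont = new IRI("http://example.org/onto"); // illustrative ontology
            o.add(new TripleImpl(ont, OWL.imports, new IRI("scope::http://example.org/imported")));

            String base = "http://localhost:8080/ontonet/ontology"; // illustrative public base URI
            // Collect first: mutating the graph while iterating filter(...) is unsafe.
            Set<Triple> oldImports = new HashSet<Triple>();
            for (Iterator<Triple> it = o.filter(null, OWL.imports, null); it.hasNext();)
                oldImports.add(it.next());
            for (Triple t : oldImports) {
                String s = ((IRI) t.getObject()).getUnicodeString();
                if (s.contains("::")) s = s.substring(s.indexOf("::") + 2);
                // construct the new statement, then remove the old one
                o.add(new TripleImpl(t.getSubject(), OWL.imports, new IRI(base + "/" + s)));
                o.remove(t);
            }
            System.out.println(o);
        }
    }
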
diff --git a/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/ScopeResource.java b/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/ScopeResource.java
index aea148e..7098497 100644
--- a/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/ScopeResource.java
+++ b/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/ScopeResource.java
@@ -73,8 +73,8 @@
 
 import org.apache.clerezza.jaxrs.utils.form.FormFile;
 import org.apache.clerezza.jaxrs.utils.form.MultiPartBody;
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.clerezza.rdf.core.serializedform.UnsupportedFormatException;
@@ -178,8 +178,8 @@
 
         if (scope == null) return Response.status(NOT_FOUND).build();
         IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/ontology/");
-        // Export to Clerezza Graph, which can be rendered as JSON-LD.
-        ResponseBuilder rb = Response.ok(scope.export(Graph.class, merge, prefix));
+        // Export to Clerezza ImmutableGraph, which can be rendered as JSON-LD.
+        ResponseBuilder rb = Response.ok(scope.export(ImmutableGraph.class, merge, prefix));
         // addCORSOrigin(servletContext, rb, headers);
         return rb.build();
     }
@@ -195,7 +195,7 @@
         // Export smaller graphs to OWLOntology due to the more human-readable rendering.
         ResponseBuilder rb;
         IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/ontology/");
-        if (merge) rb = Response.ok(scope.export(Graph.class, merge, prefix));
+        if (merge) rb = Response.ok(scope.export(ImmutableGraph.class, merge, prefix));
         else rb = Response.ok(scope.export(OWLOntology.class, merge, prefix));
         // addCORSOrigin(servletContext, rb, headers);
         return rb.build();
@@ -248,7 +248,7 @@
 
         OntologySpace space = scope.getCoreSpace();
         IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/ontology/");
-        Graph o = space.export(Graph.class, merge, prefix);
+        ImmutableGraph o = space.export(ImmutableGraph.class, merge, prefix);
         ResponseBuilder rb = Response.ok(o);
         // addCORSOrigin(servletContext, rb, headers);
         return rb.build();
@@ -293,7 +293,7 @@
 
         OntologySpace space = scope.getCustomSpace();
         IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/ontology/");
-        Graph o = space.export(Graph.class, merge, prefix);
+        ImmutableGraph o = space.export(ImmutableGraph.class, merge, prefix);
         ResponseBuilder rb = Response.ok(o);
         // addCORSOrigin(servletContext, rb, headers);
         return rb.build();
@@ -414,14 +414,14 @@
         if (scope == null) rb = Response.status(NOT_FOUND);
         else {
             IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/ontology/");
-            Graph o = null;
+            ImmutableGraph o = null;
             OWLOntologyID id = OntologyUtils.decode(ontologyId);
             OntologySpace spc = scope.getCustomSpace();
             if (spc != null && spc.hasOntology(id)) {
-                o = spc.getOntology(id, Graph.class, merge, prefix);
+                o = spc.getOntology(id, ImmutableGraph.class, merge, prefix);
             } else {
                 spc = scope.getCoreSpace();
-                if (spc != null && spc.hasOntology(id)) o = spc.getOntology(id, Graph.class, merge, prefix);
+                if (spc != null && spc.hasOntology(id)) o = spc.getOntology(id, ImmutableGraph.class, merge, prefix);
             }
             if (o == null) rb = Response.status(NOT_FOUND);
             else rb = Response.ok(o);
@@ -876,8 +876,8 @@
                         for (Object o : ((SetInputSource<?>) coreSrc).getOntologies()) {
                             OntologyInputSource<?> src = null;
                             if (o instanceof OWLOntology) src = new RootOntologySource((OWLOntology) o);
-                            else if (o instanceof TripleCollection) src = new GraphSource(
-                                    (TripleCollection) o);
+                            else if (o instanceof Graph) src = new GraphSource(
+                                    (Graph) o);
                             if (src != null) expanded.add(src);
                         }
                     } else expanded.add(coreSrc); // Must be denoting a single ontology
diff --git a/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/SessionResource.java b/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/SessionResource.java
index 6cd4e13..567a617 100644
--- a/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/SessionResource.java
+++ b/ontologymanager/web/src/main/java/org/apache/stanbol/ontologymanager/web/resources/SessionResource.java
@@ -70,7 +70,7 @@
 
 import org.apache.clerezza.jaxrs.utils.form.FormFile;
 import org.apache.clerezza.jaxrs.utils.form.MultiPartBody;
-import org.apache.clerezza.rdf.core.Graph;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
 import org.apache.clerezza.rdf.core.access.TcProvider;
 import org.apache.clerezza.rdf.core.serializedform.Parser;
 import org.apache.felix.scr.annotations.Component;
@@ -165,8 +165,8 @@
         session = sesMgr.getSession(sessionId);
         if (session == null) return Response.status(NOT_FOUND).build();
         IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/session/");
-        // Export to Clerezza Graph, which can be rendered as JSON-LD.
-        ResponseBuilder rb = Response.ok(session.export(Graph.class, merge, prefix));
+        // Export to Clerezza ImmutableGraph, which can be rendered as JSON-LD.
+        ResponseBuilder rb = Response.ok(session.export(ImmutableGraph.class, merge, prefix));
 //        addCORSOrigin(servletContext, rb, headers);
         return rb.build();
     }
@@ -182,7 +182,7 @@
         ResponseBuilder rb;
         IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/session/");
         // Export smaller graphs to OWLOntology due to the more human-readable rendering.
-        if (merge) rb = Response.ok(session.export(Graph.class, merge, prefix));
+        if (merge) rb = Response.ok(session.export(ImmutableGraph.class, merge, prefix));
         else rb = Response.ok(session.export(OWLOntology.class, merge, prefix));
 //        addCORSOrigin(servletContext, rb, headers);
         return rb.build();
@@ -375,7 +375,7 @@
         session = sesMgr.getSession(sessionId);
         if (session == null) return Response.status(NOT_FOUND).build();
         IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/session/");
-        Graph o = session.getOntology(OntologyUtils.decode(ontologyId), Graph.class, merge, prefix);
+        ImmutableGraph o = session.getOntology(OntologyUtils.decode(ontologyId), ImmutableGraph.class, merge, prefix);
         ResponseBuilder rb = (o != null) ? Response.ok(o) : Response.status(NOT_FOUND);
 //        addCORSOrigin(servletContext, rb, headers);
         return rb.build();
@@ -409,7 +409,7 @@
             IRI prefix = IRI.create(getPublicBaseUri() + "ontonet/session/");
             OWLOntologyID id = OntologyUtils.decode(ontologyId);
             if (merge) {
-                Graph g = session.getOntology(id, Graph.class, merge, prefix);
+                ImmutableGraph g = session.getOntology(id, ImmutableGraph.class, merge, prefix);
                 rb = (g != null) ? Response.ok(g) : Response.status(NOT_FOUND);
             } else {
                 OWLOntology o = session.getOntology(id, OWLOntology.class, merge, prefix);
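Note: both export paths above follow the same rule of thumb: merged exports are returned as a Clerezza ImmutableGraph, which the registered RDF providers can render (e.g. as JSON-LD), while unmerged ontologies are returned as an OWLOntology for the more human-readable rendering. A hedged fragment of that dispatch, with session, merge and prefix as in the resource methods above:

        Object exported = merge
                ? session.export(ImmutableGraph.class, merge, prefix) // machine-friendly rendering
                : session.export(OWLOntology.class, merge, prefix);   // readable for small graphs
        return Response.ok(exported).build();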
diff --git a/parent/pom.xml b/parent/pom.xml
index e4ba841..4692ab5 100644
--- a/parent/pom.xml
+++ b/parent/pom.xml
@@ -96,7 +96,7 @@
 
   <build>
     <plugins>
-      <!-- requires Java 6 -->
+      <!-- requires Java 8 -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-enforcer-plugin</artifactId>
@@ -109,8 +109,8 @@
             <configuration>
               <rules>
                 <requireJavaVersion>
-                  <message>Java 6 or higher is required to compile this module</message>
-                  <version>1.6</version>
+                  <message>Java 8 or higher is required to compile this module</message>
+                  <version>1.8</version>
                 </requireJavaVersion>
               </rules>
             </configuration>
@@ -646,12 +646,12 @@
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.core</artifactId>
-        <version>0.14</version>
+        <version>1.0.1</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.utils</artifactId>
-        <version>0.14</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
@@ -661,72 +661,72 @@
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.metadata</artifactId>
-        <version>0.2</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.ontologies</artifactId>
-        <version>0.12</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.jena.serializer</artifactId>
-        <version>0.11</version>
+        <version>1.1.1</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.jena.parser</artifactId>
-        <version>0.12</version>
+        <version>1.1.1</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.jena.sparql</artifactId>
-        <version>0.7</version>
+        <version>1.1.1</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.rdfjson</artifactId>
-        <version>0.4</version>
+        <version>1.0.1</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>platform.content</artifactId>
-        <version>0.14</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>platform.graphprovider.content</artifactId>
-        <version>0.7</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>platform.typerendering.scalaserverpages</artifactId>
-        <version>0.4</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>jaxrs.rdf.providers</artifactId>
-        <version>0.15</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.simple.storage</artifactId>
-        <version>0.8</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.jena.facade</artifactId>
-        <version>0.14</version>
+        <version>1.1.1</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>platform.config</artifactId>
-        <version>0.4</version>
+        <version>1.0.0</version>
       </dependency>
       <dependency>
         <groupId>org.apache.clerezza</groupId>
         <artifactId>rdf.core.test</artifactId>
-        <version>0.15</version>
+        <version>1.0.0</version>
         <scope>test</scope>
       </dependency>
 
diff --git a/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/input/provider/impl/RecipeInputProvider.java b/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/input/provider/impl/RecipeInputProvider.java
index 061ad59..10ec279 100644
--- a/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/input/provider/impl/RecipeInputProvider.java
+++ b/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/input/provider/impl/RecipeInputProvider.java
@@ -22,7 +22,7 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.reasoners.servicesapi.ReasoningServiceInputProvider;
 import org.apache.stanbol.rules.base.api.NoSuchRecipeException;
 import org.apache.stanbol.rules.base.api.Recipe;
@@ -95,7 +95,7 @@
 	                Recipe recipe = null;
 	                synchronized (store) {
 	                    try {
-							recipe = store.getRecipe(new UriRef(recipeId));
+							recipe = store.getRecipe(new IRI(recipeId));
 						} catch (RecipeConstructionException e) {
 							log.error("An error occurred while generating the recipe.", e);
 						}                    
@@ -170,7 +170,7 @@
 	                Recipe recipe = null;
 	                synchronized (store) {
 	                    try {
-							recipe = store.getRecipe(new UriRef(recipeId));
+							recipe = store.getRecipe(new IRI(recipeId));
 						} catch (RecipeConstructionException e) {
 							log.error("An error occurred while generating the recipe.", e);
 						}                    
diff --git a/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/utils/ReasoningServiceExecutor.java b/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/utils/ReasoningServiceExecutor.java
index 1b76fec..c13f844 100644
--- a/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/utils/ReasoningServiceExecutor.java
+++ b/reasoners/web/src/main/java/org/apache/stanbol/reasoners/web/utils/ReasoningServiceExecutor.java
@@ -25,9 +25,8 @@
 import java.util.Set;
 import java.util.concurrent.locks.Lock;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.UriRef;
-import org.apache.clerezza.rdf.core.access.LockableMGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.NoSuchEntityException;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.stanbol.commons.jobs.api.Job;
@@ -234,16 +233,16 @@
         log.debug("Attempt saving in target graph {}", targetGraphID);
 
         final long startSave = System.currentTimeMillis();
-        LockableMGraph mGraph;
-        UriRef graphUriRef = new UriRef(targetGraphID);
+        Graph mGraph;
+        IRI graphIRI = new IRI(targetGraphID);
 
         // tcManager must be synchronized
         synchronized (tcManager) {
             try {
                 // Check whether the graph already exists
-                mGraph = tcManager.getMGraph(graphUriRef);
+                mGraph = tcManager.getGraph(graphIRI);
             } catch (NoSuchEntityException e) {
-                mGraph = tcManager.createMGraph(graphUriRef);
+                mGraph = tcManager.createGraph(graphIRI);
             }
         }
 
@@ -251,12 +250,12 @@
         Lock writeLock = mGraph.getLock().writeLock();
         boolean saved = false;
         if (data instanceof Model) {
-            MGraph m = JenaToClerezzaConverter.jenaModelToClerezzaMGraph((Model) data);
+            Graph m = JenaToClerezzaConverter.jenaModelToClerezzaGraph((Model) data);
             writeLock.lock();
             saved = mGraph.addAll(m);
             writeLock.unlock();
         } else if (data instanceof OWLOntology) {
-            MGraph m = (MGraph) OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph((OWLOntology) data);
+            Graph m = (Graph) OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph((OWLOntology) data);
             writeLock.lock();
             saved = mGraph.addAll(m);
             writeLock.unlock();
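Note: in the 1.0 API the read/write lock formerly exposed by LockableMGraph is available on Graph itself, so the executor keeps the same get-or-create-then-lock shape. A minimal sketch of the pattern (the graph name is illustrative); unlocking in a finally block is slightly safer than the unguarded unlock() calls above:

    Graph target;
    IRI name = new IRI("urn:x-example:target");
    synchronized (tcManager) {
        try {
            target = tcManager.getGraph(name);    // 0.x: getMGraph(...)
        } catch (NoSuchEntityException e) {
            target = tcManager.createGraph(name); // 0.x: createMGraph(...)
        }
    }
    Lock writeLock = target.getLock().writeLock();
    writeLock.lock();
    try {
        target.add(new TripleImpl(name, RDF.type, new IRI("urn:x-example:ResultGraph")));
    } finally {
        writeLock.unlock();
    }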
diff --git a/rules/adapters/clerezza/pom.xml b/rules/adapters/clerezza/pom.xml
index e453148..e62ca98 100644
--- a/rules/adapters/clerezza/pom.xml
+++ b/rules/adapters/clerezza/pom.xml
@@ -108,7 +108,16 @@
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
     </dependency>
-
+    <dependency>
+      <groupId>org.apache.clerezza</groupId>
+      <artifactId>rdf.jena.sparql</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.clerezza</groupId>
+      <artifactId>rdf.simple.storage</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
 </project>
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdapter.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdapter.java
index 5f571a6..2217624 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdapter.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdapter.java
@@ -28,7 +28,7 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.TriplePattern;
@@ -308,7 +308,7 @@
         try {
             KB kb = RuleParserImpl.parse("http://sssw.org/2012/rules/", new FileInputStream("/Users/mac/Documents/CNR/SSSW2012/construct/exercise3"));
             System.out.println("Rules: " + kb.getRuleList().size());
-            Recipe recipe = new RecipeImpl(new UriRef("http://sssw.org/2012/rules/"), "Recipe", kb.getRuleList());
+            Recipe recipe = new RecipeImpl(new IRI("http://sssw.org/2012/rules/"), "Recipe", kb.getRuleList());
             
             //List<ConstructQuery> jenaRules = (List<ConstructQuery>) ruleAdapter.adaptTo(recipe, ConstructQuery.class);
             
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaSparqlObject.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaSparqlObject.java
index d9cbad1..180557d 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaSparqlObject.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaSparqlObject.java
@@ -17,13 +17,13 @@
 
 package org.apache.stanbol.rules.adapters.clerezza;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.TriplePattern;
 
 /**
  * 
- * This object represents either a {@link TriplePattern} or an {@link Expression} or a {@link UriRef}
+ * This object represents either a {@link TriplePattern} or an {@link Expression} or an {@link IRI}
  * internally to the Clerezza adapter.
  * 
  * @author anuzzolese
@@ -33,7 +33,7 @@
 
     private TriplePattern triplePattern;
     private Expression expression;
-    private UriRef uriRef;
+    private IRI uriRef;
 
     public ClerezzaSparqlObject(TriplePattern triplePattern) {
         this.triplePattern = triplePattern;
@@ -43,7 +43,7 @@
         this.expression = expression;
     }
 
-    public ClerezzaSparqlObject(UriRef uriRef) {
+    public ClerezzaSparqlObject(IRI uriRef) {
         this.uriRef = uriRef;
     }
 
@@ -54,10 +54,10 @@
      * <ul>
      * <li>a {@link TriplePattern}
      * <li>an {@link Expression}
-     * <li>a {@link UriRef}
+     * <li>an {@link IRI}
      * 
     * @return the object that can be in turn a {@link TriplePattern}, an {@link Expression}, and an
-     *         {@link UriRef}
+     *         {@link IRI}
      */
     public Object getClerezzaObject() {
         if (triplePattern != null) {
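Note: since getClerezzaObject() deliberately erases the type, callers must dispatch on instanceof. A hedged sketch of a consumer; cso, templates and filters are illustrative names, not from this patch:

        Object inner = cso.getClerezzaObject();
        if (inner instanceof TriplePattern) {
            templates.add((TriplePattern) inner);  // goes into the CONSTRUCT template
        } else if (inner instanceof Expression) {
            filters.add((Expression) inner);       // goes into a FILTER
        } else {
            IRI constant = (IRI) inner;            // used as a constant term
        }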
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/BlankNodeAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/BlankNodeAtom.java
index bd2b06c..7ff7f26 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/BlankNodeAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/BlankNodeAtom.java
@@ -16,8 +16,8 @@
  */
 package org.apache.stanbol.rules.adapters.clerezza.atoms;
 
-import org.apache.clerezza.rdf.core.BNode;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.BlankNode;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.ResourceOrVariable;
 import org.apache.clerezza.rdf.core.sparql.query.UriRefOrVariable;
@@ -58,9 +58,9 @@
         ClerezzaSparqlObject predicateCSO = (ClerezzaSparqlObject) adapter.adaptTo(argument1UriResource,
             ConstructQuery.class);
 
-        subject = new UriRefOrVariable((UriRef) subjectCSO.getClerezzaObject());
-        predicate = new UriRefOrVariable((UriRef) predicateCSO.getClerezzaObject());
-        object = new ResourceOrVariable(new BNode());
+        subject = new UriRefOrVariable((IRI) subjectCSO.getClerezzaObject());
+        predicate = new UriRefOrVariable((IRI) predicateCSO.getClerezzaObject());
+        object = new ResourceOrVariable(new BlankNode());
 
         return (T) new ClerezzaSparqlObject(new SimpleTriplePattern(subject, predicate, object));
 
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ClassAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ClassAtom.java
index 9c7f631..3d15c7e 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ClassAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ClassAtom.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.rules.adapters.clerezza.atoms;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.ResourceOrVariable;
 import org.apache.clerezza.rdf.core.sparql.query.UriRefOrVariable;
@@ -64,16 +64,16 @@
 
         if (arg instanceof Variable) {
             subject = new UriRefOrVariable((Variable) arg);
-        } else if (arg instanceof UriRef) {
-            subject = new UriRefOrVariable((UriRef) arg);
+        } else if (arg instanceof IRI) {
+            subject = new UriRefOrVariable((IRI) arg);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
 
         if (cl instanceof Variable) {
             object = new ResourceOrVariable((Variable) cl);
-        } else if (cl instanceof UriRef) {
-            object = new ResourceOrVariable((UriRef) cl);
+        } else if (cl instanceof IRI) {
+            object = new ResourceOrVariable((IRI) cl);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
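Note: the three-way dispatch above recurs in DatavaluedPropertyAtom and IndividualPropertyAtom below. A hypothetical helper (toNode is not part of this patch) makes the invariant explicit: a rule term adapts to a SPARQL node only if it is a Variable or an IRI:

    private UriRefOrVariable toNode(Object term) throws RuleAtomCallExeption {
        if (term instanceof Variable) {
            return new UriRefOrVariable((Variable) term);
        }
        if (term instanceof IRI) {
            return new UriRefOrVariable((IRI) term);
        }
        throw new RuleAtomCallExeption(getClass());
    }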
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ConcatAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ConcatAtom.java
index d515ba4..0eb6821 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ConcatAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ConcatAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.FunctionCall;
@@ -59,7 +59,7 @@
         argumentExpressions.add((Expression) argument1.getClerezzaObject());
         argumentExpressions.add((Expression) argument2.getClerezzaObject());
 
-        FunctionCall functionCall = new FunctionCall(new UriRef(
+        FunctionCall functionCall = new FunctionCall(new IRI(
                 "<http://www.w3.org/2005/xpath-functions#concat>"), argumentExpressions);
 
         return (T) new ClerezzaSparqlObject(functionCall);
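Note: the same shape recurs in EndsWithAtom, LengthAtom, LowerCaseAtom, StartsWithAtom, SubstringAtom and UpperCaseAtom below: collect the adapted arguments as Expressions, then wrap them in a FunctionCall on an XPath-functions IRI (the adapter keeps the angle brackets inside the IRI string, exactly as the 0.x code did). A fragment, with leftExpr and rightExpr standing in for adapted arguments:

        List<Expression> args = new ArrayList<Expression>();
        args.add(leftExpr);
        args.add(rightExpr);
        FunctionCall call = new FunctionCall(
                new IRI("<http://www.w3.org/2005/xpath-functions#concat>"), args);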
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/DatavaluedPropertyAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/DatavaluedPropertyAtom.java
index e69676e..ca2432d 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/DatavaluedPropertyAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/DatavaluedPropertyAtom.java
@@ -16,8 +16,8 @@
  */
 package org.apache.stanbol.rules.adapters.clerezza.atoms;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.LiteralExpression;
 import org.apache.clerezza.rdf.core.sparql.query.ResourceOrVariable;
@@ -69,16 +69,16 @@
 
         if (arg1 instanceof Variable) {
             subject = new UriRefOrVariable((Variable) arg1);
-        } else if (arg1 instanceof UriRef) {
-            subject = new UriRefOrVariable((UriRef) arg1);
+        } else if (arg1 instanceof IRI) {
+            subject = new UriRefOrVariable((IRI) arg1);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
 
         if (dt instanceof Variable) {
             predicate = new UriRefOrVariable((Variable) dt);
-        } else if (dt instanceof UriRef) {
-            predicate = new UriRefOrVariable((UriRef) dt);
+        } else if (dt instanceof IRI) {
+            predicate = new UriRefOrVariable((IRI) dt);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/EndsWithAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/EndsWithAtom.java
index 2e36793..7fbf1c6 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/EndsWithAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/EndsWithAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.FunctionCall;
@@ -59,7 +59,7 @@
         argumentExpressions.add((Expression) argument1.getClerezzaObject());
         argumentExpressions.add((Expression) argument2.getClerezzaObject());
 
-        FunctionCall functionCall = new FunctionCall(new UriRef(
+        FunctionCall functionCall = new FunctionCall(new IRI(
                 "<http://www.w3.org/2005/xpath-functions#ends-with>"), argumentExpressions);
 
         return (T) new ClerezzaSparqlObject(functionCall);
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IndividualPropertyAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IndividualPropertyAtom.java
index 40d5a42..c183db6 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IndividualPropertyAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IndividualPropertyAtom.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.rules.adapters.clerezza.atoms;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.ResourceOrVariable;
 import org.apache.clerezza.rdf.core.sparql.query.UriRefOrVariable;
@@ -67,24 +67,24 @@
 
         if (arg1 instanceof Variable) {
             subject = new UriRefOrVariable((Variable) arg1);
-        } else if (arg1 instanceof UriRef) {
-            subject = new UriRefOrVariable((UriRef) arg1);
+        } else if (arg1 instanceof IRI) {
+            subject = new UriRefOrVariable((IRI) arg1);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
 
         if (dt instanceof Variable) {
             predicate = new UriRefOrVariable((Variable) dt);
-        } else if (dt instanceof UriRef) {
-            predicate = new UriRefOrVariable((UriRef) dt);
+        } else if (dt instanceof IRI) {
+            predicate = new UriRefOrVariable((IRI) dt);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
 
         if (arg2 instanceof Variable) {
             object = new UriRefOrVariable((Variable) arg2);
-        } else if (dt instanceof UriRef) {
-            object = new UriRefOrVariable((UriRef) arg2);
+        } else if (arg2 instanceof IRI) {
+            object = new UriRefOrVariable((IRI) arg2);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IsBlankAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IsBlankAtom.java
index ce68181..98f5f31 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IsBlankAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/IsBlankAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.BuiltInCall;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
@@ -60,8 +60,8 @@
         Expression argumentExpression;
         if (arg instanceof Variable) {
             argumentExpression = (Variable) arg;
-        } else if (arg instanceof UriRef) {
-            argumentExpression = new UriRefExpression((UriRef) arg);
+        } else if (arg instanceof IRI) {
+            argumentExpression = new UriRefExpression((IRI) arg);
         } else {
             throw new RuleAtomCallExeption(getClass());
         }
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LengthAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LengthAtom.java
index 0ddca13..098adcb 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LengthAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LengthAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.FunctionCall;
@@ -56,7 +56,7 @@
         List<Expression> argumentExpressions = new ArrayList<Expression>();
         argumentExpressions.add((Expression) argument1.getClerezzaObject());
 
-        FunctionCall functionCall = new FunctionCall(new UriRef(
+        FunctionCall functionCall = new FunctionCall(new IRI(
                 "<http://www.w3.org/2005/xpath-functions#string-length>"), argumentExpressions);
 
         return (T) new ClerezzaSparqlObject(functionCall);
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LowerCaseAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LowerCaseAtom.java
index f6bbfbe..6f2f70c 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LowerCaseAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/LowerCaseAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.FunctionCall;
@@ -56,7 +56,7 @@
         List<Expression> argumentExpressions = new ArrayList<Expression>();
         argumentExpressions.add((Expression) argument1.getClerezzaObject());
 
-        FunctionCall functionCall = new FunctionCall(new UriRef(
+        FunctionCall functionCall = new FunctionCall(new IRI(
                 "<http://www.w3.org/2005/xpath-functions#lower-case>"), argumentExpressions);
 
         return (T) new ClerezzaSparqlObject(functionCall);
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/NewIRIAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/NewIRIAtom.java
index feb938f..87729e0 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/NewIRIAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/NewIRIAtom.java
@@ -32,7 +32,7 @@
 import org.apache.stanbol.rules.manager.atoms.StringFunctionAtom;
 
 /**
- * It adapts any NewIRIAtom to the BIND built in call in Clerezza for creating new UriRef resources binding
+ * It adapts any NewIRIAtom to the BIND built-in call in Clerezza for creating new IRI resources, binding
  * the value to a variable.
  * 
  * @author anuzzolese
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ResourceAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ResourceAtom.java
index 8f11197..19568a9 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ResourceAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/ResourceAtom.java
@@ -17,14 +17,14 @@
 
 package org.apache.stanbol.rules.adapters.clerezza.atoms;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.adapters.AbstractAdaptableAtom;
 import org.apache.stanbol.rules.adapters.clerezza.ClerezzaSparqlObject;
 import org.apache.stanbol.rules.base.api.RuleAtom;
 import org.apache.stanbol.rules.base.api.RuleAtomCallExeption;
 
 /**
- * It adapts any ResourceAtom to UriRef resource in Clerezza.
+ * It adapts any ResourceAtom to an IRI resource in Clerezza.
  * 
  * @author anuzzolese
  * 
@@ -38,7 +38,7 @@
         org.apache.stanbol.rules.manager.atoms.ResourceAtom tmp = (org.apache.stanbol.rules.manager.atoms.ResourceAtom) ruleAtom;
 
         String unquotedURI = tmp.toUnquotedString();
-        UriRef uriRef = new UriRef(unquotedURI);
+        IRI uriRef = new IRI(unquotedURI);
 
         return (T) new ClerezzaSparqlObject(uriRef);
     }
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StartsWithAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StartsWithAtom.java
index 265d71d..2842709 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StartsWithAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StartsWithAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.FunctionCall;
@@ -59,7 +59,7 @@
         argumentExpressions.add((Expression) argument1.getClerezzaObject());
         argumentExpressions.add((Expression) argument2.getClerezzaObject());
 
-        FunctionCall functionCall = new FunctionCall(new UriRef(
+        FunctionCall functionCall = new FunctionCall(new IRI(
                 "<http://www.w3.org/2005/xpath-functions#starts-with>"), argumentExpressions);
 
         return (T) new ClerezzaSparqlObject(functionCall);
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StringAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StringAtom.java
index c128b03..74011ad 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StringAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/StringAtom.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.rules.adapters.clerezza.atoms;
 
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.LiteralExpression;
 import org.apache.clerezza.rdf.core.sparql.query.Variable;
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/SubstringAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/SubstringAtom.java
index ddfd8a4..c255030 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/SubstringAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/SubstringAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.FunctionCall;
@@ -65,7 +65,7 @@
         argumentExpressions.add((Expression) clerezzaStart.getClerezzaObject());
         argumentExpressions.add((Expression) clerezzaLength.getClerezzaObject());
 
-        FunctionCall functionCall = new FunctionCall(new UriRef(
+        FunctionCall functionCall = new FunctionCall(new IRI(
                 "<http://www.w3.org/2005/xpath-functions#substring>"), argumentExpressions);
 
         return (T) new ClerezzaSparqlObject(functionCall);
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/TypedLiteralAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/TypedLiteralAtom.java
index 3ed6721..d973fcf 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/TypedLiteralAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/TypedLiteralAtom.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.rules.adapters.clerezza.atoms;
 
-import org.apache.clerezza.rdf.core.Literal;
+import org.apache.clerezza.commons.rdf.Literal;
 import org.apache.clerezza.rdf.core.LiteralFactory;
 import org.apache.clerezza.rdf.core.sparql.query.LiteralExpression;
 import org.apache.stanbol.rules.adapters.AbstractAdaptableAtom;
diff --git a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/UpperCaseAtom.java b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/UpperCaseAtom.java
index 50c4701..872fdbf 100644
--- a/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/UpperCaseAtom.java
+++ b/rules/adapters/clerezza/src/main/java/org/apache/stanbol/rules/adapters/clerezza/atoms/UpperCaseAtom.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.clerezza.rdf.core.sparql.query.Expression;
 import org.apache.clerezza.rdf.core.sparql.query.FunctionCall;
@@ -56,7 +56,7 @@
         List<Expression> argumentExpressions = new ArrayList<Expression>();
         argumentExpressions.add((Expression) argument1.getClerezzaObject());
 
-        FunctionCall functionCall = new FunctionCall(new UriRef(
+        FunctionCall functionCall = new FunctionCall(new IRI(
                 "<http://www.w3.org/2005/xpath-functions#upper-case>"), argumentExpressions);
 
         return (T) new ClerezzaSparqlObject(functionCall);
diff --git a/rules/adapters/clerezza/src/test/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdpterTest.java b/rules/adapters/clerezza/src/test/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdpterTest.java
index d48eb4f..8db4d73 100644
--- a/rules/adapters/clerezza/src/test/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdpterTest.java
+++ b/rules/adapters/clerezza/src/test/java/org/apache/stanbol/rules/adapters/clerezza/ClerezzaAdpterTest.java
@@ -23,7 +23,7 @@
 
 import junit.framework.Assert;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.access.WeightedTcProvider;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
@@ -109,7 +109,7 @@
             recipeString);
 
         recipeGood = new RecipeImpl(
-                new UriRef("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
+                new IRI("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
 
         recipeString = "kres = <http://kres.iks-project.eu/ontology.owl#> . "
@@ -122,7 +122,7 @@
         kb = RuleParserImpl.parse("http://incubator.apache.com/stanbol/rules/adapters/jena/test/",
             recipeString);
 
-        recipeWrong = new RecipeImpl(new UriRef(
+        recipeWrong = new RecipeImpl(new IRI(
                 "http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
     }
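Note: what the commented-out line in ClerezzaAdapter.main() hints at, and what these tests exercise: adaptTo(...) translates a whole recipe into the target query type. A hedged sketch, with ruleAdapter and recipeGood as in the fixture above:

        @SuppressWarnings("unchecked")
        List<ConstructQuery> queries =
                (List<ConstructQuery>) ruleAdapter.adaptTo(recipeGood, ConstructQuery.class);
        for (ConstructQuery query : queries) {
            System.out.println(query); // serialized SPARQL CONSTRUCT
        }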
diff --git a/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/JenaAdapter.java b/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/JenaAdapter.java
index 892c575..63691e5 100644
--- a/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/JenaAdapter.java
+++ b/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/JenaAdapter.java
@@ -28,7 +28,7 @@
 import java.util.List;
 import java.util.Map;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.ResultSet;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -306,7 +306,7 @@
         try {
             KB kb = RuleParserImpl.parse("http://sssw.org/2012/rules/", new FileInputStream("/Users/mac/Documents/CNR/SSSW2012/rules/exercise1"));
             System.out.println("Rules: " + kb.getRuleList().size());
-            Recipe recipe = new RecipeImpl(new UriRef("http://sssw.org/2012/rules/"), "Recipe", kb.getRuleList());
+            Recipe recipe = new RecipeImpl(new IRI("http://sssw.org/2012/rules/"), "Recipe", kb.getRuleList());
             
             List<com.hp.hpl.jena.reasoner.rulesys.Rule> jenaRules = (List<com.hp.hpl.jena.reasoner.rulesys.Rule>) ruleAdapter.adaptTo(recipe, com.hp.hpl.jena.reasoner.rulesys.Rule.class);
             
diff --git a/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/atoms/IsBlankAtom.java b/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/atoms/IsBlankAtom.java
index 6e8aa1b..49b23b4 100644
--- a/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/atoms/IsBlankAtom.java
+++ b/rules/adapters/jena/src/main/java/org/apache/stanbol/rules/adapters/jena/atoms/IsBlankAtom.java
@@ -34,7 +34,7 @@
 
 /**
  * 
- * It adapts a IsBlankAtom to the isBNode functor of Jena.
+ * It adapts an IsBlankAtom to the isBNode functor of Jena.
  * 
  * @author anuzzolese
  * 
@@ -65,7 +65,7 @@
 
         nodes.add(argNode);
 
-        return (T) new Functor("isBNode", nodes, BuiltinRegistry.theRegistry);
+        return (T) new Functor("isBNode", nodes, BuiltinRegistry.theRegistry);
 
     }
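Note: the functor name must match a builtin registered in Jena's BuiltinRegistry, and isBNode is the name Jena's rule grammar knows; stock Jena registers no isBlankNode builtin. A quick syntax check, assuming Jena on the classpath (rule text and URIs are illustrative):

    import java.util.List;
    import com.hp.hpl.jena.reasoner.rulesys.Rule;

    public class IsBNodeSyntaxCheck {
        public static void main(String[] args) {
            // Parses only because isBNode is a registered builtin.
            List<Rule> rules = Rule.parseRules(
                    "[r1: (?x <http://example.org/p> ?y), isBNode(?x) "
                            + "-> (?x <http://example.org/anon> 'true')]");
            System.out.println(rules.size()); // prints 1
        }
    }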
 
diff --git a/rules/adapters/jena/src/test/java/org/apache/stanbol/rules/adapters/jena/JenaAdpterTest.java b/rules/adapters/jena/src/test/java/org/apache/stanbol/rules/adapters/jena/JenaAdpterTest.java
index 0cefff7..c99b4f7 100644
--- a/rules/adapters/jena/src/test/java/org/apache/stanbol/rules/adapters/jena/JenaAdpterTest.java
+++ b/rules/adapters/jena/src/test/java/org/apache/stanbol/rules/adapters/jena/JenaAdpterTest.java
@@ -19,7 +19,7 @@
 
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.stanbol.rules.base.api.Recipe;
 import org.apache.stanbol.rules.base.api.RuleAdapter;
@@ -79,7 +79,7 @@
             recipeString);
 
         recipeGood = new RecipeImpl(
-                new UriRef("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
+                new IRI("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
 
         recipeString = "kres = <http://kres.iks-project.eu/ontology.owl#> . "
@@ -92,7 +92,7 @@
         kb = RuleParserImpl.parse("http://incubator.apache.com/stanbol/rules/adapters/jena/test/",
             recipeString);
 
-        recipeWrong = new RecipeImpl(new UriRef(
+        recipeWrong = new RecipeImpl(new IRI(
                 "http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
     }
diff --git a/rules/adapters/sparql/src/test/java/org/apache/stanbol/rules/adapters/sparql/SPARQLAdpterTest.java b/rules/adapters/sparql/src/test/java/org/apache/stanbol/rules/adapters/sparql/SPARQLAdpterTest.java
index 01eb1a9..2460f52 100644
--- a/rules/adapters/sparql/src/test/java/org/apache/stanbol/rules/adapters/sparql/SPARQLAdpterTest.java
+++ b/rules/adapters/sparql/src/test/java/org/apache/stanbol/rules/adapters/sparql/SPARQLAdpterTest.java
@@ -21,7 +21,7 @@
 
 import junit.framework.Assert;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.Recipe;
 import org.apache.stanbol.rules.base.api.RuleAdapter;
 import org.apache.stanbol.rules.base.api.RuleAtomCallExeption;
@@ -76,7 +76,7 @@
             recipeString);
 
         recipeGood = new RecipeImpl(
-                new UriRef("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
+                new IRI("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
 
         recipeString = "kres = <http://kres.iks-project.eu/ontology.owl#> . "
@@ -89,7 +89,7 @@
         kb = RuleParserImpl.parse("http://incubator.apache.com/stanbol/rules/adapters/jena/test/",
             recipeString);
 
-        recipeWrong = new RecipeImpl(new UriRef(
+        recipeWrong = new RecipeImpl(new IRI(
                 "http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
     }
diff --git a/rules/adapters/swrl/src/test/java/org/apache/stanbol/rules/adapters/swrl/SWRLAdpterTest.java b/rules/adapters/swrl/src/test/java/org/apache/stanbol/rules/adapters/swrl/SWRLAdpterTest.java
index 2489fce..6c09a63 100644
--- a/rules/adapters/swrl/src/test/java/org/apache/stanbol/rules/adapters/swrl/SWRLAdpterTest.java
+++ b/rules/adapters/swrl/src/test/java/org/apache/stanbol/rules/adapters/swrl/SWRLAdpterTest.java
@@ -19,7 +19,7 @@
 
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.stanbol.rules.base.api.Recipe;
 import org.apache.stanbol.rules.base.api.RuleAdapter;
@@ -78,7 +78,7 @@
             recipeString);
 
         recipeGood = new RecipeImpl(
-                new UriRef("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
+                new IRI("http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
 
         recipeString = "kres = <http://kres.iks-project.eu/ontology.owl#> . "
@@ -91,7 +91,7 @@
         kb = RuleParserImpl.parse("http://incubator.apache.com/stanbol/rules/adapters/jena/test/",
             recipeString);
 
-        recipeWrong = new RecipeImpl(new UriRef(
+        recipeWrong = new RecipeImpl(new IRI(
                 "http://incubator.apache.com/stanbol/rules/adapters/jena/test"), "A recipe.",
                 kb.getRuleList());
     }
diff --git a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Recipe.java b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Recipe.java
index 976024a..a055ff8 100644
--- a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Recipe.java
+++ b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Recipe.java
@@ -18,9 +18,8 @@
 
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.util.RuleList;
-import org.semanticweb.owlapi.model.IRI;
 
 /**
  * It represents a Recipe object.<br/>
@@ -44,10 +43,10 @@
      * Get the rule of the recipe identified by the rule ID. The rule is returned as a {@link Rule} object.
      * 
      * @param ruleID
-     *            {@link UriRef}
+     *            {@link IRI}
      * @return the object that represents a {@link Rule}
      */
-    Rule getRule(UriRef ruleID) throws NoSuchRuleInRecipeException;
+    Rule getRule(IRI ruleID) throws NoSuchRuleInRecipeException;
 
     /**
      * Get the list of the {@link Rule} contained in the recipe.
@@ -59,23 +58,23 @@
     /**
      * Get the list of rule IDs contained in the recipe.
      * 
-     * @return the List of {@link UriRef}.
+     * @return the List of {@link IRI}.
      */
-    List<UriRef> listRuleIDs();
+    List<IRI> listRuleIDs();
 
     /**
      * Get the list of rule names contained in the recipe.
      * 
-     * @return the List of {@link UriRef}.
+     * @return the List of {@link IRI}.
      */
     List<String> listRuleNames();
 
     /**
      * Get the ID of the recipe in the {@link RuleStore}.
      * 
-     * @return the {@link UriRef} expressing the recipe's ID.
+     * @return the {@link IRI} expressing the recipe's ID.
      */
-    UriRef getRecipeID();
+    IRI getRecipeID();
 
     /**
     * Get the description of the recipe.
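Note: a hedged sketch of walking a Recipe through the migrated interface; recipe and log are assumed to be in scope:

        for (IRI ruleID : recipe.listRuleIDs()) {
            try {
                Rule rule = recipe.getRule(ruleID);
                log.info("Rule {} : {}", ruleID, rule.toString());
            } catch (NoSuchRuleInRecipeException e) {
                log.error("Rule {} is listed but not retrievable.", ruleID);
            }
        }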
diff --git a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Rule.java b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Rule.java
index 8670806..a4f46d4 100644
--- a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Rule.java
+++ b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Rule.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.rules.base.api;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.util.AtomList;
 
 /**
@@ -30,9 +30,9 @@
     /**
      * Gets the ID of the rule.
      * 
-     * @return the {@link UriRef} representing the name of the rule.
+     * @return the {@link IRI} representing the name of the rule.
      */
-    UriRef getRuleID();
+    IRI getRuleID();
 
     /**
      * Gets the name of the rule.
diff --git a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/RuleStore.java b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/RuleStore.java
index a056169..3c9a131 100644
--- a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/RuleStore.java
+++ b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/RuleStore.java
@@ -19,8 +19,8 @@
 import java.io.InputStream;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.util.RecipeList;
 import org.apache.stanbol.rules.base.api.util.RuleList;
 
@@ -54,13 +54,13 @@
      * If some error occurs during the creation of the recipe a {@link RecipeConstructionException} is thrown.
      * 
      * @param recipeID
-     *            {@link UriRef}
+     *            {@link IRI}
      * @param recipeDescription
      *            {@link String}
      * @return a {@link Recipe}
      * @throws AlreadyExistingRecipeException
      */
-    Recipe createRecipe(UriRef recipeID, String recipeDescription) throws AlreadyExistingRecipeException;
+    Recipe createRecipe(IRI recipeID, String recipeDescription) throws AlreadyExistingRecipeException;
 
     /**
      * The method adds a new rule passed as second parameter to a recipe passed as first parameter. <br/>
@@ -134,11 +134,11 @@
      * @param recipe
      *            {@link Recipe}
      * @param ruleID
-     *            {@link UriRef}
+     *            {@link IRI}
      * @return {@link Rule}
      * @throws NoSuchRuleInRecipeException
      */
-    Rule getRule(Recipe recipe, UriRef ruleID) throws NoSuchRuleInRecipeException;
+    Rule getRule(Recipe recipe, IRI ruleID) throws NoSuchRuleInRecipeException;
 
     /**
      * It returns the set of rules that realize the recipe passed as parameter.
@@ -150,13 +150,13 @@
     RuleList listRules(Recipe recipe);
 
     /**
-     * It returns the {@link List} or rules' identifiers ({@link UriRef}).
+     * It returns the {@link List} of rules' identifiers ({@link IRI}).
      * 
      * @param recipe
      *            {@link Recipe}
-     * @return {@link List} of {@link UriRef}
+     * @return {@link List} of {@link IRI}
      */
-    List<UriRef> listRuleIDs(Recipe recipe);
+    List<IRI> listRuleIDs(Recipe recipe);
 
     /**
      * It returns the {@link List} of rules' names.
@@ -175,19 +175,19 @@
      * thrown.
      * 
      * @param recipeID
-     *            {@link UriRef}
+     *            {@link IRI}
      * @return {@link Recipe}
      * @throws NoSuchRecipeException
      * @throws RecipeConstructionException
      */
-    Recipe getRecipe(UriRef recipeID) throws NoSuchRecipeException, RecipeConstructionException;
+    Recipe getRecipe(IRI recipeID) throws NoSuchRecipeException, RecipeConstructionException;
 
     /**
      * It returns a list of existing recipes' IDs in the store.<br/>
      * 
-     * @return {@link List} of {@link UriRef}
+     * @return {@link List} of {@link IRI}
      */
-    List<UriRef> listRecipeIDs();
+    List<IRI> listRecipeIDs();
 
     /**
     * It returns the list of existing recipes in the RuleStore.<br/>
@@ -202,11 +202,11 @@
      * {@link RecipeEliminationException} is thrown.
      * 
      * @param recipeID
-     *            {@link UriRef}
+     *            {@link IRI}
      * @return <code>true</code> if the recipe has been removed, false otherwise.
      * @throws RecipeEliminationException
      */
-    boolean removeRecipe(UriRef recipeID) throws RecipeEliminationException;
+    boolean removeRecipe(IRI recipeID) throws RecipeEliminationException;
 
     /**
      * It removes the recipe passed as parameter.<br/>
@@ -232,13 +232,13 @@
     Recipe removeRule(Recipe recipe, Rule rule);
 
     /**
-     * It allows to export recipes as Clerezza's {@link TripleCollection} objects.
+     * It allows exporting recipes as Clerezza's {@link Graph} objects.
      * 
      * @param recipe
      * @return
      * @throws NoSuchRecipeException
      */
-    TripleCollection exportRecipe(Recipe recipe) throws NoSuchRecipeException;
+    Graph exportRecipe(Recipe recipe) throws NoSuchRecipeException;
 
     /**
      * Find the set of recipes in the rule store whose description matches the <code>term</code>
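Note: putting the migrated signatures together, a minimal sketch of a recipe's lifecycle against a RuleStore (IDs illustrative; the checked exceptions are the ones declared above):

    void recipeLifecycle(RuleStore store) throws AlreadyExistingRecipeException,
            NoSuchRecipeException, RecipeEliminationException {
        IRI id = new IRI("http://example.org/recipes/r1");
        Recipe recipe = store.createRecipe(id, "An example recipe");
        Graph exported = store.exportRecipe(recipe); // 0.x returned TripleCollection
        boolean removed = store.removeRecipe(id);    // true if the recipe was dropped
    }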
diff --git a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Symbols.java b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Symbols.java
index b062e11..94c954a 100644
--- a/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Symbols.java
+++ b/rules/base/src/main/java/org/apache/stanbol/rules/base/api/Symbols.java
@@ -17,7 +17,7 @@
 
 package org.apache.stanbol.rules.base.api;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 
 /**
  * It provides static methods in order to represent properties, classes and the namespace used in order to
@@ -37,18 +37,18 @@
 
     public static String variablesPrefix = "http://incubator.apache.org/stanbol/rules/variables/";
 
-    public static UriRef description = new UriRef("http://incubator.apache.org/stanbol/rules/description");
+    public static IRI description = new IRI("http://incubator.apache.org/stanbol/rules/description");
 
-    public static UriRef hasRule = new UriRef("http://incubator.apache.org/stanbol/rules/hasRule");
+    public static IRI hasRule = new IRI("http://incubator.apache.org/stanbol/rules/hasRule");
 
-    public static UriRef ruleName = new UriRef("http://incubator.apache.org/stanbol/rules/ruleName");
+    public static IRI ruleName = new IRI("http://incubator.apache.org/stanbol/rules/ruleName");
 
-    public static UriRef ruleBody = new UriRef("http://incubator.apache.org/stanbol/rules/ruleBody");
+    public static IRI ruleBody = new IRI("http://incubator.apache.org/stanbol/rules/ruleBody");
 
-    public static UriRef ruleHead = new UriRef("http://incubator.apache.org/stanbol/rules/ruleHead");
+    public static IRI ruleHead = new IRI("http://incubator.apache.org/stanbol/rules/ruleHead");
 
-    public static UriRef Recipe = new UriRef("http://incubator.apache.org/stanbol/rules/Recipe");
+    public static IRI Recipe = new IRI("http://incubator.apache.org/stanbol/rules/Recipe");
 
-    public static UriRef Rule = new UriRef("http://incubator.apache.org/stanbol/rules/Rule");
+    public static IRI Rule = new IRI("http://incubator.apache.org/stanbol/rules/Rule");
 
 }
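Note: these constants are the vocabulary that ClerezzaRuleStore writes below (see the TripleImpl(recipeID, RDF.type, Symbols.Recipe) hunk). A minimal sketch of an index entry, using a SimpleGraph as a stand-in for the TcManager-managed index graph:

        IRI recipeID = new IRI("http://example.org/recipes/r1");
        Graph index = new SimpleGraph();
        index.add(new TripleImpl(recipeID, RDF.type, Symbols.Recipe));
        index.add(new TripleImpl(recipeID, Symbols.description,
                new PlainLiteralImpl("An example recipe")));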
diff --git a/rules/manager/pom.xml b/rules/manager/pom.xml
index b70ac49..aa6d34e 100644
--- a/rules/manager/pom.xml
+++ b/rules/manager/pom.xml
@@ -129,14 +129,17 @@
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.jena.serializer</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.jena.sparql</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.clerezza</groupId>
       <artifactId>rdf.simple.storage</artifactId>
+      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.clerezza</groupId>
diff --git a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/ClerezzaRuleStore.java b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/ClerezzaRuleStore.java
index c2b06c1..dd2ddfb 100644
--- a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/ClerezzaRuleStore.java
+++ b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/ClerezzaRuleStore.java
@@ -31,16 +31,16 @@
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.Literal;
-import org.apache.clerezza.rdf.core.Resource;
-import org.apache.clerezza.rdf.core.Triple;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Literal;
+import org.apache.clerezza.commons.rdf.RDFTerm;
+import org.apache.clerezza.commons.rdf.Triple;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.EntityAlreadyExistsException;
 import org.apache.clerezza.rdf.core.access.NoSuchEntityException;
 import org.apache.clerezza.rdf.core.access.TcManager;
-import org.apache.clerezza.rdf.core.impl.PlainLiteralImpl;
-import org.apache.clerezza.rdf.core.impl.TripleImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.PlainLiteralImpl;
+import org.apache.clerezza.commons.rdf.impl.utils.TripleImpl;
 import org.apache.clerezza.rdf.core.sparql.ParseException;
 import org.apache.clerezza.rdf.core.sparql.QueryParser;
 import org.apache.clerezza.rdf.core.sparql.ResultSet;
@@ -48,7 +48,7 @@
 import org.apache.clerezza.rdf.core.sparql.query.Query;
 import org.apache.clerezza.rdf.core.sparql.query.SelectQuery;
 import org.apache.clerezza.rdf.ontologies.RDF;
-import org.apache.clerezza.rdf.utils.UnionMGraph;
+import org.apache.clerezza.rdf.utils.UnionGraph;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Deactivate;
@@ -73,7 +73,7 @@
 
 /**
  * This class provides an implementation of the {@link RuleStore} based on Clerezza.<br/>
- * Recipe are managed as {@link TripleCollection} graphs. <br/>
+ * Recipes are managed as {@link Graph} instances. <br/>
  * The vocabulary used in these graphs is provided by {@link Symbols}.
  * 
  * @author elvio
@@ -94,7 +94,7 @@
     @Property(name = RuleStore.RECIPE_INDEX_LOCATION, value = _RECIPE_INDEX_LOCATION_DEFAULT)
     private String recipeIndexLocation;
 
-    private List<UriRef> recipes;
+    private List<IRI> recipes;
 
     /**
     * This constructor returns a RuleStoreImpl object with an internal ontology in which to store the rules.
@@ -163,17 +163,17 @@
             else recipeIndexLocation = _RECIPE_INDEX_LOCATION_DEFAULT;
         }
 
-        recipes = new ArrayList<UriRef>();
+        recipes = new ArrayList<IRI>();
 
-        TripleCollection tripleCollection = null;
+        Graph tripleCollection = null;
         try {
-            tripleCollection = tcManager.getMGraph(new UriRef(recipeIndexLocation));
+            tripleCollection = tcManager.getGraph(new IRI(recipeIndexLocation));
         } catch (NoSuchEntityException e) {
-            tripleCollection = tcManager.createMGraph(new UriRef(recipeIndexLocation));
+            tripleCollection = tcManager.createGraph(new IRI(recipeIndexLocation));
         }
 
         for (Triple triple : tripleCollection) {
-            UriRef recipeID = (UriRef) triple.getSubject();
+            IRI recipeID = (IRI) triple.getSubject();
             recipes.add(recipeID);
         }
 
@@ -184,27 +184,27 @@
      * Moved from the AddRecipe class. AddRecipe should not be used anymore.
      */
     @Override
-    public Recipe createRecipe(UriRef recipeID, String recipeDescription) throws AlreadyExistingRecipeException {
+    public Recipe createRecipe(IRI recipeID, String recipeDescription) throws AlreadyExistingRecipeException {
 
-        TripleCollection tripleCollection;
+        Graph tripleCollection;
         try {
-            // create the MGraph in the TcManager
-            tripleCollection = tcManager.createMGraph(recipeID);
+            // create the Graph in the TcManager
+            tripleCollection = tcManager.createGraph(recipeID);
         } catch (EntityAlreadyExistsException e) {
             throw new AlreadyExistingRecipeException(e.getMessage());
         }
 
         Triple recipeTriple = new TripleImpl(recipeID, RDF.type, Symbols.Recipe);
 
-        TripleCollection recipeIndexTripleCollection = tcManager.getMGraph(new UriRef(recipeIndexLocation));
-        recipeIndexTripleCollection.add(recipeTriple);
+        Graph recipeIndexGraph = tcManager.getGraph(new IRI(recipeIndexLocation));
+        recipeIndexGraph.add(recipeTriple);
 
         if (recipeDescription != null && !recipeDescription.isEmpty()) {
             Triple descriptionTriple = new TripleImpl(recipeID, Symbols.description, new PlainLiteralImpl(
                     recipeDescription));
             tripleCollection.add(descriptionTriple);
 
-            recipeIndexTripleCollection.add(descriptionTriple);
+            recipeIndexGraph.add(descriptionTriple);
         }
 
         // add the recipe ID to the list of known recipes
@@ -227,9 +227,9 @@
         log.debug("Adding rule to recipe " + recipe);
         log.info("Rule : " + rule.toString());
 
-        UriRef recipeID = recipe.getRecipeID();
+        IRI recipeID = recipe.getRecipeID();
 
-        TripleCollection tripleCollection = tcManager.getMGraph(recipeID);
+        Graph tripleCollection = tcManager.getGraph(recipeID);
 
         // add the rule object to the graph representation of the recipe by the TcManager
         tripleCollection.add(new TripleImpl(recipeID, Symbols.hasRule, rule.getRuleID()));
@@ -270,7 +270,7 @@
      * 
      * Parse the set of rules provided by the rulesStream parameter as Stanbol syntax rules and add them to
      * the Recipe in the store.<br/>
-     * The recipe is a {@link TripleCollection} managed by the {@link TcManager}.
+     * The recipe is a {@link Graph} managed by the {@link TcManager}.
      * 
      * 
      * @param recipe
@@ -284,7 +284,7 @@
     public Recipe addRulesToRecipe(Recipe recipe, InputStream rulesStream, String description) {
         log.debug("Adding rule to recipe " + recipe);
 
-        UriRef recipeID = recipe.getRecipeID();
+        IRI recipeID = recipe.getRecipeID();
         String namespace = recipeID.toString().substring(1, recipeID.toString().length() - 1) + "/";
         
         log.info("Rule Namespace is " + namespace);
@@ -307,7 +307,7 @@
     @Override
     public Recipe addRulesToRecipe(Recipe recipe, String stanbolRule, String description) {
 
-        UriRef recipeID = recipe.getRecipeID();
+        IRI recipeID = recipe.getRecipeID();
         String namespace = recipeID.toString().substring(1, recipeID.toString().length() - 1) + "/";
 
         RuleList ruleList = RuleParserImpl.parse(namespace, stanbolRule).getRuleList();
@@ -326,18 +326,18 @@
     }
 
     @Override
-    public Recipe getRecipe(UriRef recipeID) throws NoSuchRecipeException, RecipeConstructionException {
+    public Recipe getRecipe(IRI recipeID) throws NoSuchRecipeException, RecipeConstructionException {
 
         log.info("Called get recipe for id: " + recipeID);
 
-        TripleCollection recipeGraph = null;
+        Graph recipeGraph = null;
 
         /**
          * Throw a NoSuchRecipeException in case the TcManager throws a NoSuchEntityException with respect
-         * to UriRef representing the recipe.
+         * to the IRI representing the recipe.
          */
         try {
-            recipeGraph = tcManager.getMGraph(recipeID);
+            recipeGraph = tcManager.getGraph(recipeID);
         } catch (NoSuchEntityException e) {
             throw new NoSuchRecipeException(recipeID.toString());
         }
@@ -365,9 +365,9 @@
             boolean firstIteration = true;
             while (resultSet.hasNext()) {
                 SolutionMapping solutionMapping = resultSet.next();
-                Resource nameResource = solutionMapping.get("ruleName");
-                Resource bodyResource = solutionMapping.get("ruleBody");
-                Resource headResource = solutionMapping.get("ruleHead");
+                RDFTerm nameResource = solutionMapping.get("ruleName");
+                RDFTerm bodyResource = solutionMapping.get("ruleBody");
+                RDFTerm headResource = solutionMapping.get("ruleHead");
 
                 StringBuilder stanbolRuleBuilder = new StringBuilder();
                 stanbolRuleBuilder.append(((Literal) nameResource).getLexicalForm());
@@ -406,7 +406,7 @@
     }
 
     @Override
-    public List<UriRef> listRecipeIDs() {
+    public List<IRI> listRecipeIDs() {
 
         return recipes;
     }
@@ -415,7 +415,7 @@
     public RecipeList listRecipes() throws NoSuchRecipeException, RecipeConstructionException {
         RecipeList recipeList = new RecipeList();
 
-        for (UriRef recipeID : recipes) {
+        for (IRI recipeID : recipes) {
             Recipe recipe;
             try {
                 recipe = getRecipe(recipeID);
@@ -434,18 +434,18 @@
     }
 
     @Override
-    public boolean removeRecipe(UriRef recipeID) throws RecipeEliminationException {
+    public boolean removeRecipe(IRI recipeID) throws RecipeEliminationException {
 
         // remove the recipe from the TcManager
         try {
-            tcManager.deleteTripleCollection(recipeID);
+            tcManager.deleteGraph(recipeID);
         } catch (NoSuchEntityException e) {
             throw new RecipeEliminationException(e);
         }
 
-        TripleCollection recipeIndexTripleCollection = tcManager.getTriples(new UriRef(recipeIndexLocation));
+        Graph recipeIndexGraph = tcManager.getGraph(new IRI(recipeIndexLocation));
         Triple triple = new TripleImpl(recipeID, RDF.type, Symbols.Recipe);
-        recipeIndexTripleCollection.remove(triple);
+        recipeIndexGraph.remove(triple);
 
         // System.out.println("Recipes: " +recipes.size());
         // remove the recipe ID from in-memory list
@@ -464,7 +464,7 @@
 
     @Override
     public Recipe removeRule(Recipe recipe, Rule rule) {
-        TripleCollection tripleCollection = tcManager.getMGraph(recipe.getRecipeID());
+        Graph tripleCollection = tcManager.getGraph(recipe.getRecipeID());
 
         // remove from the graph recipe all the triples having the ruleID as subject.
         Iterator<Triple> triplesIterator = tripleCollection.filter(rule.getRuleID(), null, null);
@@ -487,13 +487,13 @@
     }
 
     @Override
-    public Rule getRule(Recipe recipe, UriRef ruleID) throws NoSuchRuleInRecipeException {
+    public Rule getRule(Recipe recipe, IRI ruleID) throws NoSuchRuleInRecipeException {
 
         return recipe.getRule(ruleID);
     }
 
     @Override
-    public List<UriRef> listRuleIDs(Recipe recipe) {
+    public List<IRI> listRuleIDs(Recipe recipe) {
         return recipe.listRuleIDs();
     }
 
@@ -508,10 +508,10 @@
     }
 
     @Override
-    public TripleCollection exportRecipe(Recipe recipe) throws NoSuchRecipeException {
+    public Graph exportRecipe(Recipe recipe) throws NoSuchRecipeException {
 
         try {
-            return tcManager.getMGraph(recipe.getRecipeID());
+            return tcManager.getGraph(recipe.getRecipeID());
         } catch (NoSuchEntityException e) {
             throw new NoSuchRecipeException(recipe.toString());
         }
@@ -524,7 +524,7 @@
                         + "?recipe " + Symbols.description + " ?description . "
                         + "FILTER (regex(?description, \"" + term + "\", \"i\"))" + "}";
 
-        TripleCollection tripleCollection = tcManager.getMGraph(new UriRef(recipeIndexLocation));
+        Graph tripleCollection = tcManager.getGraph(new IRI(recipeIndexLocation));
 
         RecipeList matchingRecipes = new RecipeList();
 
@@ -536,7 +536,7 @@
 
             while (resultSet.hasNext()) {
                 SolutionMapping solutionMapping = resultSet.next();
-                UriRef recipeID = (UriRef) solutionMapping.get("recipe");
+                IRI recipeID = (IRI) solutionMapping.get("recipe");
 
                 try {
                     Recipe recipe = getRecipe(recipeID);
@@ -565,15 +565,15 @@
                         + Symbols.description + " ?description . " + "FILTER (regex(?name, \"" + term
                         + "\", \"i\"))" + "}";
 
-        List<UriRef> recipeIDs = listRecipeIDs();
+        List<IRI> recipeIDs = listRecipeIDs();
 
-        TripleCollection[] tripleCollections = new TripleCollection[recipeIDs.size()];
+        Graph[] tripleCollections = new Graph[recipeIDs.size()];
 
         for (int i = 0; i < tripleCollections.length; i++) {
-            tripleCollections[i] = tcManager.getMGraph(recipeIDs.get(i));
+            tripleCollections[i] = tcManager.getGraph(recipeIDs.get(i));
         }
 
-        UnionMGraph unionMGraph = new UnionMGraph(tripleCollections);
+        UnionGraph unionGraph = new UnionGraph(tripleCollections);
 
         RuleList matchingRules = new RuleList();
 
@@ -581,12 +581,12 @@
 
             SelectQuery query = (SelectQuery) QueryParser.getInstance().parse(sparql);
 
-            ResultSet resultSet = tcManager.executeSparqlQuery(query, unionMGraph);
+            ResultSet resultSet = tcManager.executeSparqlQuery(query, unionGraph);
 
             while (resultSet.hasNext()) {
                 SolutionMapping solutionMapping = resultSet.next();
-                UriRef recipeID = (UriRef) solutionMapping.get("recipe");
-                UriRef ruleID = (UriRef) solutionMapping.get("rule");
+                IRI recipeID = (IRI) solutionMapping.get("recipe");
+                IRI ruleID = (IRI) solutionMapping.get("rule");
                 Literal description = (Literal) solutionMapping.get("description");
 
                 try {
@@ -619,15 +619,15 @@
                         + " ?rule . " + "?rule " + Symbols.description + " ?description . "
                         + "FILTER (regex(?description, \"" + term + "\", \"i\"))" + "}";
 
-        List<UriRef> recipeIDs = listRecipeIDs();
+        List<IRI> recipeIDs = listRecipeIDs();
 
-        TripleCollection[] tripleCollections = new TripleCollection[recipeIDs.size()];
+        Graph[] tripleCollections = new Graph[recipeIDs.size()];
 
         for (int i = 0; i < tripleCollections.length; i++) {
-            tripleCollections[i] = tcManager.getMGraph(recipeIDs.get(i));
+            tripleCollections[i] = tcManager.getGraph(recipeIDs.get(i));
         }
 
-        UnionMGraph unionMGraph = new UnionMGraph(tripleCollections);
+        UnionGraph unionGraph = new UnionGraph(tripleCollections);
 
         RuleList matchingRules = new RuleList();
 
@@ -635,12 +635,12 @@
 
             SelectQuery query = (SelectQuery) QueryParser.getInstance().parse(sparql);
 
-            ResultSet resultSet = tcManager.executeSparqlQuery(query, unionMGraph);
+            ResultSet resultSet = tcManager.executeSparqlQuery(query, unionGraph);
 
             while (resultSet.hasNext()) {
                 SolutionMapping solutionMapping = resultSet.next();
-                UriRef recipeID = (UriRef) solutionMapping.get("recipe");
-                UriRef ruleID = (UriRef) solutionMapping.get("rule");
+                IRI recipeID = (IRI) solutionMapping.get("recipe");
+                IRI ruleID = (IRI) solutionMapping.get("rule");
                 Literal description = (Literal) solutionMapping.get("description");
 
                 try {
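
The ClerezzaRuleStore changes above are one mechanical migration: UriRef becomes IRI, TripleCollection becomes Graph, TcManager's getMGraph/createMGraph/deleteTripleCollection become getGraph/createGraph/deleteGraph, and SPARQL bindings come back as RDFTerm instead of Resource. Below is a minimal sketch of the migrated query pattern from findRulesByName, assuming SolutionMapping lives in org.apache.clerezza.rdf.core.sparql alongside ResultSet, and using a placeholder SPARQL string in place of the Symbols-based query built above:

    import java.util.List;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.commons.rdf.Literal;
    import org.apache.clerezza.commons.rdf.RDFTerm;
    import org.apache.clerezza.rdf.core.access.TcManager;
    import org.apache.clerezza.rdf.core.sparql.QueryParser;
    import org.apache.clerezza.rdf.core.sparql.ResultSet;
    import org.apache.clerezza.rdf.core.sparql.SolutionMapping;
    import org.apache.clerezza.rdf.core.sparql.query.SelectQuery;
    import org.apache.clerezza.rdf.utils.UnionGraph;

    public class RecipeQuerySketch {

        public static void listRuleNames(TcManager tcManager, List<IRI> recipeIDs) throws Exception {
            // Build a union over all recipe graphs (UnionMGraph is now UnionGraph).
            Graph[] graphs = new Graph[recipeIDs.size()];
            for (int i = 0; i < graphs.length; i++) {
                graphs[i] = tcManager.getGraph(recipeIDs.get(i)); // was getMGraph(UriRef)
            }
            UnionGraph unionGraph = new UnionGraph(graphs);

            // Placeholder query; ClerezzaRuleStore builds its queries from Symbols.
            SelectQuery query = (SelectQuery) QueryParser.getInstance()
                    .parse("SELECT ?rule ?name WHERE { ?rule <urn:x-example:ruleName> ?name }");
            ResultSet resultSet = tcManager.executeSparqlQuery(query, unionGraph);

            while (resultSet.hasNext()) {
                SolutionMapping solutionMapping = resultSet.next();
                // Bindings are RDFTerm now (they were Resource); cast as needed.
                RDFTerm rule = solutionMapping.get("rule");
                Literal name = (Literal) solutionMapping.get("name");
                System.out.println(rule + " -> " + name.getLexicalForm());
            }
        }
    }
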
diff --git a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeImpl.java b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeImpl.java
index 4afd321..3c7f00b 100644
--- a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeImpl.java
+++ b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeImpl.java
@@ -19,7 +19,7 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.NoSuchRuleInRecipeException;
 import org.apache.stanbol.rules.base.api.Recipe;
 import org.apache.stanbol.rules.base.api.Rule;
@@ -37,7 +37,7 @@
  */
 public class RecipeImpl implements Recipe {
 
-    private UriRef recipeID;
+    private IRI recipeID;
     private String recipeDescription;
     private RuleList ruleList = new RuleList();
 
@@ -48,7 +48,7 @@
      * @param recipeDescription
      * @param ruleList
      */
-    public RecipeImpl(UriRef recipeID, String recipeDescription, RuleList ruleList) {
+    public RecipeImpl(IRI recipeID, String recipeDescription, RuleList ruleList) {
         this.recipeID = recipeID;
         this.recipeDescription = recipeDescription;
         if ( ruleList != null ) {
@@ -60,7 +60,7 @@
         return ruleList;
     }
 
-    public UriRef getRecipeID() {
+    public IRI getRecipeID() {
         return recipeID;
     }
 
@@ -137,7 +137,7 @@
     }
 
     @Override
-    public Rule getRule(UriRef ruleID) throws NoSuchRuleInRecipeException {
+    public Rule getRule(IRI ruleID) throws NoSuchRuleInRecipeException {
         for (Rule rule : ruleList) {
             if (rule.getRuleID().toString().equals(ruleID.toString())) {
                 return rule;
@@ -155,8 +155,8 @@
     }
 
     @Override
-    public List<UriRef> listRuleIDs() {
-        List<UriRef> ruleIDs = new ArrayList<UriRef>();
+    public List<IRI> listRuleIDs() {
+        List<IRI> ruleIDs = new ArrayList<IRI>();
 
         for (Rule rule : ruleList) {
             ruleIDs.add(rule.getRuleID());
diff --git a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeRule.java b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeRule.java
index 47b6c3b..57d24d5 100644
--- a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeRule.java
+++ b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RecipeRule.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.rules.manager;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.Recipe;
 import org.apache.stanbol.rules.base.api.Rule;
 import org.apache.stanbol.rules.base.api.util.AtomList;
@@ -30,7 +30,7 @@
  */
 public class RecipeRule extends RuleImpl {
 
-    public RecipeRule(Recipe recipe, UriRef ruleID, String ruleName, AtomList body, AtomList head) {
+    public RecipeRule(Recipe recipe, IRI ruleID, String ruleName, AtomList body, AtomList head) {
         super(ruleID, ruleName, body, head);
 
         bindToRecipe(recipe);
diff --git a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RuleImpl.java b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RuleImpl.java
index eb3f53d..b3cddb9 100644
--- a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RuleImpl.java
+++ b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/RuleImpl.java
@@ -16,7 +16,7 @@
  */
 package org.apache.stanbol.rules.manager;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.Recipe;
 import org.apache.stanbol.rules.base.api.Rule;
 import org.apache.stanbol.rules.base.api.RuleAtom;
@@ -31,7 +31,7 @@
  */
 public class RuleImpl implements Rule {
 
-    private UriRef ruleID;
+    private IRI ruleID;
 
     private String ruleName;
     private String rule;
@@ -42,7 +42,7 @@
     protected Recipe recipe;
     protected String description;
 
-    public RuleImpl(UriRef ruleID, String ruleName, AtomList body, AtomList head) {
+    public RuleImpl(IRI ruleID, String ruleName, AtomList body, AtomList head) {
         this.ruleID = ruleID;
         this.ruleName = ruleName;
         this.head = head;
@@ -154,7 +154,7 @@
     }
 
     @Override
-    public UriRef getRuleID() {
+    public IRI getRuleID() {
         return ruleID;
     }
 
diff --git a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleGrammar.jj b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleGrammar.jj
index cf00727..ac62f72 100644
--- a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleGrammar.jj
+++ b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleGrammar.jj
@@ -30,7 +30,7 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.Rule;
 import org.apache.stanbol.rules.base.api.RuleAtom;
 import org.apache.stanbol.rules.base.api.util.AtomList;
@@ -82,7 +82,7 @@
 import org.apache.stanbol.rules.manager.atoms.VariableAtom;
 
 import com.hp.hpl.jena.rdf.model.ModelFactory;
 import com.hp.hpl.jena.rdf.model.Resource;
 
 /**
 *
@@ -130,7 +130,7 @@
 
 
 	private static URI getSWRLArgument(String argument){
                 Resource rdfNode = null;
                 String[] argumentComposition = argument.split(":");
                 if(argumentComposition.length == 2){
                         String prefix = argumentComposition[0];
@@ -151,7 +151,7 @@
         }
 
         private static URI getSWRLVariable(String argument){
                 Resource variableResource = null;
                 String variableString = argument.substring(1);
 
 
@@ -288,7 +288,7 @@
 
                                                      }
                                                  else{
-                                                         rule = new RuleImpl(new UriRef(ruleStorePrefix+nsPrefix), nsPrefix, atoms[0], atoms[1]);
+                                                         rule = new RuleImpl(new IRI(ruleStorePrefix+nsPrefix), nsPrefix, atoms[0], atoms[1]);
                                                          kb.addRule(rule);
                                                 }
 	  									}
diff --git a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImpl.java b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImpl.java
index 9e12cef..161da9f 100644
--- a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImpl.java
+++ b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImpl.java
@@ -8,7 +8,7 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.Rule;
 import org.apache.stanbol.rules.base.api.RuleAtom;
 import org.apache.stanbol.rules.base.api.util.AtomList;
@@ -192,7 +192,7 @@
 
                                                      }
                                                  else{
-                                                         rule = new RuleImpl(new UriRef(ruleStorePrefix+nsPrefix), nsPrefix, atoms[0], atoms[1]);
+                                                         rule = new RuleImpl(new IRI(ruleStorePrefix+nsPrefix), nsPrefix, atoms[0], atoms[1]);
                                                          kb.addRule(rule);
                                                 }
       break;
diff --git a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImplTokenManager.java b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImplTokenManager.java
index 4d0b274..a245147 100644
--- a/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImplTokenManager.java
+++ b/rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImplTokenManager.java
@@ -6,7 +6,7 @@
 import java.io.StringReader;
 import java.net.URI;
 import java.net.URISyntaxException;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.Rule;
 import org.apache.stanbol.rules.base.api.RuleAtom;
 import org.apache.stanbol.rules.base.api.util.AtomList;
diff --git a/rules/manager/src/test/java/org/apache/stanbol/rules/manager/RuleStoreTest.java b/rules/manager/src/test/java/org/apache/stanbol/rules/manager/RuleStoreTest.java
index 20d3b76..1c2cf6b 100644
--- a/rules/manager/src/test/java/org/apache/stanbol/rules/manager/RuleStoreTest.java
+++ b/rules/manager/src/test/java/org/apache/stanbol/rules/manager/RuleStoreTest.java
@@ -25,7 +25,7 @@
 import java.util.Hashtable;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.access.WeightedTcProvider;
 import org.apache.clerezza.rdf.core.sparql.QueryEngine;
@@ -59,7 +59,7 @@
 
     @BeforeClass
     public static void setUpClass() throws Exception {
-        class SpecialTcManager extends TcManager {
+        /*class SpecialTcManager extends TcManager {
             public SpecialTcManager(QueryEngine qe, WeightedTcProvider wtcp) {
                 super();
                 bindQueryEngine(qe);
@@ -68,8 +68,8 @@
         }
 
         QueryEngine qe = new JenaSparqlEngine();
-        WeightedTcProvider wtcp = new SimpleTcProvider();
-        TcManager tcm = new SpecialTcManager(qe, wtcp);
+        WeightedTcProvider wtcp = new SimpleTcProvider();*/
+        TcManager tcm = TcManager.getInstance();//new SpecialTcManager(qe, wtcp);
 
         Dictionary<String,Object> configuration = new Hashtable<String,Object>();
         store = new ClerezzaRuleStore(configuration, tcm);
@@ -114,7 +114,7 @@
     }
     
     private void createRecipeTest() throws Exception {
-        Recipe recipe = store.createRecipe(new UriRef(
+        Recipe recipe = store.createRecipe(new IRI(
                 "http://incubator.apache.com/stanbol/rules/test/recipeA"), "The text recipe named A.");
 
         if (recipe == null) {
@@ -126,7 +126,7 @@
     }
 
     private void addRuleToRecipeTest() throws Exception {
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         String separator = System.getProperty("line.separator");
         String rule = "rule1[" + separator + "	is(<http://dbpedia.org/ontology/Person>, ?x) . " + separator
@@ -149,7 +149,7 @@
     }
 
     private void getRecipeTest() throws Exception {
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         if (recipe == null) {
             Assert.fail();
@@ -162,7 +162,7 @@
     }
 
     private void getNotExistingRuleByNameInRecipeTest() throws Exception {
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         try {
             recipe.getRule("ruleX");
@@ -174,10 +174,10 @@
     }
 
     private void getNotExistingRuleByIdInRecipeTest() throws Exception {
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         try {
-            recipe.getRule(new UriRef("http://foo.org/ruleX"));
+            recipe.getRule(new IRI("http://foo.org/ruleX"));
             Assert.fail();
         } catch (NoSuchRuleInRecipeException e) {
             Assert.assertTrue(true);
@@ -186,7 +186,7 @@
     }
 
     private void getExistingRuleByIdInRecipeTest() throws Exception {
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         try {
             Rule rule = recipe.getRule(recipe.listRuleIDs().get(0));
@@ -198,7 +198,7 @@
     }
 
     private void getExistingRuleByNameInRecipeTest() throws Exception {
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         try {
             Rule rule = recipe.getRule(recipe.listRuleNames().get(0));
@@ -239,7 +239,7 @@
     }
 
     private void removeRuleInRecipeTest() throws Exception {
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         String tmp = recipe.toString();
         Rule rule = recipe.getRule(recipe.listRuleNames().get(0));
@@ -247,7 +247,7 @@
         store.removeRule(recipe, rule);
 
         Recipe recipe2 = store
-                .getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+                .getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
 
         String tmp2 = recipe2.toString();
 
@@ -261,7 +261,7 @@
         Recipe[] initialRecipes = new Recipe[recipeListInitial.size()];
         initialRecipes = recipeListInitial.toArray(initialRecipes);
 
-        Recipe recipe = store.getRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/test/recipeA"));
+        Recipe recipe = store.getRecipe(new IRI("http://incubator.apache.com/stanbol/rules/test/recipeA"));
         store.removeRecipe(recipe);
 
         RecipeList recipeListFinal = store.listRecipes();
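
With SpecialTcManager commented out, the test bootstraps against the TcManager singleton. A minimal sketch of the migrated setup, assuming the ClerezzaRuleStore(Dictionary, TcManager) constructor used in setUpClass() and a placeholder recipe URI:

    import java.util.Dictionary;
    import java.util.Hashtable;

    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.core.access.TcManager;
    import org.apache.stanbol.rules.base.api.Recipe;
    import org.apache.stanbol.rules.manager.ClerezzaRuleStore;

    public class RuleStoreBootstrapSketch {

        public static void main(String[] args) throws Exception {
            // The SpecialTcManager subclass is gone; the singleton is used directly.
            TcManager tcm = TcManager.getInstance();

            Dictionary<String, Object> configuration = new Hashtable<String, Object>();
            ClerezzaRuleStore store = new ClerezzaRuleStore(configuration, tcm);

            // Recipe IDs are IRIs now (they were UriRefs).
            Recipe recipe = store.createRecipe(
                    new IRI("http://example.org/stanbol/rules/test/recipeA"),
                    "An example recipe.");
            store.removeRecipe(recipe.getRecipeID());
        }
    }
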
diff --git a/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/api/Refactorer.java b/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/api/Refactorer.java
index eaecc47..c8a2b41 100644
--- a/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/api/Refactorer.java
+++ b/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/api/Refactorer.java
@@ -16,9 +16,8 @@
  */
 package org.apache.stanbol.rules.refactor.api;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.stanbol.rules.base.api.NoSuchRecipeException;
 import org.apache.stanbol.rules.base.api.Recipe;
 import org.apache.stanbol.rules.base.api.RuleStore;
@@ -36,10 +36,10 @@
-     * Fetch the mgraph with the selected uri from the storage.
+     * Fetch the graph with the selected IRI from the storage.
      * 
      * @param uriRef
-     *            {@link UriRef}
-     * @return the {@link MGraph}.
+     *            {@link IRI}
+     * @return the {@link Graph}.
      */
-    MGraph getRefactoredDataSet(UriRef uriRef);
+    Graph getRefactoredDataSet(IRI uriRef);
 
     /**
      * The refactoring is performed by the {@code Refactorer} by invoking this method. The {@code datasetID}
@@ -48,13 +48,13 @@
      * {@link RuleStore},
      * 
      * @param refactoredDataSetID
-     *            {@link UriRef}
+     *            {@link IRI}
      * @param datasetID
-     *            {@link UriRef}
+     *            {@link IRI}
      * @param recipeIRI
-     *            {@link UriRef}
+     *            {@link IRI}
      */
-    void graphRefactoring(UriRef refactoredOntologyID, UriRef datasetID, UriRef recipeID) throws RefactoringException,
+    void graphRefactoring(IRI refactoredOntologyID, IRI datasetID, IRI recipeID) throws RefactoringException,
                                                                                          NoSuchRecipeException;
 
     /**
@@ -63,14 +63,14 @@
      * graph in order to obtain the refactoring.
      * 
      * @param datasetURI
-     *            {@link UriRef}
+     *            {@link IRI}
      * @param recipe
-     *            {@link UriRef}
-     * @return the refactored {@link MGraph}
+     *            {@link IRI}
+     * @return the refactored {@link Graph}
      * @throws RefactoringException
      * @throws NoSuchRecipeException
      */
-    TripleCollection graphRefactoring(UriRef datasetID, UriRef recipeID) throws RefactoringException,
+    Graph graphRefactoring(IRI datasetID, IRI recipeID) throws RefactoringException,
                                                                         NoSuchRecipeException;
 
     /**
@@ -79,13 +79,13 @@
      * graph in order to obtain the refactoring.
      * 
      * @param datasetID
-     *            {@link TripleCollection}
+     *            {@link Graph}
      * @param recipe
      *            {@link Recipe}
-     * @return the refactored {@link TripleCollection}
+     * @return the refactored {@link Graph}
      * @throws RefactoringException
      * @throws NoSuchRecipeException
      */
-    TripleCollection graphRefactoring(TripleCollection dataset, Recipe recipe) throws RefactoringException;
+    Graph graphRefactoring(Graph dataset, Recipe recipe) throws RefactoringException;
 
 }
diff --git a/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/impl/RefactorerImpl.java b/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/impl/RefactorerImpl.java
index 287af21..b234feb 100644
--- a/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/impl/RefactorerImpl.java
+++ b/rules/refactor/src/main/java/org/apache/stanbol/rules/refactor/impl/RefactorerImpl.java
@@ -20,13 +20,12 @@
 import java.util.Dictionary;
 import java.util.List;
 
-import org.apache.clerezza.rdf.core.Graph;
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.ImmutableGraph;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.access.WeightedTcProvider;
-import org.apache.clerezza.rdf.core.impl.SimpleMGraph;
+import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
 import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
@@ -154,9 +154,9 @@
     }
 
     @Override
-    public MGraph getRefactoredDataSet(UriRef uriRef) {
+    public Graph getRefactoredDataSet(IRI uriRef) {
 
-        return weightedTcProvider.getMGraph(uriRef);
+        return weightedTcProvider.getGraph(uriRef);
     }
 
     /**
@@ -166,14 +166,14 @@
      * @param datasetID
      * @return
      */
-    private Graph sparqlConstruct(ConstructQuery constructQuery, UriRef datasetID) {
+    private ImmutableGraph sparqlConstruct(ConstructQuery constructQuery, IRI datasetID) {
 
-        MGraph graph = weightedTcProvider.getMGraph(datasetID);
+        Graph graph = weightedTcProvider.getGraph(datasetID);
         return sparqlConstruct(constructQuery, graph);
 
     }
 
-    private Graph sparqlConstruct(ConstructQuery constructQuery, TripleCollection tripleCollection) {
+    private ImmutableGraph sparqlConstruct(ConstructQuery constructQuery, Graph tripleCollection) {
 
         return tcManager.executeSparqlQuery(constructQuery, tripleCollection);
 
@@ -181,7 +181,7 @@
 
     @SuppressWarnings("unchecked")
     @Override
-    public void graphRefactoring(UriRef refactoredOntologyID, UriRef datasetID, UriRef recipeID) throws RefactoringException,
+    public void graphRefactoring(IRI refactoredOntologyID, IRI datasetID, IRI recipeID) throws RefactoringException,
                                                                                                 NoSuchRecipeException {
 
         Recipe recipe;
@@ -193,7 +193,7 @@
                 List<ConstructQuery> constructQueries = (List<ConstructQuery>) ruleAdapter.adaptTo(recipe,
                     ConstructQuery.class);
 
-                MGraph mGraph = tcManager.createMGraph(refactoredOntologyID);
+                Graph mGraph = tcManager.createGraph(refactoredOntologyID);
                 for (ConstructQuery constructQuery : constructQueries) {
                     mGraph.addAll(this.sparqlConstruct(constructQuery, datasetID));
                 }
@@ -219,9 +219,9 @@
 
     @SuppressWarnings("unchecked")
     @Override
-    public TripleCollection graphRefactoring(UriRef graphID, UriRef recipeID) throws RefactoringException,
+    public Graph graphRefactoring(IRI graphID, IRI recipeID) throws RefactoringException,
                                                                              NoSuchRecipeException {
-        MGraph unionMGraph = null;
+        Graph unionGraph = null;
 
         // JenaToOwlConvert jenaToOwlConvert = new JenaToOwlConvert();
 
@@ -237,10 +237,10 @@
             List<ConstructQuery> constructQueries = (List<ConstructQuery>) ruleAdapter.adaptTo(recipe,
                 ConstructQuery.class);
 
-            unionMGraph = new SimpleMGraph();
+            unionGraph = new SimpleGraph();
 
             for (ConstructQuery constructQuery : constructQueries) {
-                unionMGraph.addAll(this.sparqlConstruct(constructQuery, graphID));
+                unionGraph.addAll(this.sparqlConstruct(constructQuery, graphID));
             }
 
         } catch (NoSuchRecipeException e1) {
@@ -256,13 +256,13 @@
             throw new RefactoringException("The cause of the refactoring excpetion is: " + e.getMessage(), e);
         }
 
-        return unionMGraph.getGraph();
+        return unionGraph.getImmutableGraph();
 
     }
 
     @SuppressWarnings("unchecked")
     @Override
-    public TripleCollection graphRefactoring(TripleCollection inputGraph, Recipe recipe) throws RefactoringException {
+    public Graph graphRefactoring(Graph inputGraph, Recipe recipe) throws RefactoringException {
 
         RuleAdapter ruleAdapter;
         try {
@@ -274,12 +274,12 @@
                 System.out.println(constructQuery.toString());
             }
 
-            MGraph unionMGraph = new SimpleMGraph();
+            Graph unionGraph = new SimpleGraph();
             for (ConstructQuery constructQuery : constructQueries) {
-                unionMGraph.addAll(sparqlConstruct(constructQuery, inputGraph));
+                unionGraph.addAll(sparqlConstruct(constructQuery, inputGraph));
             }
 
-            return unionMGraph;
+            return unionGraph;
         } catch (UnavailableRuleObjectException e) {
             throw new RefactoringException("The cause of the refactoring excpetion is: " + e.getMessage(), e);
         } catch (UnsupportedTypeForExportException e) {
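
The core of the migrated RefactorerImpl: a SPARQL CONSTRUCT now yields an ImmutableGraph, SimpleGraph replaces SimpleMGraph as the mutable accumulator, and Graph.getImmutableGraph() replaces MGraph.getGraph(). A minimal sketch under those assumptions, with a placeholder CONSTRUCT query standing in for the ones the RuleAdapter produces:

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.ImmutableGraph;
    import org.apache.clerezza.commons.rdf.impl.utils.simple.SimpleGraph;
    import org.apache.clerezza.rdf.core.access.TcManager;
    import org.apache.clerezza.rdf.core.sparql.QueryParser;
    import org.apache.clerezza.rdf.core.sparql.query.ConstructQuery;

    public class ConstructRefactoringSketch {

        public static ImmutableGraph refactor(TcManager tcManager, Graph input) throws Exception {
            // Placeholder query; RefactorerImpl gets its queries from the RuleAdapter.
            ConstructQuery constructQuery = (ConstructQuery) QueryParser.getInstance()
                    .parse("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }");

            // Each CONSTRUCT yields an ImmutableGraph; collect the results in a
            // mutable SimpleGraph (SimpleMGraph is now SimpleGraph).
            Graph union = new SimpleGraph();
            union.addAll(tcManager.executeSparqlQuery(constructQuery, input));

            // MGraph.getGraph() is now Graph.getImmutableGraph().
            return union.getImmutableGraph();
        }
    }
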
diff --git a/rules/refactor/src/test/java/org/apache/stanbol/rules/refactor/RefactoringTest.java b/rules/refactor/src/test/java/org/apache/stanbol/rules/refactor/RefactoringTest.java
index 8977a7e..db50d88 100644
--- a/rules/refactor/src/test/java/org/apache/stanbol/rules/refactor/RefactoringTest.java
+++ b/rules/refactor/src/test/java/org/apache/stanbol/rules/refactor/RefactoringTest.java
@@ -25,9 +25,8 @@
 
 import junit.framework.Assert;
 
-import org.apache.clerezza.rdf.core.MGraph;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.access.TcManager;
 import org.apache.clerezza.rdf.core.access.WeightedTcProvider;
 import org.apache.clerezza.rdf.core.sparql.QueryEngine;
@@ -69,7 +69,7 @@
     private static Refactorer refactorer;
     private static TcManager tcm;
     private static RuleStore store;
-    private TripleCollection tripleCollection;
+    private Graph tripleCollection;
     private String rule;
 
     @BeforeClass
@@ -125,15 +125,15 @@
         Model jenaModel = ModelFactory.createDefaultModel();
         jenaModel = jenaModel.read(inputStream, null);
 
-        tripleCollection = JenaToClerezzaConverter.jenaModelToClerezzaMGraph(jenaModel);
+        tripleCollection = JenaToClerezzaConverter.jenaModelToClerezzaGraph(jenaModel);
 
-        MGraph mGraph = tcm.createMGraph(new UriRef(
+        Graph mGraph = tcm.createGraph(new IRI(
                 "http://incubator.apache.com/stanbol/rules/refactor/test/graph"));
         mGraph.addAll(tripleCollection);
 
         Recipe recipe;
         try {
-            recipe = store.createRecipe(new UriRef(
+            recipe = store.createRecipe(new IRI(
                     "http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"),
                 "Recipe for testing the Refactor.");
             recipe = store.addRulesToRecipe(recipe, rule, "Test");
@@ -145,10 +145,10 @@
 
     @After
     public void tearDown() {
-        tcm.deleteTripleCollection(new UriRef("http://incubator.apache.com/stanbol/rules/refactor/test/graph"));
+        tcm.deleteGraph(new IRI("http://incubator.apache.com/stanbol/rules/refactor/test/graph"));
 
         try {
-            store.removeRecipe(new UriRef("http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"));
+            store.removeRecipe(new IRI("http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"));
         } catch (RecipeEliminationException e) {
             Assert.fail(e.getMessage());
         }
@@ -157,10 +157,10 @@
     @Test
     public void refactoringTest() throws Exception {
 
-        Recipe recipe = store.getRecipe(new UriRef(
+        Recipe recipe = store.getRecipe(new IRI(
                 "http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"));
 
-        TripleCollection tc = refactorer.graphRefactoring(new UriRef(
+        Graph tc = refactorer.graphRefactoring(new IRI(
                 "http://incubator.apache.com/stanbol/rules/refactor/test/graph"), recipe.getRecipeID());
 
         Assert.assertNotNull(tc);
@@ -170,11 +170,11 @@
     @Test
     public void easyRefactoringTest() throws Exception {
 
-        Recipe recipe = store.getRecipe(new UriRef(
+        Recipe recipe = store.getRecipe(new IRI(
                 "http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"));
         try {
 
-            TripleCollection tc = refactorer.graphRefactoring(tripleCollection, recipe);
+            Graph tc = refactorer.graphRefactoring(tripleCollection, recipe);
 
             Assert.assertNotNull(tc);
 
@@ -188,9 +188,9 @@
 
         try {
 
-            refactorer.graphRefactoring(new UriRef(
-                    "http://incubator.apache.com/stanbol/rules/refactor/test/refactoredGraph"), new UriRef(
-                    "http://incubator.apache.com/stanbol/rules/refactor/test/graph"), new UriRef(
+            refactorer.graphRefactoring(new IRI(
+                    "http://incubator.apache.com/stanbol/rules/refactor/test/refactoredGraph"), new IRI(
+                    "http://incubator.apache.com/stanbol/rules/refactor/test/graph"), new IRI(
                     "http://incubator.apache.com/stanbol/rules/refactor/test/recipeB"));
             Assert.fail();
 
@@ -210,14 +210,14 @@
                       + "rule2[ is(kres:Person, ?x) . same(localname(?y), \"text\") -> is(foaf:Person, ?x) ]";
 
         try {
-            Recipe recipe = store.getRecipe(new UriRef(
+            Recipe recipe = store.getRecipe(new IRI(
                     "http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"));
 
             recipe = store.addRulesToRecipe(recipe, rule, "Test");
 
-            refactorer.graphRefactoring(new UriRef(
-                    "http://incubator.apache.com/stanbol/rules/refactor/test/refactoredGraph"), new UriRef(
-                    "http://incubator.apache.com/stanbol/rules/refactor/test/graph"), new UriRef(
+            refactorer.graphRefactoring(new IRI(
+                    "http://incubator.apache.com/stanbol/rules/refactor/test/refactoredGraph"), new IRI(
+                    "http://incubator.apache.com/stanbol/rules/refactor/test/graph"), new IRI(
                     "http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"));
 
         } catch (NoSuchRecipeException e) {
@@ -235,12 +235,12 @@
 
         try {
 
-            refactorer.graphRefactoring(new UriRef(
-                    "http://incubator.apache.com/stanbol/rules/refactor/test/refactoredGraph"), new UriRef(
-                    "http://incubator.apache.com/stanbol/rules/refactor/test/graph"), new UriRef(
+            refactorer.graphRefactoring(new IRI(
+                    "http://incubator.apache.com/stanbol/rules/refactor/test/refactoredGraph"), new IRI(
+                    "http://incubator.apache.com/stanbol/rules/refactor/test/graph"), new IRI(
                     "http://incubator.apache.com/stanbol/rules/refactor/test/recipeA"));
 
-            TripleCollection tc = tcm.getMGraph(new UriRef(
+            Graph tc = tcm.getGraph(new IRI(
                     "http://incubator.apache.com/stanbol/rules/refactor/test/refactoredGraph"));
 
             Assert.assertNotNull(tc);
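
On the test side only the converter's name changes: jenaModelToClerezzaMGraph becomes jenaModelToClerezzaGraph. A minimal sketch of reading a Jena model and storing it through the TcManager; the JenaToClerezzaConverter import path is assumed from Stanbol's commons OWL module, and the graph URI is a placeholder:

    import java.io.InputStream;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.commons.rdf.IRI;
    import org.apache.clerezza.rdf.core.access.TcManager;
    import org.apache.stanbol.commons.owl.transformation.JenaToClerezzaConverter;

    import com.hp.hpl.jena.rdf.model.Model;
    import com.hp.hpl.jena.rdf.model.ModelFactory;

    public class JenaImportSketch {

        public static void load(InputStream inputStream) {
            Model jenaModel = ModelFactory.createDefaultModel();
            jenaModel = jenaModel.read(inputStream, null);

            // jenaModelToClerezzaMGraph(..) is now jenaModelToClerezzaGraph(..).
            Graph triples = JenaToClerezzaConverter.jenaModelToClerezzaGraph(jenaModel);

            Graph stored = TcManager.getInstance()
                    .createGraph(new IRI("urn:x-example:importedGraph"));
            stored.addAll(triples);
        }
    }
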
diff --git a/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RefactorResource.java b/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RefactorResource.java
index cd3f39b..aa74a31 100644
--- a/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RefactorResource.java
+++ b/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RefactorResource.java
@@ -54,8 +54,8 @@
 //import javax.ws.rs.core.Response.Status;
 
 import org.apache.clerezza.jaxrs.utils.form.MultiPartBody;
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
@@ -231,8 +231,8 @@
         OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
         OWLOntology inputOntology = manager.loadOntologyFromOntologyDocument(input);
 
-        TripleCollection tripleCollection = refactorer.graphRefactoring(
-            OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(inputOntology), actualRecipe);
+        Graph tripleCollection = refactorer.graphRefactoring(
+            OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(inputOntology), actualRecipe);
         // Refactor
         return OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(tripleCollection);
     }
@@ -291,14 +291,14 @@
 				log.info("The recipe ID is a URI without scheme. The ID is set to " + recipe);
 			}
         	
-        	UriRef recipeID = new UriRef(recipe);
+        	IRI recipeID = new IRI(recipe);
         	
             rcp = ruleStore.getRecipe(recipeID);
 
             OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
             OWLOntology inputOntology = manager.loadOntologyFromOntologyDocument(input);
-            TripleCollection tripleCollection = refactorer.graphRefactoring(
-                OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(inputOntology), rcp);
+            Graph tripleCollection = refactorer.graphRefactoring(
+                OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(inputOntology), rcp);
             OWLOntology outputOntology = OWLAPIToClerezzaConverter
                     .clerezzaGraphToOWLOntology(tripleCollection);
             rb = Response.ok(outputOntology);
@@ -334,9 +334,9 @@
         log.info("recipe: {}", recipe);
         log.info("input-graph: {}", inputGraph);
         log.info("output-graph: {}", outputGraph);
-        UriRef recipeID = new UriRef(recipe);
-        UriRef inputGraphID = new UriRef(inputGraph);
-        UriRef outputGraphID = new UriRef(outputGraph);
+        IRI recipeID = new IRI(recipe);
+        IRI inputGraphID = new IRI(inputGraph);
+        IRI outputGraphID = new IRI(outputGraph);
 
         // Refactorer semionRefactorer = semionManager.getRegisteredRefactorer();
 
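
RefactorResource's OWL round trip survives with two renames: owlOntologyToClerezzaMGraph becomes owlOntologyToClerezzaGraph, and the refactored result is typed Graph instead of TripleCollection. A minimal sketch of that round trip, assuming the OWLAPIToClerezzaConverter import path from Stanbol's commons OWL module:

    import java.io.InputStream;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.stanbol.commons.owl.transformation.OWLAPIToClerezzaConverter;
    import org.apache.stanbol.rules.base.api.Recipe;
    import org.apache.stanbol.rules.refactor.api.Refactorer;
    import org.semanticweb.owlapi.apibinding.OWLManager;
    import org.semanticweb.owlapi.model.OWLOntology;
    import org.semanticweb.owlapi.model.OWLOntologyManager;

    public class OwlRoundTripSketch {

        public static OWLOntology refactor(Refactorer refactorer, Recipe recipe,
                InputStream input) throws Exception {
            OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
            OWLOntology inputOntology = manager.loadOntologyFromOntologyDocument(input);

            // owlOntologyToClerezzaMGraph(..) is now owlOntologyToClerezzaGraph(..),
            // and the result is a Graph rather than a TripleCollection.
            Graph refactored = refactorer.graphRefactoring(
                    OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(inputOntology), recipe);

            return OWLAPIToClerezzaConverter.clerezzaGraphToOWLOntology(refactored);
        }
    }
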
diff --git a/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RulesResource.java b/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RulesResource.java
index ddb9c84..75178ac 100644
--- a/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RulesResource.java
+++ b/rules/web/src/main/java/org/apache/stanbol/rules/web/resources/RulesResource.java
@@ -52,7 +52,7 @@
 import javax.ws.rs.core.Response.Status;
 
 import org.apache.clerezza.jaxrs.utils.form.MultiPartBody;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.Property;
 import org.apache.felix.scr.annotations.Reference;
@@ -232,10 +232,10 @@
 				log.info("The recipe ID is a URI without scheme. The ID is set to " + recipeID);
 			}
         	
-            recipe = ruleStore.getRecipe(new UriRef(recipeID));
+            recipe = ruleStore.getRecipe(new IRI(recipeID));
 
             if (ruleID != null && !ruleID.isEmpty()) {
-                rule = ruleStore.getRule(recipe, new UriRef(ruleID));
+                rule = ruleStore.getRule(recipe, new IRI(ruleID));
                 RuleList ruleList = new RuleList();
                 ruleList.add(rule);
 
@@ -281,10 +281,10 @@
 				log.info("The recipe ID is a URI without scheme. The ID is set to " + recipeID);
 			}
 			
-            recipe = ruleStore.getRecipe(new UriRef(recipeID));
+            recipe = ruleStore.getRecipe(new IRI(recipeID));
 
             if (ruleID != null && !ruleID.isEmpty()) {
-                rule = ruleStore.getRule(recipe, new UriRef(ruleID));
+                rule = ruleStore.getRule(recipe, new IRI(ruleID));
                 RuleList ruleList = new RuleList();
                 ruleList.add(rule);
 
@@ -343,7 +343,7 @@
 				recipeID = "urn:" + recipeID;
 				log.info("The recipe ID is a URI without scheme. The ID is set to " + recipeID);
 			}
-            ruleStore.createRecipe(new UriRef(recipeID), description);
+            ruleStore.createRecipe(new IRI(recipeID), description);
 
             responseBuilder = Response.ok();
         } catch (AlreadyExistingRecipeException e) {
@@ -435,8 +435,8 @@
 	
 	            Recipe rcp;
 	            try {
-	            	rcp = ruleStore.getRecipe(new UriRef(recipe));
-	                Rule rl = ruleStore.getRule(rcp, new UriRef(rule));
+	            	rcp = ruleStore.getRecipe(new IRI(recipe));
+	                Rule rl = ruleStore.getRule(rcp, new IRI(rule));
 	                ruleStore.removeRule(rcp, rl);
 	            } catch (NoSuchRecipeException e) {
 	                log.error(e.getMessage(), e);
@@ -451,7 +451,7 @@
 	
 	        } else {
 	            try {
-	                ruleStore.removeRecipe(new UriRef(recipe));
+	                ruleStore.removeRecipe(new IRI(recipe));
 	            } catch (RecipeEliminationException e) {
 	                log.error(e.getMessage(), e);
 	                responseBuilder = Response.status(Status.INTERNAL_SERVER_ERROR);
@@ -515,7 +515,7 @@
 				log.info("The recipe ID is a URI without scheme. The ID is set to " + recipe);
 			}
         	
-            rcp = ruleStore.getRecipe(new UriRef(recipe));
+            rcp = ruleStore.getRecipe(new IRI(recipe));
             ruleStore.addRulesToRecipe(rcp, rules, description);
 
             responseBuilder = Response.ok();
@@ -576,7 +576,7 @@
 				log.info("The recipe ID is a URI without scheme. The ID is set to " + recipe);
 			}
             
-            Recipe rcp = ruleStore.getRecipe(new UriRef(recipe));
+            Recipe rcp = ruleStore.getRecipe(new IRI(recipe));
             RuleAdapter adapter = adapterManager.getAdapter(rcp, classToLoad);
 
             Object adaptedRecipe = adapter.adaptTo(rcp, classToLoad);
diff --git a/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeListWriter.java b/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeListWriter.java
index b738518..ba6d171 100644
--- a/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeListWriter.java
+++ b/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeListWriter.java
@@ -31,7 +31,7 @@
 import javax.ws.rs.ext.MessageBodyWriter;
 import javax.ws.rs.ext.Provider;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.rdfjson.serializer.RdfJsonSerializingProvider;
@@ -197,7 +197,7 @@
                 }
             } else if (mediaType.toString().equals(KRFormat.RDF_JSON)) {
 
-                TripleCollection mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(ontology);
+                Graph mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology);
 
                 RdfJsonSerializingProvider provider = new RdfJsonSerializingProvider();
                 provider.serialize(out, mGraph, SupportedFormat.RDF_JSON);
diff --git a/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeWriter.java b/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeWriter.java
index f49fafc..a3a3d7b 100644
--- a/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeWriter.java
+++ b/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RecipeWriter.java
@@ -31,8 +31,8 @@
 import javax.ws.rs.ext.MessageBodyWriter;
 import javax.ws.rs.ext.Provider;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
-import org.apache.clerezza.rdf.core.UriRef;
+import org.apache.clerezza.commons.rdf.Graph;
+import org.apache.clerezza.commons.rdf.IRI;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.rdfjson.serializer.RdfJsonSerializingProvider;
@@ -53,7 +53,6 @@
 import org.semanticweb.owlapi.io.OWLFunctionalSyntaxOntologyFormat;
 import org.semanticweb.owlapi.io.OWLXMLOntologyFormat;
 import org.semanticweb.owlapi.io.RDFXMLOntologyFormat;
-import org.semanticweb.owlapi.model.IRI;
 import org.semanticweb.owlapi.model.OWLAxiom;
 import org.semanticweb.owlapi.model.OWLDataFactory;
 import org.semanticweb.owlapi.model.OWLDataProperty;
@@ -148,14 +147,14 @@
     
                 RuleList rules = recipe.getRuleList();
     
-                UriRef recipeID = recipe.getRecipeID();
+                IRI recipeID = recipe.getRecipeID();
     
                 String recipeURI = recipeID.toString().replace("<", "").replace(">", "");
-                IRI recipeIRI = IRI.create(recipeURI);
+                org.semanticweb.owlapi.model.IRI recipeIRI = org.semanticweb.owlapi.model.IRI.create(recipeURI);
                 OWLIndividual recipeIndividual = factory.getOWLNamedIndividual(recipeIRI);
     
                 String descriptionURI = Symbols.description.toString().replace("<", "").replace(">", "");
-                IRI descriptionIRI = IRI.create(descriptionURI);
+                org.semanticweb.owlapi.model.IRI descriptionIRI = org.semanticweb.owlapi.model.IRI.create(descriptionURI);
                 OWLDataProperty descriptionProperty = factory.getOWLDataProperty(descriptionIRI);
                 
                 OWLAxiom axiom; 
@@ -169,7 +168,7 @@
                 
                 if(rules != null){
                     for (Rule rule : rules) {
-                        UriRef ruleID = rule.getRuleID();
+                        IRI ruleID = rule.getRuleID();
                         String ruleName = rule.getRuleName();
                         String ruleDescription = rule.getDescription();
         
@@ -185,12 +184,12 @@
         
                         String[] ruleParts = ruleContent.split("\\->");
         
-                        IRI ruleIRI = IRI.create(ruleURI);
+                        org.semanticweb.owlapi.model.IRI ruleIRI = org.semanticweb.owlapi.model.IRI.create(ruleURI);
         
-                        IRI ruleNameIRI = IRI.create(ruleNameURI);
-                        IRI ruleBodyIRI = IRI.create(ruleBodyURI);
-                        IRI ruleHeadIRI = IRI.create(ruleHeadURI);
-                        IRI hasRuleIRI = IRI.create(hasRuleURI);
+                        org.semanticweb.owlapi.model.IRI ruleNameIRI = org.semanticweb.owlapi.model.IRI.create(ruleNameURI);
+                        org.semanticweb.owlapi.model.IRI ruleBodyIRI = org.semanticweb.owlapi.model.IRI.create(ruleBodyURI);
+                        org.semanticweb.owlapi.model.IRI ruleHeadIRI = org.semanticweb.owlapi.model.IRI.create(ruleHeadURI);
+                        org.semanticweb.owlapi.model.IRI hasRuleIRI = org.semanticweb.owlapi.model.IRI.create(hasRuleURI);
         
                         OWLIndividual ruleIndividual = factory.getOWLNamedIndividual(ruleIRI);
         
@@ -264,7 +263,7 @@
                     }
                 } else if (mediaType.toString().equals(KRFormat.RDF_JSON)) {
     
-                    TripleCollection mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(ontology);
+                    Graph mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology);
     
                     RdfJsonSerializingProvider provider = new RdfJsonSerializingProvider();
                     provider.serialize(out, mGraph, SupportedFormat.RDF_JSON);
diff --git a/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RuleListWriter.java b/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RuleListWriter.java
index a23b7db..d0602a8 100644
--- a/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RuleListWriter.java
+++ b/rules/web/src/main/java/org/apache/stanbol/rules/web/writers/RuleListWriter.java
@@ -31,7 +31,7 @@
 import javax.ws.rs.ext.MessageBodyWriter;
 import javax.ws.rs.ext.Provider;
 
-import org.apache.clerezza.rdf.core.TripleCollection;
+import org.apache.clerezza.commons.rdf.Graph;
 import org.apache.clerezza.rdf.core.serializedform.Serializer;
 import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
 import org.apache.clerezza.rdf.rdfjson.serializer.RdfJsonSerializingProvider;
@@ -239,7 +239,7 @@
                 }
             } else if (mediaType.toString().equals(KRFormat.RDF_JSON)) {
 
-                TripleCollection mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaMGraph(ontology);
+                Graph mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology);
 
                 RdfJsonSerializingProvider provider = new RdfJsonSerializingProvider();
                 provider.serialize(out, mGraph, SupportedFormat.RDF_JSON);
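
All three writers end on the same serialization tail: convert the ontology to a Clerezza Graph and hand it to the RDF/JSON provider. A minimal sketch of that tail, under the same OWLAPIToClerezzaConverter import assumption as above:

    import java.io.OutputStream;

    import org.apache.clerezza.commons.rdf.Graph;
    import org.apache.clerezza.rdf.core.serializedform.SupportedFormat;
    import org.apache.clerezza.rdf.rdfjson.serializer.RdfJsonSerializingProvider;
    import org.apache.stanbol.commons.owl.transformation.OWLAPIToClerezzaConverter;
    import org.semanticweb.owlapi.model.OWLOntology;

    public class RdfJsonWriterSketch {

        public static void write(OWLOntology ontology, OutputStream out) {
            // owlOntologyToClerezzaMGraph(..) is now owlOntologyToClerezzaGraph(..);
            // the mGraph variable name survives from the MGraph days.
            Graph mGraph = OWLAPIToClerezzaConverter.owlOntologyToClerezzaGraph(ontology);

            RdfJsonSerializingProvider provider = new RdfJsonSerializingProvider();
            provider.serialize(out, mGraph, SupportedFormat.RDF_JSON);
        }
    }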