1.0.0: Fix tag.

git-svn-id: https://svn.apache.org/repos/asf/jackrabbit/oak/tags/jackrabbit-oak-1.0.0@1593444 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/.gitignore b/.gitignore
index 221390d..358ec2a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,3 @@
 .project
 target
 oak-run/*.csv
-.idea
-*.iml
-atlassian-ide-plugin.xml
diff --git a/RELEASE-NOTES.txt b/RELEASE-NOTES.txt
index 069ec32..6ae8ade 100644
--- a/RELEASE-NOTES.txt
+++ b/RELEASE-NOTES.txt
@@ -1,130 +1,86 @@
-Release Notes -- Apache Jackrabbit Oak -- Version 1.1.0
+Release Notes -- Apache Jackrabbit Oak -- Version 1.0.0
 
 Introduction
 ------------
 
-Jackrabbit Oak is an effort to implement a scalable and performant
-hierarchical content repository for use as the foundation of modern
-world-class web sites and  other demanding content applications.
+Jackrabbit Oak is a scalable, high-performance hierarchical content
+repository designed for use as the foundation of modern world-class
+web sites and other demanding content applications.
 
 The Oak effort is a part of the Apache Jackrabbit project.
 Apache Jackrabbit is a project of the Apache Software Foundation.
 
-Changes in Oak 0.20.0
----------------------
-
-New Features
-
-  [OAK-319] Similar (rep:similar) support
-  [OAK-382] JMX service to configure auto-cancel of long running queries
-  [OAK-593] Segment-based MK
-  [OAK-631] SegmentMK: Implement garbage collection
-  [OAK-904] Query: support "union" and "union all"
-  [OAK-1341] DocumentNodeStore: Implement revision garbage collection
-  [OAK-1543] Document the configuration steps for DataStore and BlobStores
-  [OAK-1574] AbstractRebaseDiff: Implement refined conflict resolution for addExistingNode conflicts
-  [OAK-1577] H2MK: Implement refined conflict resolution for addExistingNode conflicts
-  [OAK-1636] Solr index: support "jcr:score"
-
+Changes in Oak 1.0.0
+--------------------
 
 Improvements
 
-  [OAK-262] Query: support pseudo properties like jcr:score() and rep:excerpt()
-  [OAK-1056] Transient changes contributed by commit hooks are kept in memory
-  [OAK-1295] Recovery for missing _lastRev updates
-  [OAK-1329] Relaxed JCR locking behavior
-  [OAK-1342] Cascading document history
-  [OAK-1456] Non-blocking reindexing
-  [OAK-1489] ValueImpl should implement JackrabbitValue
-  [OAK-1496] Benchmark for concurrent file writes.
-  [OAK-1559] Expose BlobGCMBean for supported NodeStores
-  [OAK-1560] Expose RevisionGCMBean for supported NodeStores
-  [OAK-1567] Return Iterator instead of returning List in DocumentStore.query
-  [OAK-1568] Provide flag to not cache documents from queries
-  [OAK-1573] Document External Login and LDAP specifically
-  [OAK-1578] Configurable size of capped collection used by MongoDiffCache
-  [OAK-1592] Performance of Session#hasPermission
-  [OAK-1593] Guard against NPE in ConfigurationParameters.of(ConfigurationParameters...)
-  [OAK-1601] Log warning on concurrent session access
-  [OAK-1603] Operations tasks api improvements
-  [OAK-1606] Omit warnings about accessing commit related info when external events are excluded
-  [OAK-1607] EmbeddedSolrServerConfigurationProvider should not expose HTTP configuration
-  [OAK-1608] Let oak-solr-osgi start also in containers different than Jetty
-  [OAK-1612] Limit number of wildcards in rep:glob
-  [OAK-1616] Password utility: prevent timing attacks
-  [OAK-1627] Use non-orderable child nodes in ObservationRefreshTest
-  [OAK-1637] SolrIndexInitializer should be more configurable
-  [OAK-1638] Add QueryJcrTest suite coverage to Solr indexer
-  [OAK-1640] When modifying node types, revalidate only affected content
-  [OAK-1643] Implement BlobReferenceIterator optimized for Mongo
-  [OAK-1646] MarkSweepGarbageCollector - Improvements in exception handling and initialization
-  [OAK-1647] AsyncIndexUpdateTask creating too many checkpoints
-  [OAK-1651] Fix oak-solr-core pom dependencies
-  [OAK-1656] Provide lazy iterator for FileDataStore.getAllIdentifiers
-  [OAK-1657] Don't expand FT conditions if FT constraints are available
-  [OAK-1659] Improve CommitRateLimiter to delay commits
-  [OAK-1660] SegmentMK: Optimize reading of large binaries
-  [OAK-1661] JCR Event Info should contain NodeType for Events Type Node-Deleted
-  [OAK-1671] Use request handlers instead of search components in Solr native support
+  [OAK-14]   Identify and document changes in behaviour wrt. Jackrabbit 2
+  [OAK-364]  Runtime performance metrics
+  [OAK-925]  Query: Lucene index loads all path in memory
+  [OAK-1463] memory cache for RDB persistence
+  [OAK-1667] Encode Blob length as part of blobId in DataStoreBlobStore
+  [OAK-1687] Disable automatic cancellation of long running queries
+  [OAK-1711] Provide tools to manage externally synced users
+  [OAK-1722] Use a shared IndexSearcher for performing Lucene queries
+  [OAK-1726] Improve support for local caching in DataStoreBlobStore
+  [OAK-1730] Possible NPE in OrderedContentMirrorStoreStrategy
+  [OAK-1737] Add Lucene Codec for disabling field compression
+  [OAK-1738] Provide experimental config flag to enable/disable storing ...
+  [OAK-1740] Add a custom Lucene field visitor for the path
+  [OAK-1741] Ensure that package export version are proper for 1.0 release
+  [OAK-1748] Use Jackrabbit 2.8
+  [OAK-1756] Remove export package directive from oak-solr-*
+  [OAK-1758] Increased debug logging in SegmentMK
+  [OAK-1762] TarMK: Fall back to normal IO when mmap fails
+  [OAK-1772] Expose an extension point to move in memory state in ...
+  [OAK-1773] Optimize upgrade to DocumentNodeStore
+  [OAK-1780] Faster TarMK cleanup
+  [OAK-1728] Backport Solr indexer empty property query expansion ...
+  [OAK-1797] Missing documentation around Ordered Index
+  [OAK-1798] Simplify pluggability of custom principal providers
+  [OAK-1802] increase verbosity on migration progress
 
-Bugs
+Bug fixes
 
-  [OAK-204] short cutting the name mapping breaks path validation in value factory
-  [OAK-828] Full-text support for index aggregates
-  [OAK-1168] Invalid JCR paths not caught
-  [OAK-1174] Inconsistent handling of invalid names/paths
-  [OAK-1344] HierarchicalInvalidator hits query size limit
-  [OAK-1415] OOME when moving large subtree
-  [OAK-1465] performance degradation with growing index size on Oak-Mongo
-  [OAK-1541] Concurrent creation of users chokes on intermediate paths
-  [OAK-1554] Clarify behaviour for BlobStore api for invalid arguments
-  [OAK-1564] ClockTest on Windows fails
-  [OAK-1566] ArrayIndexOutOfBoundsException in Segment.getRefId()
-  [OAK-1569] ClusterPermissionsTest occasionally fails on Windows
-  [OAK-1579] ConcurrentAddNodesClusterIT.addNodes2() fails on travis
-  [OAK-1580] DBCursor close missing
-  [OAK-1581] NPE in OsgiWhiteboard#track()
-  [OAK-1582] ClassCastException in MarkSweepGarbageCollector#init()
-  [OAK-1583] Adjust release check script to remove all hidden files from the source comparison
-  [OAK-1584] Performance regression of adding and removing child nodes after OAK-850
-  [OAK-1585] rollback logic issues incorrect Updates
-  [OAK-1586] Implement checkpoint support in DocumentNodeStore
-  [OAK-1587] NoSuchElementException in SegmentTracker.getSegment()
-  [OAK-1594] Build fails on Java 8
-  [OAK-1595] Permissions#getPermissions(String, TreeLocation, boolean) does not work for permissions names
-  [OAK-1596] Provide mechanism for pre authenticated shared credentials
-  [OAK-1597] QueryStat MBean does not log any query
-  [OAK-1602] 512 byte shard key limit in MongoMK
-  [OAK-1604] Support for signed references in Blob
-  [OAK-1605] Running into endless loop due to tika 1.4
-  [OAK-1613] Node aggregation over multiple levels doesn't work
-  [OAK-1614] Oak Analyzer can't tokenize chinese phrases
-  [OAK-1615] Incomplete escaping in XPathConditionVisitor
-  [OAK-1620] Index cost calculation integer overflow
-  [OAK-1621] NPE on concurrent session usage
-  [OAK-1622] Duplicate configuration services (regression of OAK-1476)
-  [OAK-1623] TokenConfiguration expects nested configuration options
-  [OAK-1624] Item names with trailing spaces should not be allowed
-  [OAK-1625] SegmentTracker: Blob references not added
-  [OAK-1630] Solr parse exceptions for and/or full text expressions
-  [OAK-1632] Solr parse exception for primary type restriction
-  [OAK-1634] After crash, segment persistence is broken with failures in java.nio classes (with v0.19)
-  [OAK-1635] SolrIndexEditor should better escape path when deleting
-  [OAK-1644] Has Binary flag should also be copied to split documents
-  [OAK-1652] Incorrect name mapping in NodeObserver
-  [OAK-1654] Composite index aggregates
-  [OAK-1655] DataStoreBlobStore does not take into account maxLastModifiedTime when fetching all chunks
-  [OAK-1662] Node not accessible after document split
-  [OAK-1663] Long running RevisionTest
-  [OAK-1664] org.apache.jackrabbit.oak.namepath package missing package-info file
-  [OAK-1668] Lucene should not serve queries for what it doesn't index
-  [OAK-1670] ConnectedHead.update() passes current state on to Unconnected instead of base state
-  [OAK-1672] TarFileTest#testWriteAndRead failure
-  [OAK-1675] The OrderedPropertyIndex track WARN and INFO too often
-
+  [OAK-1076] XPath failures for typed properties
+  [OAK-1679] LdapLoginTestBase#testConcurrentLoginSameGroup fails if ...
+  [OAK-1684] Non-blocking reindexing doesn't save the initial checkpoint
+  [OAK-1689] XPath and union queries with "or" can return the same node ...
+  [OAK-1692] Document split may drop revisions
+  [OAK-1696] Repository fails to restart on Windows
+  [OAK-1701] Backport OAK-1673 fix to SolrQueryIndex to 1.0 branch
+  [OAK-1719] Missing commit hooks in upgrade
+  [OAK-1720] PermissionValidator may throw AccessDenied if testing for ...
+  [OAK-1729] DocumentNodeStore revision GC removes intermediate docs
+  [OAK-1731] Repository upgrade does not copy default values of property ...
+  [OAK-1732] Cluster node lease not renewed in time
+  [OAK-1734] UserInitializer#initialize runs a query without any indexes
+  [OAK-1739] Incorrect handling of multivalued comparisons in queries
+  [OAK-1742] DocumentNodeStore on MongoDB returns incorrect diff for merge ...
+  [OAK-1749] AsyncIndexUpdate may resurrect nodes
+  [OAK-1751] DocumentNodeStore may detect conflict too late
+  [OAK-1753] Calling initializer after InitialContent.initialize() on a ...
+  [OAK-1755] Search fails if the property to be searched on is of ...
+  [OAK-1757] Oak eats too many CPU cycles when idle
+  [OAK-1760] RepositoryUpgrade leads to one large commit with ...
+  [OAK-1761] DocumentNodeStore does not make use of References while ...
+  [OAK-1765] An admin user should be able to unlock any node
+  [OAK-1770] Document split suppressed with steady load on many cluster nodes
+  [OAK-1774] Full text query expansion should escape fields
+  [OAK-1776] Ordered index returns the wrong nodes when ordering by ...
+  [OAK-1779] Stale cache after MongoMK GC
+  [OAK-1781] Too many branch commits on document node store
+  [OAK-1783] Ignore rep:excerpt property to support core query engine ...
+  [OAK-1784] Async index update persists conflict markers
+  [OAK-1787] RepositoryException thrown instead of InvalidItemStateException
+  [OAK-1789] Upgraded version history has UUIDs as jcr:frozenUuid of ...
+  [OAK-1793] MongoMK GC removes documents with data still in use
+  [OAK-1795] RepositoryUpgrade fails for very long path with DocumentNodeStore
+  [OAK-1801] Versionable path of version histories not set during migration
 
 In addition to the above-mentioned changes, this release contains
-all the changes included up to the Apache Jackrabbit Oak 0.19 release.
+all the changes included up to the Apache Jackrabbit Oak 0.20.0 release.
 
 For more detailed information about all the changes in this and other
 Oak releases, please see the Oak issue tracker at
@@ -159,10 +115,10 @@
 ------------------------------------
 
 Established in 1999, The Apache Software Foundation provides organizational,
-legal, and financial support for more than 100 freely-available,
+legal, and financial support for more than 140 freely-available,
 collaboratively-developed Open Source projects. The pragmatic Apache License
 enables individual and commercial users to easily deploy Apache software;
 the Foundation's intellectual property framework limits the legal exposure
-of its 2,500+ contributors.
+of its 3,800+ contributors.
 
 For more information, visit http://www.apache.org/
diff --git a/oak-auth-external/pom.xml b/oak-auth-external/pom.xml
index c9d972b..8d2960b 100644
--- a/oak-auth-external/pom.xml
+++ b/oak-auth-external/pom.xml
@@ -23,7 +23,7 @@
     <parent>
         <groupId>org.apache.jackrabbit</groupId>
         <artifactId>oak-parent</artifactId>
-        <version>1.1-SNAPSHOT</version>
+        <version>1.0.0</version>
         <relativePath>../oak-parent/pom.xml</relativePath>
     </parent>
 
@@ -161,7 +161,7 @@
         <dependency>
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
-            <version>${h2.version}</version>
+            <version>1.3.175</version>
             <scope>test</scope>
         </dependency>
         <dependency>
diff --git a/oak-auth-ldap/pom.xml b/oak-auth-ldap/pom.xml
index 7ce1b9f..3a88f75 100644
--- a/oak-auth-ldap/pom.xml
+++ b/oak-auth-ldap/pom.xml
@@ -23,7 +23,7 @@
     <parent>
         <groupId>org.apache.jackrabbit</groupId>
         <artifactId>oak-parent</artifactId>
-        <version>1.1-SNAPSHOT</version>
+        <version>1.0.0</version>
         <relativePath>../oak-parent/pom.xml</relativePath>
     </parent>
 
diff --git a/oak-blob/pom.xml b/oak-blob/pom.xml
index bbbda87..e096423 100644
--- a/oak-blob/pom.xml
+++ b/oak-blob/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <artifactId>oak-parent</artifactId>
     <groupId>org.apache.jackrabbit</groupId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
   <modelVersion>4.0.0</modelVersion>
diff --git a/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java b/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
index 3ee3d3b..c30b341 100644
--- a/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
+++ b/oak-blob/src/main/java/org/apache/jackrabbit/oak/spi/blob/AbstractBlobStore.java
@@ -80,26 +80,16 @@
  * long), size of data store id (variable size long), hash code length (variable
  * size int), hash code.
  */
-public abstract class AbstractBlobStore implements GarbageCollectableBlobStore,
-        Cache.Backend<AbstractBlobStore.BlockId, AbstractBlobStore.Data> {
+public abstract class AbstractBlobStore implements GarbageCollectableBlobStore, Cache.Backend<AbstractBlobStore.BlockId, AbstractBlobStore.Data> {
 
     protected static final String HASH_ALGORITHM = "SHA-256";
 
     protected static final int TYPE_DATA = 0;
     protected static final int TYPE_HASH = 1;
+    protected static final int TYPE_HASH_COMPRESSED = 2;
 
-    /**
-     * The minimum block size. Blocks must be larger than that so that the
-     * content hash is always shorter than the data itself.
-     */
     protected static final int BLOCK_SIZE_LIMIT = 48;
 
-    /**
-     * The blob ids that are still floating around in memory. The blob store
-     * assumes such binaries must not be deleted, because those binaries are not
-     * referenced yet in a way the garbage collection algorithm can detect (not
-     * referenced at all, or only referenced in memory).
-     */
     protected Map<String, WeakReference<String>> inUse =
         Collections.synchronizedMap(new WeakHashMap<String, WeakReference<String>>());
 
@@ -193,7 +183,6 @@
         }
     }
 
-    @Override
     public InputStream getInputStream(String blobId) throws IOException {
         // Marking would be handled by the next call to store.readBlob
         return new BlobStoreInputStream(this, blobId, 0);
@@ -226,8 +215,9 @@
             String blobId = reference.substring(0, colon);
             if (reference.equals(getReference(blobId))) {
                 return blobId;
+            }else{
+                log.debug("Possibly invalid reference as blobId does not match {}", reference);
             }
-            log.debug("Possibly invalid reference as blobId does not match {}", reference);
         }
         return null;
     }
@@ -248,8 +238,7 @@
     protected byte[] getOrCreateReferenceKey() {
         byte[] referenceKeyValue = new byte[256];
         new SecureRandom().nextBytes(referenceKeyValue);
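+        // no reference key was configured: generate a random one (see the log
+        // message below about specifying a key for stable references)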
-        log.info("Reference key is not specified for the BlobStore in use. " + 
-                "Generating a random key. For stable " +
+        log.info("Reference key is not specified for the BlobStore in use. Generating a random key. For stable " +
                 "reference ensure that reference key is specified");
         return referenceKeyValue;
     }
@@ -282,18 +271,16 @@
     }
 
     /**
-     * Set the referenceKey from plain text. Key content would be UTF-8 encoding
-     * of the string.
-     * 
-     * <p>
-     * This is useful when setting key via generic bean property manipulation
-     * from string properties. User can specify the key in plain text and that
-     * would be passed on this object via
-     * {@link org.apache.jackrabbit.oak.commons.PropertiesUtil#populate(Object, java.util.Map, boolean)}
-     * 
+     * Set the referenceKey from plain text. Key content would be
+     * UTF-8 encoding of the string.
+     *
+     * <p>This is useful when setting key via generic
+     *  bean property manipulation from string properties. User can specify the
+     *  key in plain text and that would be passed on this object via
+     *  {@link org.apache.jackrabbit.oak.commons.PropertiesUtil#populate(Object, java.util.Map, boolean)}
+     *
      * @param textKey plain text key
-     * @see org.apache.jackrabbit.oak.commons.PropertiesUtil#populate(Object,
-     *      java.util.Map, boolean)
+     * @see org.apache.jackrabbit.oak.commons.PropertiesUtil#populate(Object, java.util.Map, boolean)
      */
     public void setReferenceKeyPlainText(String textKey) {
         setReferenceKey(textKey.getBytes(Charsets.UTF_8));
@@ -308,9 +295,7 @@
         inUse.clear();
     }
 
-    private void convertBlobToId(InputStream in,
-            ByteArrayOutputStream idStream, int level, long totalLength)
-            throws IOException {
+    private void convertBlobToId(InputStream in, ByteArrayOutputStream idStream, int level, long totalLength) throws IOException {
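+        // read the stream block by block: small blocks are inlined into the id
+        // (TYPE_DATA), larger ones are stored and referenced by hash (TYPE_HASH)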
         int count = 0;
         // try to re-use the block (but not concurrently)
         byte[] block = blockBuffer.getAndSet(null);
@@ -341,13 +326,8 @@
                 idStream.write(TYPE_HASH);
                 IOUtils.writeVarInt(idStream, level);
                 if (level > 0) {
-                    // level > 0: total size (size of all sub-blocks)
-                    // (see class level javadoc for details)                    
                     IOUtils.writeVarLong(idStream, totalLength);
                 }
-                // level = 0: size (size of this block)
-                // level > 0: size of the indirection block
-                // (see class level javadoc for details)                
                 IOUtils.writeVarLong(idStream, blockLen);
                 totalLength += blockLen;
                 IOUtils.writeVarInt(idStream, digest.length);
@@ -406,8 +386,7 @@
     }
 
     @Override
-    public int readBlob(String blobId, long pos, byte[] buff, int off,
-            int length) throws IOException {
+    public int readBlob(String blobId, long pos, byte[] buff, int off, int length) throws IOException {
         if (isMarkEnabled()) {
             mark(blobId);
         }
@@ -432,9 +411,6 @@
                 pos -= len;
             } else if (type == TYPE_HASH) {
                 int level = IOUtils.readVarInt(idStream);
-                // level = 0: size (size of this block)
-                // level > 0: total size (size of all sub-blocks)
-                // (see class level javadoc for details)
                 long totalLength = IOUtils.readVarLong(idStream);
                 if (level > 0) {
                     // block length (ignored)
@@ -509,9 +485,6 @@
                 totalLength += len;
             } else if (type == TYPE_HASH) {
                 int level = IOUtils.readVarInt(idStream);
-                // level = 0: size (size of this block)
-                // level > 0: total size (size of all sub-blocks)
-                // (see class level javadoc for details)                
                 totalLength += IOUtils.readVarLong(idStream);
                 if (level > 0) {
                     // block length (ignored)
@@ -546,9 +519,7 @@
                 IOUtils.skipFully(idStream, len);
             } else if (type == TYPE_HASH) {
                 int level = IOUtils.readVarInt(idStream);
-                // level = 0: size (size of this block)
-                // level > 0: total size (size of all sub-blocks)
-                // (see class level javadoc for details)
+                // totalLength
                 IOUtils.readVarLong(idStream);
                 if (level > 0) {
                     // block length (ignored)
@@ -678,9 +649,7 @@
                         IOUtils.skipFully(idStream, len);
                     } else if (type == TYPE_HASH) {
                         int level = IOUtils.readVarInt(idStream);
-                        // level = 0: size (size of this block)
-                        // level > 0: total size (size of all sub-blocks)
-                        // (see class level javadoc for details)
+                        // totalLength
                         IOUtils.readVarLong(idStream);
                         if (level > 0) {
                             // block length (ignored)
@@ -703,7 +672,7 @@
             } catch (Exception e) {
                 throw new RuntimeException(e);
             }
-            // Check now if ids are available
+            // Check now if ids available
             if (!queue.isEmpty()) {
                 return true;
             }
diff --git a/oak-commons/pom.xml b/oak-commons/pom.xml
index 59405bd..c71db6a 100644
--- a/oak-commons/pom.xml
+++ b/oak-commons/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-core/pom.xml b/oak-core/pom.xml
index bf982c5..0fcc3ab 100644
--- a/oak-core/pom.xml
+++ b/oak-core/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
@@ -38,15 +38,8 @@
         <artifactId>maven-bundle-plugin</artifactId>
         <configuration>
           <instructions>
-            <Embed-Dependency>
-              <!-- OAK-1708 TODO: temporary workaround for embedding code for DocumentNodeStoreService-->
-              commons-dbcp,commons-pool,h2,json-simple,
-              <!-- OAK-1708 TODO: note these below will only embedded when build with the respective profiles from oak-parent -->
-              postgresql,db2,db2-license
-            </Embed-Dependency>
-            <Embed-Transitive>true</Embed-Transitive>
             <Import-Package>
-              *;resolution:=optional
+              *
             </Import-Package>
             <Export-Package>
               org.apache.jackrabbit.oak,
@@ -219,7 +212,7 @@
       <groupId>com.googlecode.json-simple</groupId>
       <artifactId>json-simple</artifactId>
       <version>1.1.1</version>
-      <!--<optional>true</optional>-->
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.jackrabbit</groupId>
@@ -279,10 +272,18 @@
       <optional>true</optional>
     </dependency>
 
+    <!-- db blob store -->
+    <dependency>
+      <groupId>com.h2database</groupId>
+      <artifactId>h2</artifactId>
+      <version>1.3.175</version>
+      <optional>true</optional>
+    </dependency>
     <dependency>
       <groupId>commons-dbcp</groupId>
       <artifactId>commons-dbcp</artifactId>
       <version>1.4</version>
+      <optional>true</optional>
     </dependency>
 
     <!-- Logging -->
@@ -337,11 +338,5 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>com.h2database</groupId>
-      <artifactId>h2</artifactId>
-      <version>${h2.version}</version>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
 </project>
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/osgi/OsgiWhiteboard.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/osgi/OsgiWhiteboard.java
index 18b6795..5f80ec1 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/osgi/OsgiWhiteboard.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/osgi/OsgiWhiteboard.java
@@ -18,19 +18,13 @@
 
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
-import static com.google.common.collect.Lists.newArrayList;
-import static com.google.common.collect.Maps.newHashMap;
-import static com.google.common.collect.Maps.newTreeMap;
-import static java.util.Collections.emptyList;
-import static java.util.Collections.singletonList;
+import static java.util.Arrays.asList;
 
 import java.util.Collections;
 import java.util.Dictionary;
 import java.util.Hashtable;
 import java.util.List;
 import java.util.Map;
-import java.util.SortedMap;
-import java.util.concurrent.atomic.AtomicReference;
 
 import javax.annotation.Nonnull;
 
@@ -38,10 +32,8 @@
 import org.apache.jackrabbit.oak.spi.whiteboard.Tracker;
 import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
 import org.osgi.framework.BundleContext;
-import org.osgi.framework.ServiceReference;
 import org.osgi.framework.ServiceRegistration;
 import org.osgi.util.tracker.ServiceTracker;
-import org.osgi.util.tracker.ServiceTrackerCustomizer;
 
 /**
  * OSGi-based whiteboard implementation.
@@ -77,63 +69,17 @@
         };
     }
 
-    /**
-     * Returns a tracker for services of the given type. The returned tracker
-     * is optimized for frequent {@link Tracker#getServices()} calls through
-     * the use of a pre-compiled list of services that's atomically updated
-     * whenever services are added, modified or removed.
-     */
     @Override
-    public <T> Tracker<T> track(final Class<T> type) {
+    public <T> Tracker<T> track(Class<T> type) {
         checkNotNull(type);
-        final AtomicReference<List<T>> list =
-                new AtomicReference<List<T>>(Collections.<T>emptyList());
-        final ServiceTrackerCustomizer customizer =
-                new ServiceTrackerCustomizer() {
-                    private final Map<ServiceReference, T> services =
-                            newHashMap();
-                    @Override @SuppressWarnings("unchecked")
-                    public synchronized Object addingService(
-                            ServiceReference reference) {
-                        Object service = context.getService(reference);
-                        if (type.isInstance(service)) {
-                            services.put(reference, (T) service);
-                            list.set(getServiceList(services));
-                            return service;
-                        } else {
-                            context.ungetService(reference);
-                            return null;
-                        }
-                    }
-                    @Override @SuppressWarnings("unchecked")
-                    public synchronized void modifiedService(
-                            ServiceReference reference, Object service) {
-                        // TODO: Figure out if the old reference instance
-                        // would automatically reflect the updated properties.
-                        // For now we play it safe by replacing the old key
-                        // with the new reference instance passed as argument.
-                        services.remove(reference);
-                        services.put(reference, (T) service);
-                        list.set(getServiceList(services));
-                    }
-                    @Override
-                    public synchronized void removedService(
-                            ServiceReference reference, Object service) {
-                        services.remove(reference);
-                        list.set(getServiceList(services));
-                        // TODO: Note that the service might still be in use
-                        // by some client that called getServices() before
-                        // this method was invoked.
-                        context.ungetService(reference);
-                    }
-                };
         final ServiceTracker tracker =
-                new ServiceTracker(context, type.getName(), customizer);
+                new ServiceTracker(context, type.getName(), null);
         tracker.open();
         return new Tracker<T>() {
-            @Override
+            @Override @SuppressWarnings("unchecked")
             public List<T> getServices() {
-                return list.get();
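+                // ServiceTracker.getServices() returns null when no matching
+                // service is registered, so fall back to an empty list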
+                Object[] services = tracker.getServices();
+                return (List<T>) (services != null ? asList(services) : Collections.emptyList());
             }
             @Override
             public void stop() {
@@ -142,26 +88,4 @@
         };
     }
 
-    /**
-     * Utility method that sorts the service objects in the given map
-     * according to their service rankings and returns the resulting list.
-     *
-     * @param services currently available services
-     * @return ordered list of the services
-     */
-    private static <T> List<T> getServiceList(
-            Map<ServiceReference, T> services) {
-        switch (services.size()) {
-        case 0:
-            return emptyList();
-        case 1:
-            return singletonList(
-                    services.values().iterator().next());
-        default:
-            SortedMap<ServiceReference, T> sorted = newTreeMap();
-            sorted.putAll(services);
-            return newArrayList(sorted.values());
-        }
-    }
-
 }
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
index 0b67eea..57dd178 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/MarkSweepGarbageCollector.java
@@ -96,7 +96,7 @@
     /**
      * Creates an instance of MarkSweepGarbageCollector
      *
-     * @param marker BlobReferenceRetriever instanced used to fetch refereed blob entries
+     * @param marker BlobReferenceRetriever instance used to fetch referred blob entries
      * @param blobStore
      * @param root the root absolute path of directory under which temporary
      *             files would be created
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/db/DbBlobStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/db/DbBlobStore.java
new file mode 100644
index 0000000..2b831bf
--- /dev/null
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/blob/db/DbBlobStore.java
@@ -0,0 +1,317 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.db;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.jackrabbit.oak.commons.StringUtils;
+import org.apache.jackrabbit.oak.plugins.blob.CachingBlobStore;
+import org.h2.jdbcx.JdbcConnectionPool;
+
+import com.google.common.collect.AbstractIterator;
+
+/**
+ * A database blob store.
+ */
+public class DbBlobStore extends CachingBlobStore {
+
+    private JdbcConnectionPool cp;
+    private long minLastModified;
+
+    public void setConnectionPool(JdbcConnectionPool cp) throws SQLException {
+        this.cp = cp;
+        Connection conn = cp.getConnection();
+        Statement stat = conn.createStatement();
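+        // create the metadata and data tables on first use so the store works
+        // against an empty database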
+        stat.execute("create table if not exists datastore_meta" +
+                "(id varchar primary key, level int, lastMod bigint)");
+        stat.execute("create table if not exists datastore_data" +
+                "(id varchar primary key, data binary)");
+        stat.close();
+        conn.close();
+    }
+
+    @Override
+    protected synchronized void storeBlock(byte[] digest, int level, byte[] data) throws IOException {
+        try {
+            storeBlockToDatabase(digest, level, data);
+        } catch (SQLException e) {
+            throw new IOException(e);
+        }
+    }
+    
+    private void storeBlockToDatabase(byte[] digest, int level, byte[] data) throws SQLException {
+        String id = StringUtils.convertBytesToHex(digest);
+        cache.put(id, data);
+        Connection conn = cp.getConnection();
+        try {
+            long now = System.currentTimeMillis();
+            PreparedStatement prep = conn.prepareStatement(
+                    "update datastore_meta set lastMod = ? where id = ?");
+            int count;
+            try {
+                prep.setLong(1, now);
+                prep.setString(2, id);
+                count = prep.executeUpdate();
+            } finally {
+                prep.close();
+            }
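+            // no row was updated, so this block is new: insert data first, then
+            // metadata; duplicate-key failures mean another thread stored the
+            // same content-addressed block concurrently, which is fine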
+            if (count == 0) {
+                try {
+                    prep = conn.prepareStatement(
+                            "insert into datastore_data(id, data) values(?, ?)");
+                    try {
+                        prep.setString(1, id);
+                        prep.setBytes(2, data);
+                        prep.execute();
+                    } finally {
+                        prep.close();
+                    }
+                } catch (SQLException e) {
+                    // already exists - ok
+                }
+                try {
+                    prep = conn.prepareStatement(
+                            "insert into datastore_meta(id, level, lastMod) values(?, ?, ?)");
+                    try {
+                        prep.setString(1, id);
+                        prep.setInt(2, level);
+                        prep.setLong(3, now);
+                        prep.execute();
+                    } finally {
+                        prep.close();
+                    }
+                } catch (SQLException e) {
+                    // already exists - ok
+                }
+            }
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Override
+    protected byte[] readBlockFromBackend(BlockId blockId) throws Exception {
+        String id = StringUtils.convertBytesToHex(blockId.getDigest());
+        byte[] data = cache.get(id);
+        if (data == null) {        
+            Connection conn = cp.getConnection();
+            try {
+                PreparedStatement prep = conn.prepareStatement(
+                        "select data from datastore_data where id = ?");
+                try {
+                    prep.setString(1, id);
+                    ResultSet rs = prep.executeQuery();
+                    if (!rs.next()) {
+                        throw new IOException("Datastore block " + id + " not found");
+                    }
+                    data = rs.getBytes(1);
+                } finally {
+                    prep.close();
+                }
+                cache.put(id, data);
+            } finally {
+                conn.close();
+            }
+        }
+        // System.out.println("    read block " + id + " blockLen: " + data.length + " [0]: " + data[0]);
+        if (blockId.getPos() == 0) {
+            return data;
+        }
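+        // a non-zero position selects a suffix of the block: copy from pos to the end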
+        int len = (int) (data.length - blockId.getPos());
+        if (len < 0) {
+            return new byte[0];
+        }
+        byte[] d2 = new byte[len];
+        System.arraycopy(data, (int) blockId.getPos(), d2, 0, len);
+        return d2;
+    }
+
+    @Override
+    public void startMark() throws IOException {
+        minLastModified = System.currentTimeMillis();
+        markInUse();
+    }
+
+    @Override
+    protected boolean isMarkEnabled() {
+        return minLastModified != 0;
+    }
+
+    @Override
+    protected void mark(BlockId blockId) throws Exception {
+        if (minLastModified == 0) {
+            return;
+        }
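+        // touch the metadata row so the subsequent sweep keeps this block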
+        Connection conn = cp.getConnection();
+        try {
+            String id = StringUtils.convertBytesToHex(blockId.getDigest());
+            PreparedStatement prep = conn.prepareStatement(
+                    "update datastore_meta set lastMod = ? where id = ? and lastMod < ?");
+            prep.setLong(1, System.currentTimeMillis());
+            prep.setString(2, id);
+            prep.setLong(3, minLastModified);
+            prep.executeUpdate();
+            prep.close();
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Override
+    public int sweep() throws IOException {
+        try {
+            return sweepFromDatabase();
+        } catch (SQLException e) {
+            throw new IOException(e);
+        }
+    }
+    
+    private int sweepFromDatabase() throws SQLException {
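+        // delete every block whose lastMod predates the mark phase, i.e. blocks
+        // not marked as in use since startMark() was called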
+        int count = 0;
+        Connection conn = cp.getConnection();
+        try {
+            PreparedStatement prep = conn.prepareStatement(
+                    "select id from datastore_meta where lastMod < ?");
+            prep.setLong(1, minLastModified);
+            ResultSet rs = prep.executeQuery();
+            ArrayList<String> ids = new ArrayList<String>();
+            while (rs.next()) {
+                ids.add(rs.getString(1));
+            }
+            prep = conn.prepareStatement(
+                "delete from datastore_meta where id = ?");
+            PreparedStatement prepData = conn.prepareStatement(
+                "delete from datastore_data where id = ?");
+            for (String id : ids) {
+                prep.setString(1, id);
+                prep.execute();
+                prepData.setString(1, id);
+                prepData.execute();
+                count++;
+            }
+            prepData.close();
+            prep.close();
+        } finally {
+            conn.close();
+        }
+        minLastModified = 0;
+        return count;
+    }
+
+    @Override
+    public boolean deleteChunks(List<String> chunkIds, long maxLastModifiedTime) throws Exception {
+        Connection conn = cp.getConnection();
+        try {
+            
+            PreparedStatement prep = null;
+            PreparedStatement prepData = null;
+            
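+            // build a "?,?,...,?" placeholder list, one marker per chunk id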
+            StringBuilder inClause = new StringBuilder();
+            int batch = chunkIds.size();
+            for (int i = 0; i < batch; i++) {
+                inClause.append('?');
+                if (i != batch - 1) {
+                    inClause.append(',');
+                }
+            }
+
+            if (maxLastModifiedTime > 0) {
+                prep = conn.prepareStatement(
+                        "delete from datastore_meta where id in (" 
+                                + inClause.toString() + ") and lastMod <= ?");
+                prep.setLong(batch + 1, maxLastModifiedTime);
+
+                prepData = conn.prepareStatement(
+                        "delete from datastore_data where id in (" 
+                                + inClause.toString() + ") and lastMod <= ?");
+                prepData.setLong(batch + 1, maxLastModifiedTime);
+            } else {
+                prep = conn.prepareStatement(
+                        "delete from datastore_meta where id in (" 
+                                + inClause.toString() + ")");
+
+                prepData = conn.prepareStatement(
+                        "delete from datastore_data where id in (" 
+                                + inClause.toString() + ")");
+            }
+            
+            for (int idx = 0; idx < batch; idx++) {
+                prep.setString(idx + 1, chunkIds.get(idx));
+                prepData.setString(idx + 1, chunkIds.get(idx));
+            }
+
+            prep.execute();
+            prepData.execute();
+            prep.close();
+            prepData.close();
+        } finally {
+            conn.commit();
+            conn.close();
+        }
+
+        return true;
+    }
+
+
+    @Override
+    public Iterator<String> getAllChunkIds(long maxLastModifiedTime) throws Exception {
+        final Connection conn = cp.getConnection();
+        PreparedStatement prep = null;
+
+        if (maxLastModifiedTime > 0) {
+            prep = conn.prepareStatement(
+                    "select id from datastore_meta where lastMod <= ?");
+            prep.setLong(1, maxLastModifiedTime);
+        } else {
+            prep = conn.prepareStatement(
+                    "select id from datastore_meta");
+        }
+
+        final ResultSet rs = prep.executeQuery();
+
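+        // stream ids lazily; the connection is closed when the result set is
+        // exhausted or when a SQLException forces early termination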
+        return new AbstractIterator<String>() {
+            @Override
+            protected String computeNext() {
+                try {
+                    if (rs.next()) {
+                        return rs.getString(1);
+                    }
+                    conn.close();
+                } catch (SQLException e) {
+                    try {
+                        if ((conn != null) && !conn.isClosed()) {
+                            conn.close();
+                        }
+                    } catch (Exception e2) {
+                        // ignore
+                    }
+                    throw new RuntimeException(e);
+                }
+                return endOfData();
+            }
+        };
+    }
+    
+}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java
index 5e16cc4..720088e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentNodeStoreService.java
@@ -25,12 +25,13 @@
 import java.util.Dictionary;
 import java.util.Hashtable;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-import javax.sql.DataSource;
-
+import com.mongodb.DB;
+import com.mongodb.MongoClient;
+import com.mongodb.MongoClientOptions;
+import com.mongodb.MongoClientURI;
 import org.apache.felix.scr.annotations.Activate;
 import org.apache.felix.scr.annotations.Component;
 import org.apache.felix.scr.annotations.ConfigurationPolicy;
@@ -49,7 +50,6 @@
 import org.apache.jackrabbit.oak.plugins.blob.BlobGCMBean;
 import org.apache.jackrabbit.oak.plugins.blob.BlobGarbageCollector;
 import org.apache.jackrabbit.oak.plugins.document.cache.CachingDocumentStore;
-import org.apache.jackrabbit.oak.plugins.document.rdb.RDBDataSourceFactory;
 import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection;
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
 import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
@@ -66,11 +66,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.mongodb.DB;
-import com.mongodb.MongoClient;
-import com.mongodb.MongoClientOptions;
-import com.mongodb.MongoClientURI;
-
 /**
  * The OSGi service to start/stop a DocumentNodeStore instance.
  */
@@ -184,6 +179,22 @@
         int changesSize = PropertiesUtil.toInteger(prop(PROP_CHANGES_SIZE), DEFAULT_CHANGES_SIZE);
         boolean useMK = PropertiesUtil.toBoolean(context.getProperties().get(PROP_USE_MK), false);
 
+
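+        // build the MongoDB client from the configured URI, taking care not to
+        // log the URI itself since it may embed credentials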
+        MongoClientOptions.Builder builder = MongoConnection.getDefaultBuilder();
+        MongoClientURI mongoURI = new MongoClientURI(uri, builder);
+
+        if (log.isInfoEnabled()) {
+            // Take care around not logging the uri directly as it
+            // might contain passwords
+            String type = useMK ? "MK" : "NodeStore";
+            log.info("Starting Document{} with host={}, db={}, cache size (MB)={}, Off Heap Cache size (MB)={}, 'changes' collection size (MB)={}",
+                    type, mongoURI.getHosts(), db, cacheSize, offHeapCache, changesSize);
+            log.info("Mongo Connection details {}", MongoConnection.toString(mongoURI.getOptions()));
+        }
+
+        MongoClient client = new MongoClient(mongoURI);
+        DB mongoDB = client.getDB(db);
+
         DocumentMK.Builder mkBuilder =
                 new DocumentMK.Builder().
                 memoryCacheSize(cacheSize * MB).
@@ -194,62 +205,12 @@
             mkBuilder.setBlobStore(blobStore);
         }
 
-        String jdbcuri = System.getProperty("oak.jdbc.connection.uri", "");
-
-        if (!jdbcuri.isEmpty()) {
-            // OAK-1708 - this is temporary until we figure out parametrization,
-            // and how to pass in DataSources directly
-            String username = System.getProperty("oak.jdbc.username", "");
-            String passwd = System.getProperty("oak.jdbc.password", "");
-            String driver = System.getProperty("oak.jdbc.driver.class", "");
-
-            if (driver.length() > 0) {
-                log.info("trying to load {}", driver);
-
-                try {
-                    Class.forName(driver);
-                } catch (ClassNotFoundException ex) {
-                    log.error("driver not loaded", ex);
-                }
-            } else {
-                log.info("System property oak.jdbc.driver.class not set.");
-            }
-
-            if (log.isInfoEnabled()) {
-                String type = useMK ? "MK" : "NodeStore";
-                log.info(
-                        "Starting Document{} with uri={}, cache size (MB)={}, Off Heap Cache size (MB)={}, 'changes' collection size (MB)={}",
-                        type, jdbcuri, cacheSize, offHeapCache, changesSize);
-            }
-
-            DataSource ds = RDBDataSourceFactory.forJdbcUrl(jdbcuri, username, passwd);
-            mkBuilder.setRDBConnection(ds);
-
-            log.info("Connected to datasource {}", ds);
-        } else {
-            MongoClientOptions.Builder builder = MongoConnection.getDefaultBuilder();
-            MongoClientURI mongoURI = new MongoClientURI(uri, builder);
-
-            if (log.isInfoEnabled()) {
-                // Take care around not logging the uri directly as it
-                // might contain passwords
-                String type = useMK ? "MK" : "NodeStore";
-                log.info("Starting Document{} with host={}, db={}, cache size (MB)={}, Off Heap Cache size (MB)={}, 'changes' collection size (MB)={}",
-                        type, mongoURI.getHosts(), db, cacheSize, offHeapCache, changesSize);
-                log.info("Mongo Connection details {}", MongoConnection.toString(mongoURI.getOptions()));
-            }
-
-            MongoClient client = new MongoClient(mongoURI);
-            DB mongoDB = client.getDB(db);
-
-            mkBuilder.setMongoDB(mongoDB, changesSize);
-
-            log.info("Connected to database {}", mongoDB);
-        }
-
+        mkBuilder.setMongoDB(mongoDB, changesSize);
         mkBuilder.setExecutor(executor);
         mk = mkBuilder.open();
 
+        log.info("Connected to database {}", mongoDB);
+
         registerJMXBeans(mk.getNodeStore());
         registerLastRevRecoveryJob(mk.getNodeStore());
 
@@ -351,6 +312,7 @@
                             mcl.getDiffCacheStats(),
                             CacheStatsMBean.TYPE,
                             mcl.getDiffCacheStats().getName()));
+            
         }
 
         DocumentStore ds = store.getDocumentStore();
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentStore.java
index 5a114b3..c093a40 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/DocumentStore.java
@@ -21,20 +21,11 @@
 import javax.annotation.CheckForNull;
 import javax.annotation.Nonnull;
 
+import org.apache.jackrabbit.mk.api.MicroKernelException;
+
 /**
  * The interface for the backend storage for documents.
  * <p>
- * In general atomicity of operations on a DocumentStore are limited to a single
- * document. That is, an implementation does not have to guarantee atomicity of
- * the entire effect of a method call. A method that fails with an exception may
- * have modified just some documents and then abort. However, an implementation
- * must not modify a document partially. Either the complete update operation
- * is applied to a document or no modification is done at all.
- * <p>
- * Even though none of the methods declare an exception, they will still throw
- * an implementation-specific runtime exception when the operation fails (e.g.
- * an I/O error occurs).
- * <p>
  * For keys, the maximum length is 512 bytes in the UTF-8 representation.
  */
 public interface DocumentStore {
@@ -114,8 +105,7 @@
                                        int limit);
 
     /**
-     * Remove a document. This method does nothing if there is no document
-     * with the given key.
+     * Remove a document.
      *
      * @param <T> the document type
      * @param collection the collection
@@ -124,10 +114,7 @@
     <T extends Document> void remove(Collection<T> collection, String key);
 
     /**
-     * Batch remove documents with given key. Keys for documents that do not
-     * exist are simply ignored. If this method fails with an exception, then
-     * only some of the documents identified by {@code keys} may have been
-     * removed.
+     * Batch remove documents with the given keys.
      *
      * @param <T> the document type
      * @param collection the collection
@@ -166,9 +153,11 @@
      * @param collection the collection
      * @param update the update operation
      * @return the old document or <code>null</code> if it didn't exist before.
+     * @throws MicroKernelException if the operation failed.
      */
     @CheckForNull
-    <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update);
+    <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update)
+            throws MicroKernelException;
 
     /**
      * Performs a conditional update (e.g. using
@@ -181,9 +170,11 @@
      * @param update the update operation with the condition
      * @return the old document or <code>null</code> if the condition is not met or
      *         if the document wasn't found
+     * @throws MicroKernelException if the operation failed.
      */
     @CheckForNull
-    <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update);
+    <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update)
+            throws MicroKernelException;
 
     /**
      * Invalidate the document cache.
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Revision.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Revision.java
index a7806cd..db6a698 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Revision.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/Revision.java
@@ -156,12 +156,6 @@
         long timestamp = getCurrentTimestamp();
         int c;
         synchronized (Revision.class) {
-            // need to check again, because threads
-            // could arrive inside the synchronized block
-            // out of order
-            if (timestamp < lastRevisionTimestamp) {
-                timestamp = lastRevisionTimestamp;
-            }
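+            // same millisecond as the previous revision: use a counter to keep
+            // revision ids unique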
             if (timestamp == lastRevisionTimestamp) {
                 c = ++lastRevisionCount;
             } else {
@@ -527,14 +521,9 @@
             if (range1 == FUTURE && range2 == FUTURE) {
                 return o1.compareRevisionTimeThenClusterId(o2);
             }
-            if (range1 == null && range2 == null) {
+            if (range1 == null || range2 == null) {
                 return o1.compareRevisionTimeThenClusterId(o2);
             }
-            if (range1 == null) {
-                return -1;
-            } else if (range2 == null) {
-                return 1;
-            }
             int comp = range1.compareRevisionTimeThenClusterId(range2);
             if (comp != 0) {
                 return comp;
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java
index c30de9a..63c7d49 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDataSourceFactory.java
@@ -16,115 +16,28 @@
  */
 package org.apache.jackrabbit.oak.plugins.document.rdb;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.sql.Connection;
 import java.sql.Driver;
 import java.sql.DriverManager;
 import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
-import java.util.logging.Logger;
 
 import javax.sql.DataSource;
 
 import org.apache.commons.dbcp.BasicDataSource;
 import org.apache.jackrabbit.mk.api.MicroKernelException;
-import org.slf4j.LoggerFactory;
 
-/**
- * Factory for creating {@link DataSource}s based on a JDBC connection URL.
- */
 public class RDBDataSourceFactory {
 
-    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(RDBDataSourceFactory.class);
-
     public static DataSource forJdbcUrl(String url, String username, String passwd) {
         try {
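+            // look up a registered JDBC driver for the URL and wrap it in a
+            // commons-dbcp pooled DataSource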
             BasicDataSource bds = new BasicDataSource();
-            LOG.debug("Getting Dricer for " + url);
             Driver d = DriverManager.getDriver(url);
             bds.setDriverClassName(d.getClass().getName());
             bds.setUsername(username);
             bds.setPassword(passwd);
             bds.setUrl(url);
-            return new CloseableDataSource(bds);
+            return bds;
         } catch (SQLException ex) {
-            String message = "trying to obtain driver for " + url;
-            LOG.info(message, ex);
-            throw new MicroKernelException(message, ex);
-        }
-    }
-
-    /**
-     * A {@link Closeable} {@link DataSource} based on a {@link BasicDataSource}. 
-     */
-    private static class CloseableDataSource implements DataSource, Closeable {
-
-        private BasicDataSource ds;
-
-        public CloseableDataSource(BasicDataSource ds) {
-            this.ds = ds;
-        }
-
-        @Override
-        public PrintWriter getLogWriter() throws SQLException {
-            return this.ds.getLogWriter();
-        }
-
-        @Override
-        public int getLoginTimeout() throws SQLException {
-            return this.ds.getLoginTimeout();
-        }
-
-        @Override
-        public void setLogWriter(PrintWriter pw) throws SQLException {
-            this.ds.setLogWriter(pw);
-        }
-
-        @Override
-        public void setLoginTimeout(int t) throws SQLException {
-            this.ds.setLoginTimeout(t);
-        }
-
-        @Override
-        public boolean isWrapperFor(Class<?> c) throws SQLException {
-            return this.ds.isWrapperFor(c);
-        }
-
-        @Override
-        public <T> T unwrap(Class<T> c) throws SQLException {
-            return this.ds.unwrap(c);
-        }
-
-        @Override
-        public void close() throws IOException {
-            try {
-                this.ds.close();
-            } catch (SQLException ex) {
-                throw new IOException("closing data source " + this.ds, ex);
-            }
-        }
-
-        @Override
-        public Connection getConnection() throws SQLException {
-            return this.ds.getConnection();
-        }
-
-        @Override
-        public Connection getConnection(String user, String passwd) throws SQLException {
-            return this.ds.getConnection(user, passwd);
-        }
-
-        // needed in Java 7...
-        @SuppressWarnings("unused")
-        public Logger getParentLogger() throws SQLFeatureNotSupportedException {
-            throw new SQLFeatureNotSupportedException();
-        }
-
-        @Override
-        public String toString() {
-            return this.getClass().getName() + " wrapping a " + this.ds.toString();
+            throw new MicroKernelException("trying to obtain driver for " + url, ex);
         }
     }
 }
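
The simplified factory above returns the DBCP BasicDataSource directly instead of wrapping it. A minimal usage sketch, assuming the H2 driver is on the classpath; note that DBCP 1.x BasicDataSource offers a close() method but does not implement java.io.Closeable, which is the gap the removed wrapper filled.

import javax.sql.DataSource;

import org.apache.commons.dbcp.BasicDataSource;

// Usage sketch: obtain a pooled DataSource for an in-memory H2 database
// and dispose of the pool afterwards. The JDBC URL and credentials are
// illustrative only.
public class DataSourceUsageSketch {
    public static void main(String[] args) throws Exception {
        DataSource ds = RDBDataSourceFactory.forJdbcUrl("jdbc:h2:mem:oak", "sa", "");
        try {
            ds.getConnection().close(); // borrow and return one connection
        } finally {
            // the factory no longer wraps the pool, so cast to close it
            ((BasicDataSource) ds).close();
        }
    }
}
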
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
index 6651f72..29ef562 100755
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBDocumentStore.java
@@ -17,7 +17,6 @@
 package org.apache.jackrabbit.oak.plugins.document.rdb;
 
 import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.sql.Connection;
@@ -27,7 +26,6 @@
 import java.sql.Statement;
 import java.sql.Types;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
@@ -36,9 +34,6 @@
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.locks.Lock;
-import java.util.zip.Deflater;
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
 
 import javax.annotation.CheckForNull;
 import javax.annotation.Nonnull;
@@ -57,7 +52,6 @@
 import org.apache.jackrabbit.oak.plugins.document.UpdateOp;
 import org.apache.jackrabbit.oak.plugins.document.UpdateUtils;
 import org.apache.jackrabbit.oak.plugins.document.cache.CachingDocumentStore;
-import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
 import org.apache.jackrabbit.oak.plugins.document.util.StringValue;
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
@@ -67,81 +61,8 @@
 
 import com.google.common.base.Objects;
 import com.google.common.cache.Cache;
-import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.Striped;
 
-/**
- * Implementation of {@link CachingDocumentStore} for relational databases.
- * 
- * <h3>Supported Databases</h3>
- * <p>
- * The code is supposed to be sufficiently generic to run with a variety of
- * database implementations. However, the tables are created when required to
- * simplify testing, and <em>that</em> code specifically supports these
- * databases:
- * <ul>
- * <li>h2</li>
- * <li>IBM DB2</li>
- * <li>Postgres</li>
- * </ul>
- * 
- * <h3>Table Layout</h3>
- * <p>
- * Data for each of the DocumentStore's {@link Collection}s is stored in its own
- * database table (with a name matching the collection).
- * <p>
- * The tables essentially implement key/value storage, where the key usually is
- * derived from an Oak path, and the value is a serialization of a
- * {@link Document} (or a part of one). Additional fields are used for queries,
- * debugging, and concurrency control:
- * <table style="text-align: left;">
- * <thead>
- * <tr>
- * <th>Column</th>
- * <th>Type</th>
- * <th>Description</th>
- * </tr>
- * </thead> <tbody>
- * <tr>
- * <th>ID</th>
- * <td>varchar(1000) not null primary key</td>
- * <td>the document's key</td>
- * </tr>
- * <tr>
- * <th>MODIFIED</th>
- * <td>bigint</td>
- * <td>low-resolution timestamp</td>
- * </tr>
- * <tr>
- * <th>MODCOUNT</th>
- * <td>bigint</td>
- * <td>modification counter, used for avoiding overlapping updates</td>
- * </tr>
- * <tr>
- * <th>SIZE</th>
- * <td>bigint</td>
- * <td>the size of the document's JSON serialization (for debugging purposes)</td>
- * </tr>
- * <tr>
- * <th>DATA</th>
- * <td>varchar(16384)</td>
- * <td>the document's JSON serialization (only used for small document sizes, in
- * which case BDATA (below) is not set)</td>
- * </tr>
- * <tr>
- * <th>BDATA</th>
- * <td>blob</td>
- * <td>the document's JSON serialization (usually GZIPped, only used for "large"
- * documents)</td>
- * </tr>
- * </tbody>
- * </table>
- * 
- * <h3>Caching</h3>
- * <p>
- * The cache borrows heavily from the {@link MongoDocumentStore} implementation;
- * however it does not support the off-heap mechanism yet.
- */
 public class RDBDocumentStore implements CachingDocumentStore {
 
     /**
@@ -163,7 +84,54 @@
 
     @Override
     public <T extends Document> T find(final Collection<T> collection, final String id, int maxCacheAge) {
-        return readDocumentCached(collection, id, maxCacheAge);
+        if (collection != Collection.NODES) {
+            return readDocument(collection, id);
+        } else {
+            CacheValue cacheKey = new StringValue(id);
+            NodeDocument doc;
+            if (maxCacheAge > 0) {
+                // first try without lock
+                doc = nodesCache.getIfPresent(cacheKey);
+                if (doc != null) {
+                    if (maxCacheAge == Integer.MAX_VALUE || System.currentTimeMillis() - doc.getCreated() < maxCacheAge) {
+                        return castAsT(unwrap(doc));
+                    }
+                }
+            }
+            try {
+                Lock lock = getAndLock(id);
+                try {
+                    if (maxCacheAge == 0) {
+                        invalidateCache(collection, id);
+                    }
+                    while (true) {
+                        doc = nodesCache.get(cacheKey, new Callable<NodeDocument>() {
+                            @Override
+                            public NodeDocument call() throws Exception {
+                                NodeDocument doc = (NodeDocument) readDocument(collection, id);
+                                if (doc != null) {
+                                    doc.seal();
+                                }
+                                return wrap(doc);
+                            }
+                        });
+                        if (maxCacheAge == 0 || maxCacheAge == Integer.MAX_VALUE) {
+                            break;
+                        }
+                        if (System.currentTimeMillis() - doc.getCreated() < maxCacheAge) {
+                            break;
+                        }
+                        // too old: invalidate, try again
+                        invalidateCache(collection, id);
+                    }
+                } finally {
+                    lock.unlock();
+                }
+                return castAsT(unwrap(doc));
+            } catch (ExecutionException e) {
+                throw new IllegalStateException("Failed to load document with " + id, e);
+            }
+        }
     }
 
     @Override
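
The inlined find() above follows a read-through pattern: probe the cache without locking, then load under a per-id lock so concurrent misses hit the database only once. A standalone sketch of that pattern using Guava's Cache; loadFromStore() is a placeholder for the real database read, and the single shared lock stands in for the striped per-id locks behind getAndLock().

import java.util.concurrent.Callable;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

// Read-through sketch: the fast path probes the cache without a lock; on
// a miss the value is loaded under a lock so concurrent readers do not
// hit the database twice for the same id.
class CachedReadSketch {
    private final Cache<String, String> cache =
            CacheBuilder.newBuilder().maximumSize(1024).build();
    private final Lock lock = new ReentrantLock(); // per-id and striped in the real store

    String read(final String id) throws Exception {
        String doc = cache.getIfPresent(id); // no lock on the fast path
        if (doc != null) {
            return doc;
        }
        lock.lock();
        try {
            return cache.get(id, new Callable<String>() {
                @Override
                public String call() {
                    return loadFromStore(id); // placeholder database read
                }
            });
        } finally {
            lock.unlock();
        }
    }

    private String loadFromStore(String id) {
        return "document-" + id;
    }
}
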
@@ -186,11 +154,11 @@
     }
 
     @Override
-    public <T extends Document> void remove(Collection<T> collection, List<String> ids) {
-        for (String id : ids) {
-            invalidateCache(collection, id);
+    public <T extends Document> void remove(Collection<T> collection, List<String> keys) {
+        // TODO Use batch delete
+        for (String key : keys) {
+            remove(collection, key);
         }
-        delete(collection, ids);
     }
 
     @Override
@@ -328,57 +296,6 @@
         }
     }
 
-    private <T extends Document> T readDocumentCached(final Collection<T> collection, final String id, int maxCacheAge) {
-        if (collection != Collection.NODES) {
-            return readDocumentUncached(collection, id);
-        } else {
-            CacheValue cacheKey = new StringValue(id);
-            NodeDocument doc;
-            if (maxCacheAge > 0) {
-                // first try without lock
-                doc = nodesCache.getIfPresent(cacheKey);
-                if (doc != null) {
-                    if (maxCacheAge == Integer.MAX_VALUE || System.currentTimeMillis() - doc.getCreated() < maxCacheAge) {
-                        return castAsT(unwrap(doc));
-                    }
-                }
-            }
-            try {
-                Lock lock = getAndLock(id);
-                try {
-                    if (maxCacheAge == 0) {
-                        invalidateCache(collection, id);
-                    }
-                    while (true) {
-                        doc = nodesCache.get(cacheKey, new Callable<NodeDocument>() {
-                            @Override
-                            public NodeDocument call() throws Exception {
-                                NodeDocument doc = (NodeDocument) readDocumentUncached(collection, id);
-                                if (doc != null) {
-                                    doc.seal();
-                                }
-                                return wrap(doc);
-                            }
-                        });
-                        if (maxCacheAge == 0 || maxCacheAge == Integer.MAX_VALUE) {
-                            break;
-                        }
-                        if (System.currentTimeMillis() - doc.getCreated() < maxCacheAge) {
-                            break;
-                        }
-                        // too old: invalidate, try again
-                        invalidateCache(collection, id);
-                    }
-                } finally {
-                    lock.unlock();
-                }
-                return castAsT(unwrap(doc));
-            } catch (ExecutionException e) {
-                throw new IllegalStateException("Failed to load document with " + id, e);
-            }
-        }
-    }
-
     @CheckForNull
     private <T extends Document> boolean internalCreate(Collection<T> collection, List<UpdateOp> updates) {
         try {
@@ -398,7 +315,7 @@
     @CheckForNull
     private <T extends Document> T internalCreateOrUpdate(Collection<T> collection, UpdateOp update, boolean allowCreate,
             boolean checkConditions) {
-        T oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
+        T oldDoc = readDocument(collection, update.getId());
 
         if (oldDoc == null) {
             if (!allowCreate) {
@@ -416,16 +333,13 @@
                 insertDocument(collection, doc);
                 addToCache(collection, doc);
                 return oldDoc;
-            } catch (MicroKernelException ex) {
+            }
+            catch (MicroKernelException ex) {
                 // may have failed due to a race condition; try update instead
-                // this is an edge case, so it's ok to bypass the cache
-                // (avoiding a race condition where the DB is already updated
-                // but the case is not)
-                oldDoc = readDocumentUncached(collection, update.getId());
+                oldDoc = readDocument(collection, update.getId());
                 if (oldDoc == null) {
                     // something else went wrong
-                    LOG.error("insert failed, but document " + update.getId() + " is not present, aborting", ex);
-                    throw (ex);
+                    throw(ex);
                 }
                 return internalUpdate(collection, update, oldDoc, checkConditions, RETRIES);
             }
@@ -436,42 +350,33 @@
 
     @CheckForNull
     private <T extends Document> T internalUpdate(Collection<T> collection, UpdateOp update, T oldDoc, boolean checkConditions,
-            int maxRetries) {
+            int retries) {
         T doc = applyChanges(collection, oldDoc, update, checkConditions);
         if (doc == null) {
             return null;
         } else {
-            Lock l = getAndLock(update.getId());
-            try {
-                boolean success = false;
+            boolean success = false;
 
-                int retries = maxRetries;
-                while (!success && retries > 0) {
-                    success = updateDocument(collection, doc, (Long) oldDoc.get(MODCOUNT));
-                    if (!success) {
-                        // retry with a fresh document
-                        retries -= 1;
-                        oldDoc = readDocumentCached(collection, update.getId(), Integer.MAX_VALUE);
-                        doc = applyChanges(collection, oldDoc, update, checkConditions);
-                        if (doc == null) {
-                            return null;
-                        }
-                    } else {
-                        if (collection == Collection.NODES) {
-                            applyToCache((NodeDocument) oldDoc, (NodeDocument) doc);
-                        }
-                    }
-                }
-
+            while (!success && retries > 0) {
+                success = updateDocument(collection, doc, (Long) oldDoc.get(MODCOUNT));
                 if (!success) {
-                    throw new MicroKernelException("failed update (race?) after " + maxRetries + " retries");
+                    // retry with a fresh document
+                    retries -= 1;
+                    oldDoc = readDocument(collection, update.getId());
+                    doc = applyChanges(collection, oldDoc, update, checkConditions);
+                    if (doc == null) {
+                        return null;
+                    }
+                } else {
+                    applyToCache(collection, oldDoc, doc);
                 }
+            }
 
-                return oldDoc;
+            if (!success) {
+                throw new MicroKernelException("failed update (race?)");
             }
-            finally {
-                l.unlock();
-            }
+
+            return oldDoc;
         }
     }
 
@@ -490,11 +395,30 @@
 
     @CheckForNull
     private <T extends Document> void internalUpdate(Collection<T> collection, List<String> ids, UpdateOp update) {
-
-        for (String id : ids) {
-            UpdateOp up = update.copy();
-            up = up.shallowCopy(id);
-            internalCreateOrUpdate(collection, up, false, true);
+        Connection connection = null;
+        String tableName = getTable(collection);
+        try {
+            connection = getConnection();
+            for (String id : ids) {
+                String in = dbRead(connection, tableName, id);
+                if (in == null) {
+                    throw new MicroKernelException(tableName + " " + id + " not found");
+                }
+                T doc = fromString(collection, in);
+                Long oldmodcount = (Long) doc.get(MODCOUNT);
+                update.increment(MODCOUNT, 1);
+                UpdateUtils.applyChanges(doc, update, comparator);
+                String data = asString(doc);
+                Long modified = (Long) doc.get(MODIFIED);
+                Long modcount = (Long) doc.get(MODCOUNT);
+                dbUpdate(connection, tableName, id, modified, modcount, oldmodcount, data);
+                invalidateCache(collection, id); // TODO
+            }
+            connection.commit();
+        } catch (Exception ex) {
+            throw new MicroKernelException(ex);
+        } finally {
+            closeConnection(connection);
         }
     }
 
@@ -576,7 +500,7 @@
     }
 
     @CheckForNull
-    private <T extends Document> T readDocumentUncached(Collection<T> collection, String id) {
+    private <T extends Document> T readDocument(Collection<T> collection, String id) {
         Connection connection = null;
         String tableName = getTable(collection);
         try {
@@ -595,7 +519,7 @@
         String tableName = getTable(collection);
         try {
             connection = getConnection();
-            dbDelete(connection, tableName, Collections.singletonList(id));
+            dbDelete(connection, tableName, id);
             connection.commit();
         } catch (Exception ex) {
             throw new MicroKernelException(ex);
@@ -604,22 +528,6 @@
         }
     }
 
-    private <T extends Document> void delete(Collection<T> collection, List<String> ids) {
-        for (List<String> sublist : Lists.partition(ids, 64)) {
-            Connection connection = null;
-            String tableName = getTable(collection);
-            try {
-                connection = getConnection();
-                dbDelete(connection, tableName, sublist);
-                connection.commit();
-            } catch (Exception ex) {
-                throw new MicroKernelException(ex);
-            } finally {
-                closeConnection(connection);
-            }
-        }
-    }
-
     private <T extends Document> boolean updateDocument(@Nonnull Collection<T> collection, @Nonnull T document, Long oldmodcount) {
         Connection connection = null;
         String tableName = getTable(collection);
@@ -668,20 +576,12 @@
 
     // low level operations
 
-    private static byte[] GZIPSIG = {31, -117};
-    private static boolean NOGZIP = Boolean.getBoolean("org.apache.jackrabbit.oak.plugins.document.rdb.RDBDocumentStore.NOGZIP");
-
     private String getData(ResultSet rs, int stringIndex, int blobIndex) throws SQLException {
         try {
             String data = rs.getString(stringIndex);
             byte[] bdata = rs.getBytes(blobIndex);
             if (bdata == null) {
                 return data;
-            } else if (bdata.length >= 2 && bdata[0] == GZIPSIG[0] && bdata[1] == GZIPSIG[1]) {
-                // GZIP
-                ByteArrayInputStream bis = new ByteArrayInputStream(bdata);
-                GZIPInputStream gis = new GZIPInputStream(bis, 65536);
-                return IOUtils.toString(gis, "UTF-8");
             } else {
                 return IOUtils.toString(bdata, "UTF-8");
             }
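
The branch removed above sniffed the standard GZIP magic bytes (0x1f, 0x8b) so that both compressed and uncompressed BDATA values could be read. A self-contained sketch of that detection, using plain streams in place of commons-io IOUtils:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

// GZIP detection sketch: the first two bytes of a GZIP stream are always
// 0x1f 0x8b, so a BDATA value can be sniffed and decompressed on the fly.
final class GzipSniffSketch {
    private static final byte[] GZIP_SIG = { 31, -117 };

    static String toStringUtf8(byte[] bdata) throws IOException {
        InputStream in = new ByteArrayInputStream(bdata);
        if (bdata.length >= 2 && bdata[0] == GZIP_SIG[0] && bdata[1] == GZIP_SIG[1]) {
            in = new GZIPInputStream(in, 65536); // compressed payload
        }
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        byte[] buf = new byte[8192];
        int n;
        while ((n = in.read(buf)) != -1) {
            bos.write(buf, 0, n);
        }
        return bos.toString("UTF-8");
    }
}
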
@@ -690,33 +590,12 @@
         }
     }
 
-    private static byte[] asBytes(String data) {
-        byte[] bytes;
+    private static ByteArrayInputStream asInputStream(String data) {
         try {
-            bytes = data.getBytes("UTF-8");
+            return new ByteArrayInputStream(data.getBytes("UTF-8"));
         } catch (UnsupportedEncodingException ex) {
-            LOG.error("UTF-8 not supported??", ex);
-            throw new MicroKernelException(ex);
-        }
-
-        if (NOGZIP) {
-            return bytes;
-        } else {
-            try {
-                ByteArrayOutputStream bos = new ByteArrayOutputStream(data.length());
-                GZIPOutputStream gos = new GZIPOutputStream(bos) {
-                    {
-                        // TODO: make this configurable
-                        this.def.setLevel(Deflater.BEST_SPEED);
-                    }
-                };
-                gos.write(bytes);
-                gos.close();
-                return bos.toByteArray();
-            } catch (IOException ex) {
-                LOG.error("Error while gzipping contents", ex);
-                throw new MicroKernelException(ex);
-            }
+            LOG.error("This REALLY is not supposed to happen", ex);
+            return null;
         }
     }
 
@@ -753,6 +632,9 @@
             t += " and MODIFIED >= ?";
         }
         t += " order by ID";
+        if (limit != Integer.MAX_VALUE) {
+            t += " limit ?";
+        }
         PreparedStatement stmt = connection.prepareStatement(t);
         List<String> result = new ArrayList<String>();
         try {
@@ -763,10 +645,10 @@
                 stmt.setLong(si++, startValue);
             }
             if (limit != Integer.MAX_VALUE) {
-                stmt.setFetchSize(limit);
+                stmt.setInt(si++, limit);
             }
             ResultSet rs = stmt.executeQuery();
-            while (rs.next() && result.size() < limit) {
+            while (rs.next()) {
                 String data = getData(rs, 1, 2);
                 result.add(data);
             }
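
The two hunks above trade limit strategies: the 1.0.0 side appends a vendor-specific "limit ?" clause and binds the bound as a parameter, while the removed variant stayed portable by passing the limit as a fetch-size hint and cutting the ResultSet loop off client-side. A sketch of the portable variant; the table layout is assumed to match the ID column described earlier.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

// Portable limit sketch: hint the driver with setFetchSize() and stop
// reading once 'limit' rows are collected, instead of relying on a
// vendor-specific "limit ?" clause.
final class QueryLimitSketch {
    static List<String> firstIds(Connection con, String table, int limit)
            throws SQLException {
        PreparedStatement stmt = con.prepareStatement(
                "select ID from " + table + " order by ID");
        try {
            if (limit != Integer.MAX_VALUE) {
                stmt.setFetchSize(limit); // a hint, not a hard cap
            }
            ResultSet rs = stmt.executeQuery();
            List<String> result = new ArrayList<String>();
            while (rs.next() && result.size() < limit) {
                result.add(rs.getString(1)); // hard cap enforced client-side
            }
            return result;
        } finally {
            stmt.close();
        }
    }
}
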
@@ -794,8 +676,8 @@
                 stmt.setBinaryStream(si++, null, 0);
             } else {
                 stmt.setString(si++, "truncated...:" + data.substring(0, 1023));
-                byte[] bytes = asBytes(data);
-                stmt.setBytes(si++, bytes);
+                ByteArrayInputStream bis = asInputStream(data);
+                stmt.setBinaryStream(si++, bis, bis.available());
             }
 
             stmt.setString(si++, id);
@@ -826,8 +708,8 @@
                 stmt.setBinaryStream(si++, null, 0);
             } else {
                 stmt.setString(si++, "truncated...:" + data.substring(0, 1023));
-                byte[] bytes = asBytes(data);
-                stmt.setBytes(si++, bytes);
+                ByteArrayInputStream bis = asInputStream(data);
+                stmt.setBinaryStream(si++, bis, bis.available());
             }
 
             int result = stmt.executeUpdate();
@@ -840,32 +722,15 @@
         }
     }
 
-    private void dbDelete(Connection connection, String tableName, List<String> ids) throws SQLException {
-
-        PreparedStatement stmt;
-        int cnt = ids.size();
-
-        if (cnt == 1) {
-            stmt = connection.prepareStatement("delete from " + tableName + " where ID=?");
-        } else {
-            StringBuilder inClause = new StringBuilder();
-            for (int i = 0; i < cnt; i++) {
-                inClause.append('?');
-                if (i != cnt - 1) {
-                    inClause.append(',');
-                }
-            }
-            stmt = connection.prepareStatement("delete from " + tableName + " where ID in (" + inClause.toString() + ")");
-        }
-
+    private boolean dbDelete(Connection connection, String tableName, String id) throws SQLException {
+        PreparedStatement stmt = connection.prepareStatement("delete from " + tableName + " where ID = ?");
         try {
-            for (int i = 0; i < cnt; i++) {
-                stmt.setString(i + 1, ids.get(i));
-            }
+            stmt.setString(1, id);
             int result = stmt.executeUpdate();
-            if (result != cnt) {
-                LOG.debug("DB delete failed for " + tableName + "/" + ids);
+            if (result != 1) {
+                LOG.debug("DB delete failed for " + tableName + "/" + id);
             }
+            return result == 1;
         } finally {
             stmt.close();
         }
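
The dbDelete variant removed above batches ids into one parameterized IN (...) statement, with the caller chunking ids into sublists of 64 via Lists.partition. A minimal standalone sketch of the technique; the table name is assumed to come from a trusted collection name rather than user input.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

// Batched delete sketch: one parameterized IN (...) statement per chunk
// of ids; returns the number of rows actually deleted.
final class BatchDeleteSketch {
    static int deleteAll(Connection con, String table, List<String> ids)
            throws SQLException {
        StringBuilder in = new StringBuilder();
        for (int i = 0; i < ids.size(); i++) {
            in.append(i == 0 ? "?" : ",?");
        }
        PreparedStatement stmt = con.prepareStatement(
                "delete from " + table + " where ID in (" + in + ")");
        try {
            for (int i = 0; i < ids.size(); i++) {
                stmt.setString(i + 1, ids.get(i));
            }
            return stmt.executeUpdate();
        } finally {
            stmt.close();
        }
    }
}
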
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/package-info.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/package-info.java
deleted file mode 100755
index 9c9c3b0..0000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/package-info.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Implementations of {@link DocumentStore} and {@link BlobStore} for relational databases.
- */
-package org.apache.jackrabbit.oak.plugins.document.rdb;
-
-import org.apache.jackrabbit.oak.plugins.document.DocumentStore;
-import org.apache.jackrabbit.oak.spi.blob.BlobStore;
-
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/util/Utils.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/util/Utils.java
index 7ae4dcc..149e05a 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/util/Utils.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/util/Utils.java
@@ -16,8 +16,6 @@
  */
 package org.apache.jackrabbit.oak.plugins.document.util;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import java.io.Closeable;
 import java.io.IOException;
 import java.nio.charset.Charset;
@@ -25,14 +23,18 @@
 import java.security.NoSuchAlgorithmException;
 import java.sql.Timestamp;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
 
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
+import com.mongodb.BasicDBObject;
+
 import org.apache.commons.codec.binary.Hex;
 import org.apache.jackrabbit.oak.commons.PathUtils;
 import org.apache.jackrabbit.oak.plugins.document.Revision;
@@ -40,7 +42,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.mongodb.BasicDBObject;
+import static com.google.common.base.Preconditions.checkNotNull;
 
 /**
  * Utility methods.
@@ -254,16 +256,15 @@
         if (path.length() < PATH_SHORT) {
             return false;
         }
-        // check if name is too long
-        String name = PathUtils.getName(path);
-        if (name.getBytes(UTF_8).length > NODE_NAME_LIMIT) {
-            throw new IllegalArgumentException("Node name is too long: " + path);
-        }
         // check if the parent path is long
         byte[] parent = PathUtils.getParentPath(path).getBytes(UTF_8);
         if (parent.length < PATH_LONG) {
             return false;
         }
+        String name = PathUtils.getName(path);
+        if (name.getBytes(UTF_8).length > NODE_NAME_LIMIT) {
+            throw new IllegalArgumentException("Node name is too long: " + path);
+        }
         return true;
     }
     
@@ -385,9 +386,9 @@
      *
      * @param obj object to close
      */
-    public static void closeIfCloseable(Object obj) {
-        if (obj instanceof Closeable) {
-            try {
+    public static void closeIfCloseable(Object obj){
+        if(obj instanceof Closeable){
+            try{
                 ((Closeable) obj).close();
             } catch (IOException e) {
                 LOG.warn("Error occurred while closing {}", obj, e);
@@ -398,7 +399,7 @@
     /**
      * Provides a readable string for given timestamp
      */
-    public static String timestampToString(long timestamp) {
+    public static String timestampToString(long timestamp){
         return (new Timestamp(timestamp) + "00").substring(0, 23);
     }
 }
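
The reordered checks above measure UTF-8 byte lengths rather than char counts, since the store's ID limits apply to the encoded form. An illustrative sketch; the numeric limits are placeholders for the PATH_SHORT, PATH_LONG and NODE_NAME_LIMIT constants defined in Utils.

import java.nio.charset.Charset;

// Length-check sketch: a path is stored under a hashed id only when both
// the full path and the parent path exceed their byte-length thresholds.
final class PathLimitSketch {
    private static final Charset UTF_8 = Charset.forName("UTF-8");
    private static final int PATH_SHORT = 165;      // placeholder value
    private static final int PATH_LONG = 350;       // placeholder value
    private static final int NODE_NAME_LIMIT = 150; // placeholder value

    static boolean isLongPath(String path, String parentPath, String name) {
        if (path.length() < PATH_SHORT) {
            return false;                            // cheap char-count gate
        }
        if (parentPath.getBytes(UTF_8).length < PATH_LONG) {
            return false;                            // parent still fits
        }
        if (name.getBytes(UTF_8).length > NODE_NAME_LIMIT) {
            throw new IllegalArgumentException("Node name is too long: " + path);
        }
        return true;                                 // store a hashed id
    }
}
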
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
index 9ce60a3..e3935d4 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/AsyncIndexUpdate.java
@@ -186,6 +186,8 @@
                 if (switchOnSync) {
                     reindexedDefinitions.addAll(indexUpdate
                             .getReindexedDefinitions());
+                } else {
+                    postAsyncRunStatsStatus(indexStats);
                 }
             } else if (switchOnSync) {
                 log.debug("No changes detected after diff, will try to switch to synchronous updates on "
@@ -207,6 +209,7 @@
                     store.merge(builder, newCommitHook(name, state),
                             CommitInfo.EMPTY);
                     reindexedDefinitions.clear();
+                    postAsyncRunStatsStatus(indexStats);
                 } catch (CommitFailedException e) {
                     if (e != CONCURRENT_UPDATE) {
                         exception = e;
@@ -214,7 +217,6 @@
                 }
             }
         }
-        postAsyncRunStatsStatus(indexStats);
 
         if (exception != null) {
             if (!failing) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndex.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndex.java
index 99d71f2..531fff1 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndex.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndex.java
@@ -49,7 +49,7 @@
     }
 
     @Override
-    OrderedPropertyIndexLookup getLookup(NodeState root) {
+    PropertyIndexLookup getLookup(NodeState root) {
         return new OrderedPropertyIndexLookup(root);
     }
 
@@ -172,70 +172,10 @@
     }
 
     @Override
-    public String getPlanDescription(IndexPlan plan, NodeState root) {
-        LOG.debug("getPlanDescription({}, {})", plan, root);
-        StringBuilder buff = new StringBuilder("ordered");
-        OrderedPropertyIndexLookup lookup = getLookup(root);
-        Filter filter = plan.getFilter();
-        int depth = 1;
-        boolean found = false;
-        for (PropertyRestriction pr : filter.getPropertyRestrictions()) {
-            String propertyName = PathUtils.getName(pr.propertyName);
-            if (!lookup.isIndexed(propertyName, "/", filter)) {
-                continue;
-            }
-            String operation = null;
-            PropertyValue value = null;       
-            // TODO support pr.list
-            if (pr.first == null && pr.last == null) {
-                // open query: [property] is not null
-                operation = "is not null";
-            } else if (pr.first != null && pr.first.equals(pr.last) && pr.firstIncluding
-                       && pr.lastIncluding) {
-                // [property]=[value]
-                operation = "=";
-                value = pr.first;
-            } else if (pr.first != null && !pr.first.equals(pr.last)) {
-                // '>' & '>=' use cases
-                if (lookup.isAscending(root, propertyName, filter)) {
-                    value = pr.first;
-                    operation = pr.firstIncluding ? ">=" : ">";
-                }
-            } else if (pr.last != null && !pr.last.equals(pr.first)) {
-                // '<' & '<='
-                if (!lookup.isAscending(root, propertyName, filter)) {
-                    value = pr.last;
-                    operation = pr.lastIncluding ? "<=" : "<";
-                }
-            }
-            if (operation != null) {
-                buff.append(' ').append(propertyName).append(' ').
-                        append(operation).append(' ').append(value);
-            } else {
-                continue;
-            }
-            // stop with the first property that is indexed
-            found = true;
-            break;
-        }
-        List<OrderEntry> sortOrder = plan.getSortOrder();
-        if (!found && sortOrder != null && !sortOrder.isEmpty()) {
-            // we could be here if we have a query where the ORDER BY makes us play it.
-            for (OrderEntry oe : sortOrder) {
-                String propertyName = PathUtils.getName(oe.getPropertyName());
-                if (!lookup.isIndexed(propertyName, "/", null)) {
-                    continue;
-                }
-                depth = PathUtils.getDepth(oe.getPropertyName());
-                buff.append(" order by ").append(propertyName);
-                // stop with the first property that is indexed
-                break;
-            }
-        }        
-        if (depth > 1) {
-            buff.append(" ancestor ").append(depth - 1);
-        }       
-        return buff.toString();
+    public String getPlanDescription(IndexPlan plan) {
+        LOG.debug("getPlanDescription() - plan: {}", plan);
+        LOG.error("Not implemented yet");
+        throw new UnsupportedOperationException("Not implemented yet.");
     }
 
     @Override
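
The removed getPlanDescription built a readable plan string by mapping a restriction's first/last bounds onto a comparison operator, honoring the index's sort direction. The core of that mapping, reduced to a standalone sketch with the PropertyRestriction fields flattened into plain parameters:

// Operator selection sketch: given the first/last bounds of a property
// restriction and the index's sort direction, pick the operator that the
// ordered index can serve. Returns null when the index cannot help.
final class OperatorSketch {
    static String operator(String first, boolean firstIncluding,
                           String last, boolean lastIncluding,
                           boolean ascending) {
        if (first == null && last == null) {
            return "is not null";                // open query
        }
        if (first != null && first.equals(last) && firstIncluding && lastIncluding) {
            return "=";                          // exact match
        }
        if (first != null && !first.equals(last) && ascending) {
            return firstIncluding ? ">=" : ">";  // lower bound, ascending index
        }
        if (last != null && !last.equals(first) && !ascending) {
            return lastIncluding ? "<=" : "<";   // upper bound, descending index
        }
        return null;
    }
}
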
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MutableNodeState.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MutableNodeState.java
index 2b70258..273b896 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MutableNodeState.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/memory/MutableNodeState.java
@@ -63,12 +63,6 @@
      */
     private final Map<String, MutableNodeState> nodes = newHashMap();
 
-    /**
-     * Flag to indicate that this child has been replaced in its parent.
-     * @see org.apache.jackrabbit.oak.spi.state.NodeBuilder#isReplaced()
-     */
-    private boolean replaced;
-
     MutableNodeState(@Nonnull NodeState base) {
         checkNotNull(base);
         this.base = ModifiedNodeState.unwrap(base, properties, nodes);
@@ -109,12 +103,8 @@
         if (child == null) {
             checkValidName(name);
             child = new MutableNodeState(state);
-            if (base.hasChildNode(name)) {
-                child.replaced = true;
-            }
             nodes.put(name, child);
         } else {
-            child.replaced = true;
             child.reset(state);
         }
         return child;
@@ -159,7 +149,7 @@
     }
 
     boolean isReplaced(NodeState before) {
-        return replaced;
+        return base != before;
     }
 
     boolean isReplaced(NodeState before, String name) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/RecordId.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/RecordId.java
index f9a98ac..f273aa2 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/RecordId.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/RecordId.java
@@ -18,39 +18,24 @@
 
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
-import static java.lang.Integer.parseInt;
-import static org.apache.jackrabbit.oak.plugins.segment.Segment.RECORD_ALIGN_BITS;
 
 import java.util.UUID;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 public final class RecordId implements Comparable<RecordId> {
 
-    private static final Pattern PATTERN = Pattern.compile(
-            "([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})"
-            + "(:(0|[1-9][0-9]*)|\\.([0-9a-f]{4}))");
-
     public static RecordId[] EMPTY_ARRAY = new RecordId[0];
 
     public static RecordId fromString(SegmentTracker factory, String id) {
-        Matcher matcher = PATTERN.matcher(id);
-        if (matcher.matches()) {
-            UUID uuid = UUID.fromString(matcher.group(1));
-            SegmentId segmentId = factory.getSegmentId(
-                    uuid.getMostSignificantBits(),
-                    uuid.getLeastSignificantBits());
-
-            int offset;
-            if (matcher.group(3) != null) {
-                offset = parseInt(matcher.group(3));
-            } else {
-                offset = parseInt(matcher.group(4), 16) << RECORD_ALIGN_BITS;
-            }
-
-            return new RecordId(segmentId, offset);
+        int colon = id.indexOf(':');
+        if (colon != -1) {
+            UUID uuid = UUID.fromString(id.substring(0, colon));
+            return new RecordId(
+                    factory.getSegmentId(
+                            uuid.getMostSignificantBits(),
+                            uuid.getLeastSignificantBits()),
+                    Integer.parseInt(id.substring(colon + 1)));
         } else {
-            throw new IllegalArgumentException("Bad record identifier: " + id);
+            throw new IllegalArgumentException("Bad RecordId: " + id);
         }
     }
 
@@ -60,7 +45,7 @@
 
     public RecordId(SegmentId segmentId, int offset) {
         checkArgument(offset < Segment.MAX_SEGMENT_SIZE);
-        checkArgument((offset % (1 << RECORD_ALIGN_BITS)) == 0);
+        checkArgument((offset & 3) == 0);
         this.segmentId = checkNotNull(segmentId);
         this.offset = offset;
     }
@@ -93,7 +78,7 @@
 
     @Override
     public String toString() {
-        return String.format("%s.%04x", segmentId, offset >> RECORD_ALIGN_BITS);
+        return segmentId + ":" + offset;
     }
 
     @Override
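
fromString above reverts to the plain "<uuid>:<decimal-offset>" form, and the constructor's alignment check reverts from RECORD_ALIGN_BITS to a hard-coded 4-byte mask. A sketch of that parse-and-validate logic in isolation; the Object[] pair stands in for resolving a SegmentId through the SegmentTracker:

import java.util.UUID;

// Parse-and-validate sketch for the "<uuid>:<offset>" record id format.
// Returns the raw parts; the real code resolves the UUID to a SegmentId
// via the SegmentTracker and wraps both in a RecordId.
final class RecordIdParseSketch {
    static Object[] parse(String id) {
        int colon = id.indexOf(':');
        if (colon == -1) {
            throw new IllegalArgumentException("Bad RecordId: " + id);
        }
        UUID uuid = UUID.fromString(id.substring(0, colon));
        int offset = Integer.parseInt(id.substring(colon + 1));
        if ((offset & 3) != 0) { // records are 4-byte aligned
            throw new IllegalArgumentException("Unaligned offset: " + offset);
        }
        return new Object[] { uuid, Integer.valueOf(offset) };
    }
}
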
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
index 1cff57f..a82fab9 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
@@ -447,30 +447,10 @@
         int length = data.remaining();
 
         writer.format("Segment %s (%d bytes)%n", id, length);
-        if (id.isDataSegmentId()) {
-            writer.println("--------------------------------------------------------------------------");
-            int refcount = getRefCount();
-            for (int refid = 0; refid < refcount; refid++) {
-                writer.format("reference %02x: %s%n", refid, getRefId(refid));
-            }
-            int rootcount = data.getShort(ROOT_COUNT_OFFSET) & 0xffff;
-            int pos = refcount * 16;
-            for (int rootid = 0; rootid < rootcount; rootid++) {
-                writer.format(
-                        "root %d: %s at %04x%n", rootid,
-                        RecordType.values()[data.get(pos + rootid * 3) & 0xff],
-                        data.getShort(pos + rootid * 3 + 1) & 0xffff);
-            }
-            int blobrefcount = data.getShort(BLOBREF_COUNT_OFFSET) & 0xffff;
-            pos += rootcount * 3;
-            for (int blobrefid = 0; blobrefid < blobrefcount; blobrefid++) {
-                int offset = data.getShort(pos + blobrefid * 2) & 0xffff;
-                SegmentBlob blob = new SegmentBlob(
-                        new RecordId(id, offset << RECORD_ALIGN_BITS));
-                writer.format(
-                        "blobref %d: %s at %04x%n", blobrefid,
-                        blob.getBlobId(), offset);
-            }
+        writer.println("--------------------------------------------------------------------------");
+        int refcount = getRefCount();
+        for (int refid = 0; refid < refcount; refid++) {
+            writer.format("reference %02x: %s%n", refid, getRefId(refid));
         }
         writer.println("--------------------------------------------------------------------------");
         int pos = data.limit() - ((length + 15) & ~15);
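
The dump code above reads counts with getShort(...) & 0xffff: Java lacks unsigned types, so the mask recovers the full 0..65535 range from a signed short. A tiny sketch:

import java.nio.ByteBuffer;

// Unsigned-short read sketch: mask the sign extension away to recover
// the 16-bit value stored in the segment header.
final class UnsignedReadSketch {
    static int readUnsignedShort(ByteBuffer data, int pos) {
        return data.getShort(pos) & 0xffff;
    }

    public static void main(String[] args) {
        ByteBuffer b = ByteBuffer.allocate(2);
        b.putShort(0, (short) 0xffee);
        System.out.println(readUnsignedShort(b, 0)); // prints 65518
    }
}
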
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java
index 30f70ad..51afd2e 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/tree/AbstractTree.java
@@ -204,7 +204,7 @@
 
     @Override
     public Status getStatus() {
-        if (nodeBuilder.isNew() || nodeBuilder.isReplaced()) {
+        if (nodeBuilder.isNew()) {
             return NEW;
         } else if (nodeBuilder.isModified()) {
             return MODIFIED;
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java
index 585b44e..29c88fd 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/ComparisonImpl.java
@@ -146,7 +146,6 @@
             return p1.compareTo(p2) < 0;
         case LIKE:
             return evaluateLike(p1, p2);
-        // case IN is not needed here, as this is handled in the class InImpl.
         }
         throw new IllegalArgumentException("Unknown operator: " + operator);
     }
@@ -207,9 +206,6 @@
                         // path conditions
                         operand1.restrict(f, operator, v);
                     }
-                } else {
-                    // like '%' conditions
-                    operand1.restrict(f, operator, v);
                 }
             } else {
                 operand1.restrict(f, operator, v);
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
index 7c17c7d..8434fb2 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/SelectorImpl.java
@@ -327,15 +327,8 @@
         StringBuilder buff = new StringBuilder();
         buff.append(toString());
         buff.append(" /* ");
-        QueryIndex index = getIndex();
-        if (index != null) {
-            if (index instanceof AdvancedQueryIndex) {
-                AdvancedQueryIndex adv = (AdvancedQueryIndex) index;
-                IndexPlan p = plan.getIndexPlan();
-                buff.append(adv.getPlanDescription(p, rootState));
-            } else {
-                buff.append(index.getPlan(createFilter(true), rootState));
-            }
+        if (getIndex() != null) {
+            buff.append(getIndex().getPlan(createFilter(true), rootState));
         } else {
             buff.append("no-index");
         }
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java
index a10b41d..2d4600f 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImpl.java
@@ -29,7 +29,6 @@
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
-import java.util.UUID;
 import javax.annotation.CheckForNull;
 import javax.annotation.Nonnull;
 import javax.jcr.AccessDeniedException;
@@ -211,7 +210,11 @@
         if (tokenParent != null) {
             try {
                 long creationTime = new Date().getTime();
-                NodeUtil tokenNode = createTokenNode(tokenParent, creationTime);
+                Calendar creation = GregorianCalendar.getInstance();
+                creation.setTimeInMillis(creationTime);
+                String tokenName = Text.replace(ISO8601.format(creation), ":", ".");
+
+                NodeUtil tokenNode = tokenParent.addChild(tokenName, TOKEN_NT_NAME);
                 tokenNode.setString(JcrConstants.JCR_UUID, IdentifierManager.generateUUID());
 
                 String key = generateKey(options.getConfigValue(PARAM_TOKEN_LENGTH, DEFAULT_KEY_SIZE));
@@ -394,31 +397,6 @@
         return tokenParent;
     }
 
-    /**
-     * Create a new token node below the specified {@code parent}.
-     *
-     * @param parent The parent node.
-     * @param creationTime The creation time that is used as name hint.
-     * @return The new token node
-     * @throws AccessDeniedException
-     */
-    private NodeUtil createTokenNode(@Nonnull NodeUtil parent, @Nonnull long creationTime) throws AccessDeniedException {
-        Calendar creation = GregorianCalendar.getInstance();
-        creation.setTimeInMillis(creationTime);
-        String tokenName = Text.replace(ISO8601.format(creation), ":", ".");
-        NodeUtil tokenNode;
-        try {
-            tokenNode = parent.addChild(tokenName, TOKEN_NT_NAME);
-            root.commit();
-        } catch (CommitFailedException e) {
-            // conflict while creating token node -> retry
-            log.debug("Failed to create token node " + tokenName + ". Using random name as fallback.");
-            root.refresh();
-            tokenNode = parent.addChild(UUID.randomUUID().toString(), TOKEN_NT_NAME);
-        }
-        return tokenNode;
-    }
-
     //--------------------------------------------------------------------------
 
     /**
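
The inlined code above names the token node after an ISO8601 timestamp with ':' mapped to '.', since ':' is reserved in JCR names. A sketch that approximates the same name using java.util.Formatter conversions instead of Jackrabbit's ISO8601 and Text utilities (the zone offset comes out as +0200 rather than +02:00):

import java.util.Calendar;
import java.util.GregorianCalendar;

// Token node name sketch: format the creation time as an ISO8601-like
// timestamp and replace ':' with '.' to obtain a valid JCR node name.
final class TokenNameSketch {
    static String tokenName(long creationTime) {
        Calendar creation = GregorianCalendar.getInstance();
        creation.setTimeInMillis(creationTime);
        // %tF = yyyy-MM-dd, %tT = HH:mm:ss, %tL = millis, %tz = zone offset
        String iso = String.format("%1$tFT%1$tT.%1$tL%1$tz", creation);
        return iso.replace(':', '.');
    }
}
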
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java
index 537b67b..73f2452 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionEntry.java
@@ -16,6 +16,7 @@
  */
 package org.apache.jackrabbit.oak.security.authorization.permission;
 
+import java.util.Set;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
@@ -25,11 +26,15 @@
 import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.commons.PathUtils;
 import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionConstants;
+import org.apache.jackrabbit.oak.spi.security.authorization.restriction.Restriction;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionPattern;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider;
 import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.util.Text;
 
+import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
+
 final class PermissionEntry implements Comparable<PermissionEntry>, PermissionConstants {
 
     /**
@@ -57,19 +62,22 @@
      */
     final RestrictionPattern restriction;
 
-    PermissionEntry(@Nonnull String path, @Nonnull Tree entryTree, @Nonnull RestrictionProvider restrictionsProvider) {
-        this(path, entryTree.getProperty(REP_IS_ALLOW).getValue(Type.BOOLEAN),
-                Integer.parseInt(entryTree.getName()),
-                PrivilegeBits.getInstance(entryTree.getProperty(REP_PRIVILEGE_BITS)),
-                restrictionsProvider.getPattern(path, entryTree));
+    PermissionEntry(String path, Tree entryTree, RestrictionProvider restrictionsProvider) {
+        this.path = path;
+        isAllow = entryTree.getProperty(REP_IS_ALLOW).getValue(Type.BOOLEAN);
+        index = Integer.parseInt(entryTree.getName());
+        privilegeBits = PrivilegeBits.getInstance(entryTree.getProperty(REP_PRIVILEGE_BITS));
+        restriction = restrictionsProvider.getPattern(path, entryTree);
     }
 
-    PermissionEntry(@Nonnull String path, boolean isAllow, int index, @Nonnull PrivilegeBits privilegeBits, @Nonnull RestrictionPattern restriction) {
-        this.path = path;
-        this.isAllow = isAllow;
-        this.index = index;
-        this.privilegeBits = privilegeBits;
-        this.restriction = restriction;
+    static void write(NodeBuilder parent, boolean isAllow, int index, PrivilegeBits privilegeBits, Set<Restriction> restrictions) {
+        NodeBuilder n = parent.child(String.valueOf(index))
+                .setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSIONS, Type.NAME)
+                .setProperty(REP_IS_ALLOW, isAllow)
+                .setProperty(privilegeBits.asPropertyState(REP_PRIVILEGE_BITS));
+        for (Restriction restriction : restrictions) {
+            n.setProperty(restriction.getProperty());
+        }
     }
 
     public boolean matches(@Nonnull Tree tree, @Nullable PropertyState property) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionHook.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionHook.java
index 57618dc..53ad272 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionHook.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionHook.java
@@ -16,28 +16,44 @@
  */
 package org.apache.jackrabbit.oak.security.authorization.permission;
 
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.Set;
+
 import javax.annotation.Nonnull;
 
 import org.apache.jackrabbit.oak.api.CommitFailedException;
+import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.core.ImmutableRoot;
 import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate;
+import org.apache.jackrabbit.oak.plugins.tree.ImmutableTree;
 import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
 import org.apache.jackrabbit.oak.spi.commit.PostValidationHook;
 import org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants;
 import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionConstants;
+import org.apache.jackrabbit.oak.spi.security.authorization.restriction.Restriction;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider;
+import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits;
 import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider;
 import org.apache.jackrabbit.oak.spi.state.DefaultNodeStateDiff;
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 import org.apache.jackrabbit.oak.spi.state.NodeStateUtils;
+import org.apache.jackrabbit.util.Text;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Objects;
+import com.google.common.base.Strings;
+
+import static com.google.common.collect.Iterables.addAll;
+import static com.google.common.collect.Sets.newLinkedHashSet;
+import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
 import static org.apache.jackrabbit.JcrConstants.JCR_SYSTEM;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
+import static org.apache.jackrabbit.oak.plugins.tree.TreeConstants.OAK_CHILD_ORDER;
 
 /**
  * {@code CommitHook} implementation that processes any modification made to
@@ -76,8 +92,8 @@
     private TypePredicate isACE;
     private TypePredicate isGrantACE;
 
-    private Map<String, PermissionStoreEditor> modified = new HashMap<String, PermissionStoreEditor>();
-    private Map<String, PermissionStoreEditor> deleted = new HashMap<String, PermissionStoreEditor>();
+    private Map<String, Acl> modified = new HashMap<String, Acl>();
+    private Map<String, Acl> deleted = new HashMap<String, Acl>();
 
     public PermissionHook(String workspaceName, RestrictionProvider restrictionProvider) {
         this.workspaceName = workspaceName;
@@ -105,11 +121,11 @@
     }
 
     private void apply() {
-        for (Map.Entry<String, PermissionStoreEditor> entry : deleted.entrySet()) {
-            entry.getValue().removePermissionEntries();
+        for (Map.Entry<String, Acl> entry : deleted.entrySet()) {
+            entry.getValue().remove();
         }
-        for (Map.Entry<String, PermissionStoreEditor> entry : modified.entrySet()) {
-            entry.getValue().updatePermissionEntries();
+        for (Map.Entry<String, Acl> entry : modified.entrySet()) {
+            entry.getValue().update();
         }
     }
 
@@ -135,8 +151,8 @@
             }
             String path = parentPath + '/' + name;
             if (isACL.apply(after)) {
-                PermissionStoreEditor psEditor = createPermissionStoreEditor(name, after);
-                modified.put(psEditor.accessControlledPath, psEditor);
+                Acl acl = new Acl(parentPath, name, after);
+                modified.put(acl.accessControlledPath, acl);
             } else {
                 after.compareAgainstBaseState(EMPTY_NODE, new Diff(path));
             }
@@ -152,23 +168,23 @@
             String path = parentPath + '/' + name;
             if (isACL.apply(before)) {
                 if (isACL.apply(after)) {
-                    PermissionStoreEditor psEditor = createPermissionStoreEditor(name, after);
-                    modified.put(psEditor.accessControlledPath, psEditor);
+                    Acl acl = new Acl(parentPath, name, after);
+                    modified.put(acl.accessControlledPath, acl);
 
                     // also consider to remove the ACL from removed entries of other principals
-                    PermissionStoreEditor beforeEditor = createPermissionStoreEditor(name, before);
-                    beforeEditor.entries.keySet().removeAll(psEditor.entries.keySet());
-                    if (!beforeEditor.entries.isEmpty()) {
-                        deleted.put(parentPath, beforeEditor);
+                    Acl beforeAcl = new Acl(parentPath, name, before);
+                    beforeAcl.entries.keySet().removeAll(acl.entries.keySet());
+                    if (!beforeAcl.entries.isEmpty()) {
+                        deleted.put(parentPath, beforeAcl);
                     }
 
                 } else {
-                    PermissionStoreEditor psEditor = createPermissionStoreEditor(name, before);
-                    deleted.put(psEditor.accessControlledPath, psEditor);
+                    Acl acl = new Acl(parentPath, name, before);
+                    deleted.put(acl.accessControlledPath, acl);
                 }
             } else if (isACL.apply(after)) {
-                PermissionStoreEditor psEditor = createPermissionStoreEditor(name, after);
-                modified.put(psEditor.accessControlledPath, psEditor);
+                Acl acl = new Acl(parentPath, name, after);
+                modified.put(acl.accessControlledPath, acl);
             } else {
                 after.compareAgainstBaseState(before, new Diff(path));
             }
@@ -183,16 +199,218 @@
             }
             String path = parentPath + '/' + name;
             if (isACL.apply(before)) {
-                PermissionStoreEditor psEditor = createPermissionStoreEditor(name, before);
-                deleted.put(psEditor.accessControlledPath, psEditor);
+                Acl acl = new Acl(parentPath, name, before);
+                deleted.put(acl.accessControlledPath, acl);
             } else {
                 EMPTY_NODE.compareAgainstBaseState(before, new Diff(path));
             }
             return true;
         }
+    }
 
-        private PermissionStoreEditor createPermissionStoreEditor(@Nonnull String nodeName, @Nonnull NodeState nodeState) {
-            return new PermissionStoreEditor(parentPath, nodeName, nodeState, permissionRoot, isACE, isGrantACE, bitsProvider, restrictionProvider);
+    private final class Acl {
+
+        private final String accessControlledPath;
+
+        private final String nodeName;
+
+        private final Map<String, List<AcEntry>> entries = new HashMap<String, List<AcEntry>>();
+
+        private Acl(String aclPath, String name, @Nonnull NodeState node) {
+            if (name.equals(REP_REPO_POLICY)) {
+                this.accessControlledPath = "";
+            } else {
+                this.accessControlledPath = aclPath.length() == 0 ? "/" : aclPath;
+            }
+            nodeName = PermissionUtil.getEntryName(accessControlledPath);
+
+            Set<String> orderedChildNames =
+                    newLinkedHashSet(node.getNames(OAK_CHILD_ORDER));
+            long n = orderedChildNames.size();
+            if (node.getChildNodeCount(n + 1) > n) {
+                addAll(orderedChildNames, node.getChildNodeNames());
+            }
+
+            int index = 0;
+            for (String childName : orderedChildNames) {
+                NodeState ace = node.getChildNode(childName);
+                if (isACE.apply(ace)) {
+                    AcEntry entry = new AcEntry(ace, accessControlledPath, index);
+                    List<AcEntry> list = entries.get(entry.principalName);
+                    if (list == null) {
+                        list = new ArrayList<AcEntry>();
+                        entries.put(entry.principalName, list);
+                    }
+                    list.add(entry);
+                    index++;
+                }
+            }
+        }
+
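+        /**
+         * Removes the entries of this ACL from the permission store,
+         * processing the principals one at a time.
+         */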
+        private void remove() {
+            for (String principalName: entries.keySet()) {
+                if (permissionRoot.hasChildNode(principalName)) {
+                    NodeBuilder principalRoot = permissionRoot.getChildNode(principalName);
+
+                    // find the ACL node for this path and principal
+                    NodeBuilder parent = principalRoot.getChildNode(nodeName);
+                    if (!parent.exists()) {
+                        continue;
+                    }
+
+                    // check if the node is the correct one
+                    if (PermissionUtil.checkACLPath(parent, accessControlledPath)) {
+                        // remove and reconnect child nodes
+                        NodeBuilder newParent = null;
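+                        // children prefixed with 'c' are hash-collision buckets;
+                        // promote the first one and reattach the others below it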
+                        for (String childName : parent.getChildNodeNames()) {
+                            if (childName.charAt(0) != 'c') {
+                                continue;
+                            }
+                            NodeBuilder child = parent.getChildNode(childName);
+                            if (newParent == null) {
+                                newParent = child;
+                            } else {
+                                newParent.setChildNode(childName, child.getNodeState());
+                                child.remove();
+                            }
+                        }
+                        parent.remove();
+                        if (newParent != null) {
+                            principalRoot.setChildNode(nodeName, newParent.getNodeState());
+                        }
+                    } else {
+                        // check if any of the child nodes match
+                        for (String childName : parent.getChildNodeNames()) {
+                            if (childName.charAt(0) != 'c') {
+                                continue;
+                            }
+                            NodeBuilder child = parent.getChildNode(childName);
+                            if (PermissionUtil.checkACLPath(child, accessControlledPath)) {
+                                child.remove();
+                            }
+                        }
+                    }
+                } else {
+                    log.error("Unable to remove permission entry {}: Principal root missing.", this);
+                }
+            }
+        }
+
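+        /**
+         * Writes the entries of this ACL to the permission store, creating
+         * the principal and path nodes as needed and resolving hash
+         * collisions of the entry name via 'c'-prefixed child nodes.
+         */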
+        private void update() {
+            for (String principalName: entries.keySet()) {
+                NodeBuilder principalRoot = permissionRoot.child(principalName);
+                if (!principalRoot.hasProperty(JCR_PRIMARYTYPE)) {
+                    principalRoot.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
+                }
+                NodeBuilder parent = principalRoot.child(nodeName);
+                if (!parent.hasProperty(JCR_PRIMARYTYPE)) {
+                    parent.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
+                }
+
+                // check if current parent already has the correct path
+                if (parent.hasProperty(REP_ACCESS_CONTROLLED_PATH)) {
+                    if (!PermissionUtil.checkACLPath(parent, accessControlledPath)) {
+                        // hash collision, find a new child
+                        NodeBuilder child = null;
+                        int idx = 0;
+                        for (String childName : parent.getChildNodeNames()) {
+                            if (childName.charAt(0) != 'c') {
+                                continue;
+                            }
+                            child = parent.getChildNode(childName);
+                            if (PermissionUtil.checkACLPath(child, accessControlledPath)) {
+                                break;
+                            }
+                            child = null;
+                            idx++;
+                        }
+                        while (child == null) {
+                            String name = 'c' + String.valueOf(idx++);
+                            child = parent.getChildNode(name);
+                            if (child.exists()) {
+                                child = null;
+                            } else {
+                                child = parent.child(name);
+                                child.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
+                            }
+                        }
+                        parent = child;
+                        parent.setProperty(REP_ACCESS_CONTROLLED_PATH, accessControlledPath);
+                    }
+                } else {
+                    // new parent
+                    parent.setProperty(REP_ACCESS_CONTROLLED_PATH, accessControlledPath);
+                }
+                updateEntries(parent, entries.get(principalName));
+            }
+        }
+
+        private void updateEntries(NodeBuilder parent, List<AcEntry> list) {
+            // remove the old entry nodes; only the 'c'-prefixed collision children are kept
+            for (String childName : parent.getChildNodeNames()) {
+                if (childName.charAt(0) != 'c') {
+                    parent.getChildNode(childName).remove();
+                }
+            }
+            for (AcEntry ace: list) {
+                PermissionEntry.write(parent, ace.isAllow, ace.index, ace.privilegeBits, ace.restrictions);
+            }
+        }
+    }
+
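+    /**
+     * A single access control entry, reduced to the data that is stored
+     * in the permission store.
+     */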
+    private final class AcEntry {
+
+        private final String accessControlledPath;
+        private final String principalName;
+        private final PrivilegeBits privilegeBits;
+        private final boolean isAllow;
+        private final Set<Restriction> restrictions;
+        private final int index;
+        private int hashCode = -1;
+
+        private AcEntry(@Nonnull NodeState node, @Nonnull String accessControlledPath, int index) {
+            this.accessControlledPath = accessControlledPath;
+            this.index = index;
+
+            principalName = Text.escapeIllegalJcrChars(node.getString(REP_PRINCIPAL_NAME));
+            privilegeBits = bitsProvider.getBits(node.getNames(REP_PRIVILEGES));
+            isAllow = isGrantACE.apply(node);
+            restrictions = restrictionProvider.readRestrictions(Strings.emptyToNull(accessControlledPath), new ImmutableTree(node));
+        }
+
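+        // computed lazily; -1 marks a hash code that has not been calculated yet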
+        @Override
+        public int hashCode() {
+            if (hashCode == -1) {
+                hashCode = Objects.hashCode(accessControlledPath, principalName, privilegeBits, isAllow, restrictions);
+            }
+            return hashCode;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (o == this) {
+                return true;
+            }
+            if (o instanceof AcEntry) {
+                AcEntry other = (AcEntry) o;
+                return isAllow == other.isAllow
+                        && privilegeBits.equals(other.privilegeBits)
+                        && principalName.equals(other.principalName)
+                        && accessControlledPath.equals(other.accessControlledPath)
+                        && restrictions.equals(other.restrictions);
+            }
+            return false;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append(accessControlledPath);
+            sb.append(';').append(principalName);
+            sb.append(';').append(isAllow ? "allow" : "deny");
+            sb.append(';').append(bitsProvider.getPrivilegeNames(privilegeBits));
+            sb.append(';').append(restrictions);
+            return sb.toString();
         }
     }
 }
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionStoreEditor.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionStoreEditor.java
deleted file mode 100644
index 6a60c75..0000000
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/permission/PermissionStoreEditor.java
+++ /dev/null
@@ -1,332 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.oak.security.authorization.permission;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import javax.annotation.Nonnull;
-
-import com.google.common.base.Objects;
-import com.google.common.base.Strings;
-import com.google.common.collect.Maps;
-import org.apache.jackrabbit.oak.api.Type;
-import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate;
-import org.apache.jackrabbit.oak.plugins.tree.ImmutableTree;
-import org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants;
-import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionConstants;
-import org.apache.jackrabbit.oak.spi.security.authorization.restriction.Restriction;
-import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider;
-import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits;
-import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider;
-import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
-import org.apache.jackrabbit.oak.spi.state.NodeState;
-import org.apache.jackrabbit.util.Text;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static com.google.common.collect.Iterables.addAll;
-import static com.google.common.collect.Sets.newLinkedHashSet;
-import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
-import static org.apache.jackrabbit.oak.plugins.tree.TreeConstants.OAK_CHILD_ORDER;
-
-final class PermissionStoreEditor implements AccessControlConstants, PermissionConstants {
-
-    private static final Logger log = LoggerFactory.getLogger(PermissionStoreEditor.class);
-
-    final String accessControlledPath;
-    final String nodeName;
-    final Map<String, List<AcEntry>> entries = Maps.<String, List<AcEntry>>newHashMap();
-
-    private final NodeBuilder permissionRoot;
-    private final TypePredicate isACE;
-    private final RestrictionProvider restrictionProvider;
-
-    PermissionStoreEditor(@Nonnull String aclPath, @Nonnull String name,
-                          @Nonnull NodeState node, @Nonnull NodeBuilder permissionRoot,
-                          @Nonnull TypePredicate isACE, @Nonnull TypePredicate isGrantACE,
-                          @Nonnull PrivilegeBitsProvider bitsProvider,
-                          @Nonnull RestrictionProvider restrictionProvider) {
-        this.permissionRoot = permissionRoot;
-        this.isACE = isACE;
-        this.restrictionProvider = restrictionProvider;
-
-        if (name.equals(REP_REPO_POLICY)) {
-            accessControlledPath = "";
-        } else {
-            accessControlledPath = aclPath.length() == 0 ? "/" : aclPath;
-        }
-        nodeName = PermissionUtil.getEntryName(accessControlledPath);
-
-        Set<String> orderedChildNames = newLinkedHashSet(node.getNames(OAK_CHILD_ORDER));
-        long n = orderedChildNames.size();
-        if (node.getChildNodeCount(n + 1) > n) {
-            addAll(orderedChildNames, node.getChildNodeNames());
-        }
-
-        int index = 0;
-        for (String childName : orderedChildNames) {
-            NodeState ace = node.getChildNode(childName);
-            if (isACE.apply(ace)) {
-                boolean isAllow = isGrantACE.apply(ace);
-                PrivilegeBits privilegeBits = bitsProvider.getBits(ace.getNames(REP_PRIVILEGES));
-                Set<Restriction> restrictions = restrictionProvider.readRestrictions(Strings.emptyToNull(accessControlledPath), new ImmutableTree(ace));
-
-
-                AcEntry entry = new AcEntry(ace, accessControlledPath, index, isAllow, privilegeBits, restrictions);
-                List<AcEntry> list = entries.get(entry.principalName);
-                if (list == null) {
-                    list = new ArrayList<AcEntry>();
-                    entries.put(entry.principalName, list);
-                }
-                list.add(entry);
-                index++;
-            }
-        }
-    }
-
-    void removePermissionEntry(@Nonnull String principalName, @Nonnull PermissionEntry permissionEntry) {
-        if (permissionRoot.hasChildNode(principalName)) {
-            NodeBuilder principalRoot = permissionRoot.getChildNode(principalName);
-
-            // find the ACL node that for this path and principal
-            NodeBuilder parent = principalRoot.getChildNode(nodeName);
-            if (!parent.exists()) {
-                log.error("Unable to remove permission entry {}: Parent for node " + nodeName + " missing.", this);
-                return;
-            }
-
-            // check if the node is the correct one
-            if (!PermissionUtil.checkACLPath(parent, accessControlledPath)) {
-                parent = null;
-                // find the right collision node
-                for (String childName : parent.getChildNodeNames()) {
-                    if (childName.charAt(0) != 'c') {
-                        continue;
-                    }
-                    NodeBuilder child = parent.getChildNode(childName);
-                    if (PermissionUtil.checkACLPath(child, accessControlledPath)) {
-                        parent = child;
-                        break;
-                    }
-                }
-                if (parent == null) {
-                    log.error("Unable to remove permission entry {}: Parent for node " + nodeName + " missing.", this);
-                    return;
-                }
-            }
-
-            for (String childName : parent.getChildNodeNames()) {
-                if (childName.charAt(0) == 'c') {
-                    continue;
-                }
-
-                NodeBuilder entryNode = parent.getChildNode(childName);
-                if (permissionEntry.equals(new PermissionEntry(accessControlledPath, new ImmutableTree(entryNode.getNodeState()), restrictionProvider))) {
-                    entryNode.remove();
-                }
-            }
-        } else {
-            log.error("Unable to remove permission entry {}: Principal root missing.", this);
-        }
-    }
-
-    void removePermissionEntries() {
-        for (String principalName : entries.keySet()) {
-            if (permissionRoot.hasChildNode(principalName)) {
-                NodeBuilder principalRoot = permissionRoot.getChildNode(principalName);
-
-                // find the ACL node that for this path and principal
-                NodeBuilder parent = principalRoot.getChildNode(nodeName);
-                if (!parent.exists()) {
-                    continue;
-                }
-
-                // check if the node is the correct one
-                if (PermissionUtil.checkACLPath(parent, accessControlledPath)) {
-                    // remove and reconnect child nodes
-                    NodeBuilder newParent = null;
-                    for (String childName : parent.getChildNodeNames()) {
-                        if (childName.charAt(0) != 'c') {
-                            continue;
-                        }
-                        NodeBuilder child = parent.getChildNode(childName);
-                        if (newParent == null) {
-                            newParent = child;
-                        } else {
-                            newParent.setChildNode(childName, child.getNodeState());
-                            child.remove();
-                        }
-                    }
-                    parent.remove();
-                    if (newParent != null) {
-                        principalRoot.setChildNode(nodeName, newParent.getNodeState());
-                    }
-                } else {
-                    // check if any of the child nodes match
-                    for (String childName : parent.getChildNodeNames()) {
-                        if (childName.charAt(0) != 'c') {
-                            continue;
-                        }
-                        NodeBuilder child = parent.getChildNode(childName);
-                        if (PermissionUtil.checkACLPath(child, accessControlledPath)) {
-                            child.remove();
-                        }
-                    }
-                }
-            } else {
-                log.error("Unable to remove permission entry {}: Principal root missing.", this);
-            }
-        }
-    }
-
-    void updatePermissionEntries() {
-        for (String principalName: entries.keySet()) {
-            NodeBuilder principalRoot = permissionRoot.child(principalName);
-            if (!principalRoot.hasProperty(JCR_PRIMARYTYPE)) {
-                principalRoot.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
-            }
-            NodeBuilder parent = principalRoot.child(nodeName);
-            if (!parent.hasProperty(JCR_PRIMARYTYPE)) {
-                parent.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
-            }
-
-            // check if current parent already has the correct path
-            if (parent.hasProperty(REP_ACCESS_CONTROLLED_PATH)) {
-                if (!PermissionUtil.checkACLPath(parent, accessControlledPath)) {
-                    // hash collision, find a new child
-                    NodeBuilder child = null;
-                    int idx = 0;
-                    for (String childName : parent.getChildNodeNames()) {
-                        if (childName.charAt(0) != 'c') {
-                            continue;
-                        }
-                        child = parent.getChildNode(childName);
-                        if (PermissionUtil.checkACLPath(child, accessControlledPath)) {
-                            break;
-                        }
-                        child = null;
-                        idx++;
-                    }
-                    while (child == null) {
-                        String name = 'c' + String.valueOf(idx++);
-                        child = parent.getChildNode(name);
-                        if (child.exists()) {
-                            child = null;
-                        } else {
-                            child = parent.child(name);
-                            child.setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSION_STORE, Type.NAME);
-                        }
-                    }
-                    parent = child;
-                    parent.setProperty(REP_ACCESS_CONTROLLED_PATH, accessControlledPath);
-                }
-            } else {
-                // new parent
-                parent.setProperty(REP_ACCESS_CONTROLLED_PATH, accessControlledPath);
-            }
-            updateEntries(parent, entries.get(principalName));
-        }
-    }
-
-    private void updateEntries(NodeBuilder parent, List<AcEntry> list) {
-        // remove old entries
-        for (String childName : parent.getChildNodeNames()) {
-            if (childName.charAt(0) != 'c') {
-                parent.getChildNode(childName).remove();
-            }
-        }
-        for (AcEntry ace: list) {
-            ace.writeToPermissionStore(parent);
-        }
-    }
-
-    final class AcEntry {
-
-        final String accessControlledPath;
-        final String principalName;
-        final PrivilegeBits privilegeBits;
-        final boolean isAllow;
-        final Set<Restriction> restrictions;
-        final int index;
-        int hashCode = -1;
-
-        private TypePredicate isACE;
-
-        AcEntry(@Nonnull NodeState node, @Nonnull String accessControlledPath, int index,
-                boolean isAllow, PrivilegeBits privilegeBits, Set<Restriction> restrictions) {
-            this.accessControlledPath = accessControlledPath;
-            this.index = index;
-
-            this.principalName = Text.escapeIllegalJcrChars(node.getString(REP_PRINCIPAL_NAME));
-            this.privilegeBits = privilegeBits;
-            this.isAllow = isAllow;
-            this.restrictions = restrictions;
-        }
-
-        PermissionEntry asPermissionEntry() {
-            return new PermissionEntry(accessControlledPath, isAllow, index, privilegeBits, restrictionProvider.getPattern(accessControlledPath, restrictions));
-        }
-
-        void writeToPermissionStore(NodeBuilder parent) {
-            NodeBuilder n = parent.child(String.valueOf(index))
-                    .setProperty(JCR_PRIMARYTYPE, NT_REP_PERMISSIONS, Type.NAME)
-                    .setProperty(REP_IS_ALLOW, isAllow)
-                    .setProperty(privilegeBits.asPropertyState(REP_PRIVILEGE_BITS));
-            for (Restriction restriction : restrictions) {
-                n.setProperty(restriction.getProperty());
-            }
-        }
-
-        //-------------------------------------------------------------< Object >---
-        @Override
-        public int hashCode() {
-            if (hashCode == -1) {
-                hashCode = Objects.hashCode(accessControlledPath, principalName, privilegeBits, isAllow, restrictions);
-            }
-            return hashCode;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (o == this) {
-                return true;
-            }
-            if (o instanceof AcEntry) {
-                AcEntry other = (AcEntry) o;
-                return isAllow == other.isAllow
-                        && privilegeBits.equals(other.privilegeBits)
-                        && principalName.equals(other.principalName)
-                        && accessControlledPath.equals(other.accessControlledPath)
-                        && restrictions.equals(other.restrictions);
-            }
-            return false;
-        }
-
-        @Override
-        public String toString() {
-            StringBuilder sb = new StringBuilder();
-            sb.append(accessControlledPath);
-            sb.append(';').append(principalName);
-            sb.append(';').append(isAllow ? "allow" : "deny");
-            sb.append(';').append(privilegeBits);
-            sb.append(';').append(restrictions);
-            return sb.toString();
-        }
-    }
-}
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/PrincipalRestrictionProvider.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/PrincipalRestrictionProvider.java
index b9eac1b..4f485ec 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/PrincipalRestrictionProvider.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/PrincipalRestrictionProvider.java
@@ -108,10 +108,4 @@
     public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Tree tree) {
         return base.getPattern(oakPath, tree);
     }
-
-    @Nonnull
-    @Override
-    public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions) {
-        return base.getPattern(oakPath, restrictions);
-    }
 }
\ No newline at end of file
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImpl.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImpl.java
index 716f69e..1c91f22 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImpl.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImpl.java
@@ -19,9 +19,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import javax.jcr.security.AccessControlException;
 
 import com.google.common.collect.ImmutableMap;
@@ -32,13 +29,10 @@
 import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.AbstractRestrictionProvider;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.CompositePattern;
-import org.apache.jackrabbit.oak.spi.security.authorization.restriction.Restriction;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionDefinition;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionDefinitionImpl;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionPattern;
 import org.apache.jackrabbit.oak.spi.security.authorization.restriction.RestrictionProvider;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Default restriction provider implementation that supports the following
@@ -57,8 +51,6 @@
 @Service(RestrictionProvider.class)
 public class RestrictionProviderImpl extends AbstractRestrictionProvider {
 
-    private static final Logger log = LoggerFactory.getLogger(RestrictionProviderImpl.class);
-
     public RestrictionProviderImpl() {
         super(supportedRestrictions());
     }
@@ -77,8 +69,9 @@
         if (oakPath == null) {
             return RestrictionPattern.EMPTY;
         } else {
-            List<RestrictionPattern> patterns = new ArrayList<RestrictionPattern>(3);
             PropertyState glob = tree.getProperty(REP_GLOB);
+
+            List<RestrictionPattern> patterns = new ArrayList<RestrictionPattern>(2);
             if (glob != null) {
                 patterns.add(GlobPattern.create(oakPath, glob.getValue(Type.STRING)));
             }
@@ -86,34 +79,17 @@
             if (ntNames != null) {
                 patterns.add(new NodeTypePattern(ntNames.getValue(Type.NAMES)));
             }
+
             PropertyState prefixes = tree.getProperty(REP_PREFIXES);
             if (prefixes != null) {
                 patterns.add(new PrefixPattern(prefixes.getValue(Type.STRINGS)));
             }
-            return CompositePattern.create(patterns);
-        }
-    }
 
-    @Nonnull
-    @Override
-    public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions) {
-        if (oakPath == null || restrictions.isEmpty()) {
-            return RestrictionPattern.EMPTY;
-        } else {
-            List<RestrictionPattern> patterns = new ArrayList<RestrictionPattern>(3);
-            for (Restriction r : restrictions) {
-                String name = r.getDefinition().getName();
-                if (REP_GLOB.equals(name)) {
-                    patterns.add(GlobPattern.create(oakPath, r.getProperty().getValue(Type.STRING)));
-                } else if (REP_NT_NAMES.equals(name)) {
-                    patterns.add(new NodeTypePattern(r.getProperty().getValue(Type.NAMES)));
-                } else if (REP_PREFIXES.equals(name)) {
-                    patterns.add(new PrefixPattern(r.getProperty().getValue(Type.STRINGS)));
-                } else {
-                    log.debug("Ignoring unsupported restriction " + name);
-                }
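+            // return a single pattern directly; combine several into a composite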
+            switch (patterns.size()) {
+                case 0 : return RestrictionPattern.EMPTY;
+                case 1 : return patterns.get(0);
+                default : return new CompositePattern(patterns);
             }
-            return CompositePattern.create(patterns);
         }
     }
 
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/query/QueryIndex.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/query/QueryIndex.java
index a5cfe10..c777dae 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/query/QueryIndex.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/query/QueryIndex.java
@@ -153,10 +153,9 @@
          * Get the query plan description (for logging purposes).
          * 
          * @param plan the index plan
-         * @param rootState root state of the current repository snapshot
          * @return the query plan description
          */
-        String getPlanDescription(IndexPlan plan, NodeState root);
+        String getPlanDescription(IndexPlan plan);
 
         /**
          * Start a query. The filter and sort order of the index plan is to be
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositePattern.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositePattern.java
index 3200df2..271dd96 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositePattern.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositePattern.java
@@ -37,14 +37,6 @@
         this.patterns = patterns;
     }
 
-    public static RestrictionPattern create(@Nonnull List<RestrictionPattern> patterns) {
-        switch (patterns.size()) {
-            case 0 : return RestrictionPattern.EMPTY;
-            case 1 : return patterns.get(0);
-            default : return new CompositePattern(patterns);
-        }
-    }
-
     @Override
     public boolean matches(@Nonnull Tree tree, @Nullable PropertyState property) {
         for (RestrictionPattern pattern : patterns) {
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositeRestrictionProvider.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositeRestrictionProvider.java
index 4ffa024..9952fd9 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositeRestrictionProvider.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/CompositeRestrictionProvider.java
@@ -128,20 +128,18 @@
     @Nonnull
     @Override
     public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Tree tree) {
-        return getPattern(oakPath, readRestrictions(oakPath, tree));
-    }
-
-    @Nonnull
-    @Override
-    public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions) {
         List<RestrictionPattern> patterns = new ArrayList<RestrictionPattern>();
         for (RestrictionProvider rp : providers) {
-            RestrictionPattern pattern = rp.getPattern(oakPath, restrictions);
+            RestrictionPattern pattern = rp.getPattern(oakPath, tree);
             if (pattern != RestrictionPattern.EMPTY) {
                 patterns.add(pattern);
             }
         }
-        return CompositePattern.create(patterns);
+        switch (patterns.size()) {
+            case 0 : return RestrictionPattern.EMPTY;
+            case 1 : return patterns.iterator().next();
+            default : return new CompositePattern(patterns);
+        }
     }
 
     //------------------------------------------------------------< private >---
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/RestrictionProvider.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/RestrictionProvider.java
index 25f197d..e0c43a3 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/RestrictionProvider.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/RestrictionProvider.java
@@ -134,20 +134,6 @@
     RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Tree tree);
 
     /**
-     * Creates the {@link RestrictionPattern} for the specified restrictions.
-     * The implementation should ignore all restrictions present in the specified
-     * set that it doesn't support.
-     *
-     * @param oakPath The path of the access controlled tree or {@code null} if
-     * the target policies applies to the repository level.
-     * @param restrictions the restrictions.
-     * @return A new {@link RestrictionPattern} representing those restrictions
-     * of the specified set that are supported by this implementation.
-     */
-    @Nonnull
-    RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions);
-
-    /**
      * Empty restriction provider implementation that doesn't support any
      * restrictions.
      */
@@ -192,11 +178,5 @@
         public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Tree tree) {
             return RestrictionPattern.EMPTY;
         }
-
-        @Nonnull
-        @Override
-        public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions) {
-            return RestrictionPattern.EMPTY;
-        }
     };
 }
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeStoreBranch.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeStoreBranch.java
index ae2a226..f031e51 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeStoreBranch.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/state/AbstractNodeStoreBranch.java
@@ -696,17 +696,17 @@
         @Nonnull
         @Override
         NodeState getHead() {
-            throw new IllegalStateException("Branch with failed reset", ex);
+            throw new IllegalStateException("Branch with failed reset");
         }
 
         @Override
         void setRoot(NodeState root) {
-            throw new IllegalStateException("Branch with failed reset", ex);
+            throw new IllegalStateException("Branch with failed reset");
         }
 
         @Override
         void rebase() {
-            throw new IllegalStateException("Branch with failed reset", ex);
+            throw new IllegalStateException("Branch with failed reset");
         }
 
         /**
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/AbstractServiceTracker.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/AbstractServiceTracker.java
index 253e029..9d2c4a0 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/AbstractServiceTracker.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/AbstractServiceTracker.java
@@ -20,7 +20,6 @@
 
 import static com.google.common.base.Preconditions.checkNotNull;
 import static com.google.common.base.Preconditions.checkState;
-import static java.util.Collections.emptyList;
 
 import java.util.List;
 
@@ -33,58 +32,32 @@
  */
 public abstract class AbstractServiceTracker<T> {
 
-    /**
-     * Sentinel object used as the {@link #tracker} value of an already
-     * stopped instance.
-     */
-    private final Tracker<T> stopped = new Tracker<T>() {
-        @Override
-        public List<T> getServices() {
-            return emptyList();
-        }
-        @Override
-        public void stop() {
-            // do nothing
-        }
-    };
-
-    /**
-     * The type of services tracked by this instance.
-     */
     private final Class<T> type;
 
-    /**
-     * The underlying {@link Tracker}, or the {@link #stopped} sentinel
-     * sentinel object when this instance is not active. This variable
-     * is {@code volatile} so that the {@link #getServices()} method will
-     * always see the latest state without having to be synchronized.
-     */
-    private volatile Tracker<T> tracker = stopped;
+    private Tracker<T> tracker = null;
 
-    protected AbstractServiceTracker(@Nonnull Class<T> type) {
+    public AbstractServiceTracker(@Nonnull Class<T> type) {
         this.type = checkNotNull(type);
     }
 
     public synchronized void start(Whiteboard whiteboard) {
-        checkState(tracker == stopped);
+        checkState(tracker == null);
         tracker = whiteboard.track(type);
     }
 
     public synchronized void stop() {
-        checkState(tracker != stopped);
-        Tracker<T> t = tracker;
-        tracker = stopped;
-        t.stop();
+        checkState(tracker != null);
+        tracker.stop();
+        tracker = null;
     }
 
     /**
-     * Returns all services of type {@code T} that are currently available.
-     * This method is intentionally not synchronized to prevent lock
-     * contention when accessed frequently in highly concurrent code.
+     * Returns all services of type {@code T} currently available.
      *
-     * @return currently available services
+     * @return services currently available.
      */
-    protected List<T> getServices() {
+    protected synchronized List<T> getServices() {
+        checkState(tracker != null);
         return tracker.getServices();
     }
 
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/Tracker.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/Tracker.java
index dd63f44..6c3bb03 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/Tracker.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/Tracker.java
@@ -18,6 +18,10 @@
 
 import java.util.List;
 
+import javax.annotation.CheckForNull;
+
+import com.google.common.base.Predicate;
+
 /**
  * Tracker for whiteboard services.
  */
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/WhiteboardRestrictionProvider.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/WhiteboardRestrictionProvider.java
index f0db6bb..57c8d49 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/WhiteboardRestrictionProvider.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/spi/whiteboard/WhiteboardRestrictionProvider.java
@@ -83,12 +83,6 @@
         return getProvider().getPattern(oakPath, tree);
     }
 
-    @Nonnull
-    @Override
-    public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions) {
-        return getProvider().getPattern(oakPath, restrictions);
-    }
-
     //------------------------------------------------------------< private >---
     private RestrictionProvider getProvider() {
         return CompositeRestrictionProvider.newInstance(getServices());
diff --git a/oak-core/src/main/java/org/apache/jackrabbit/oak/stats/Clock.java b/oak-core/src/main/java/org/apache/jackrabbit/oak/stats/Clock.java
index 69be6e6..3b74f6b 100644
--- a/oak-core/src/main/java/org/apache/jackrabbit/oak/stats/Clock.java
+++ b/oak-core/src/main/java/org/apache/jackrabbit/oak/stats/Clock.java
@@ -51,7 +51,7 @@
      * to test the effect of different update frequencies.
      */
     static final long FAST_CLOCK_INTERVAL =
-            Long.getLong("fast.clock.interval", 1);
+            Long.getLong("fast.clock.interval", 10);
 
     private long monotonic = 0;
 
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/AbstractSecurityTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/AbstractSecurityTest.java
index 06fae3f..58ae8ed 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/AbstractSecurityTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/AbstractSecurityTest.java
@@ -16,9 +16,12 @@
  */
 package org.apache.jackrabbit.oak;
 
+import static com.google.common.collect.Lists.newArrayList;
+
 import java.util.Arrays;
 import java.util.List;
 import java.util.UUID;
+
 import javax.annotation.Nullable;
 import javax.jcr.Credentials;
 import javax.jcr.NoSuchWorkspaceException;
@@ -39,8 +42,6 @@
 import org.apache.jackrabbit.oak.api.ContentSession;
 import org.apache.jackrabbit.oak.api.Root;
 import org.apache.jackrabbit.oak.namepath.NamePathMapper;
-import org.apache.jackrabbit.oak.plugins.commit.ConflictValidatorProvider;
-import org.apache.jackrabbit.oak.plugins.commit.JcrConflictHandler;
 import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexEditorProvider;
 import org.apache.jackrabbit.oak.plugins.index.property.PropertyIndexProvider;
 import org.apache.jackrabbit.oak.plugins.index.reference.ReferenceEditorProvider;
@@ -61,8 +62,6 @@
 import org.junit.After;
 import org.junit.Before;
 
-import static com.google.common.collect.Lists.newArrayList;
-
 /**
  * AbstractOakTest is the base class for oak test execution.
  */
@@ -82,14 +81,16 @@
     public void before() throws Exception {
         Oak oak = new Oak()
                 .with(new InitialContent())
-                .with(JcrConflictHandler.JCR_CONFLICT_HANDLER)
+// FIXME review whether we need to improve the test setup here. See also OAK-1541
+//                .with(JcrConflictHandler.JCR_CONFLICT_HANDLER)
                 .with(new NamespaceEditorProvider())
                 .with(new ReferenceEditorProvider())
                 .with(new ReferenceIndexProvider())
                 .with(new PropertyIndexEditorProvider())
                 .with(new PropertyIndexProvider())
                 .with(new TypeEditorProvider())
-                .with(new ConflictValidatorProvider())
+// FIXME review whether we need to improve the test setup here. See also OAK-1541
+//                .with(new ConflictValidatorProvider())
                 .with(getSecurityProvider());
         withEditors(oak);
         contentRepository = oak.createContentRepository();
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/core/MutableTreeTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/core/MutableTreeTest.java
index 1fbec84..52dccc6 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/core/MutableTreeTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/core/MutableTreeTest.java
@@ -313,17 +313,6 @@
     }
 
     @Test
-    public void isNew() throws CommitFailedException {
-        Tree tree = root.getTree("/");
-        tree.addChild("c");
-        root.commit();
-
-        tree.getChild("c").remove();
-        Tree c = tree.addChild("c");
-        assertEquals(Status.NEW, c.getStatus());
-    }
-
-    @Test
     public void modifiedAfterRebase() throws CommitFailedException {
         Tree tree = root.getTree("/");
 
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/db/DbBlobStoreTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/db/DbBlobStoreTest.java
new file mode 100644
index 0000000..02f418a
--- /dev/null
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/blob/db/DbBlobStoreTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.jackrabbit.oak.plugins.blob.db;
+
+import java.sql.Connection;
+
+import org.apache.jackrabbit.oak.spi.blob.AbstractBlobStoreTest;
+import org.h2.jdbcx.JdbcConnectionPool;
+
+/**
+ * Tests the DbBlobStore implementation.
+ */
+public class DbBlobStoreTest extends AbstractBlobStoreTest {
+
+    private Connection sentinel;
+    private JdbcConnectionPool cp;
+
+    @Override
+    public void setUp() throws Exception {
+        Class.forName("org.h2.Driver");
+        cp = JdbcConnectionPool.create("jdbc:h2:mem:", "", "");
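+        // hold one connection for the whole test; this presumably keeps the
+        // in-memory H2 database from being discarded while the pool is idle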
+        sentinel = cp.getConnection();
+        DbBlobStore blobStore = new DbBlobStore();
+        blobStore.setConnectionPool(cp);
+        blobStore.setBlockSize(128);
+        blobStore.setBlockSizeMin(48);
+        this.store = blobStore;
+    }
+
+    @Override
+    public void tearDown() throws Exception {
+        super.tearDown();
+        if (sentinel != null) {
+            sentinel.close();
+        }
+        cp.dispose();
+    }
+
+    protected int getArtifactSize() {
+        return 4160;
+    }
+
+}
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java
index c959b94..3704b7c 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/AbstractDocumentStoreTest.java
@@ -17,26 +17,20 @@
 package org.apache.jackrabbit.oak.plugins.document;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Set;
 
 import org.junit.After;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 @RunWith(Parameterized.class)
 public abstract class AbstractDocumentStoreTest {
 
     protected String dsname;
     protected DocumentStore ds;
-    protected List<String> removeMe = new ArrayList<String>();
-
-    static final Logger LOG = LoggerFactory.getLogger(AbstractDocumentStoreTest.class);
+    protected Set<String> removeMe = new HashSet<String>();
 
     public AbstractDocumentStoreTest(DocumentStoreFixture dsf) {
         this.ds = dsf.createDocumentStore();
@@ -45,24 +39,11 @@
 
     @After
     public void cleanUp() {
-        if (!removeMe.isEmpty()) {
-            long start = System.nanoTime();
+        for (String id : removeMe) {
             try {
-                ds.remove(org.apache.jackrabbit.oak.plugins.document.Collection.NODES, removeMe);
+                ds.remove(org.apache.jackrabbit.oak.plugins.document.Collection.NODES, id);
             } catch (Exception ex) {
-                // retry one by one
-                for (String id : removeMe) {
-                    try {
-                        ds.remove(org.apache.jackrabbit.oak.plugins.document.Collection.NODES, id);
-                    } catch (Exception ex2) {
-                        // best effort
-                    }
-                }
-            }
-            if (removeMe.size() > 1) {
-                long elapsed = (System.nanoTime() - start) / (1000 * 1000);
-                float rate = (((float)removeMe.size()) / (elapsed == 0 ? 1 : elapsed));
-                LOG.info(removeMe.size() + " documents removed in " + elapsed + "ms (" + rate + "/ms)");
+                // best effort
             }
         }
     }
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java
index 831d7ca..267f4cb 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/BasicDocumentStoreTest.java
@@ -16,17 +16,10 @@
  */
 package org.apache.jackrabbit.oak.plugins.document;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.UUID;
 
 import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentStore;
 import org.junit.Assume;
@@ -116,88 +109,6 @@
     }
 
     @Test
-    public void testDeleteNonExisting() {
-        String id = this.getClass().getName() + ".testDeleteNonExisting-" + UUID.randomUUID();
-        // delete is best effort
-        ds.remove(Collection.NODES, id);
-    }
-
-    @Test
-    public void testDeleteNonExistingMultiple() {
-        String id = this.getClass().getName() + ".testDeleteNonExistingMultiple-" + UUID.randomUUID();
-        // create a test node
-        UpdateOp up = new UpdateOp(id, true);
-        up.set("_id", id + "-2");
-        boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
-        assertTrue(success);
-        List<String> todelete = new ArrayList<String>();
-        todelete.add(id + "-2");
-        todelete.add(id);
-        ds.remove(Collection.NODES, todelete);
-        // id-2 should be removed
-        Document d = ds.find(Collection.NODES, id + "-2");
-        assertTrue(d == null);
-    }
-
-    @Test
-    public void testUpdateMultiple() {
-        String id = this.getClass().getName() + ".testUpdateMultiple";
-        // create a test node
-        UpdateOp up = new UpdateOp(id, true);
-        up.set("_id", id);
-        boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
-        assertTrue(success);
-        removeMe.add(id);
-
-        // update a non-existing one and this one
-        List<String> toupdate = new ArrayList<String>();
-        toupdate.add(id + "-" + UUID.randomUUID());
-        toupdate.add(id);
-
-        UpdateOp up2 = new UpdateOp(id, false);
-        up2.set("foo", "bar");
-        ds.update(Collection.NODES, toupdate, up2);
-
-        // id should be updated
-        ds.invalidateCache();
-        Document d = ds.find(Collection.NODES, id);
-        assertNotNull(d);
-        assertEquals(d.get("foo").toString(), "bar");
-    }
-
-    @Test
-    public void testQuery() {
-        // create ten documents
-        String base = this.getClass().getName() + ".testQuery-";
-        for (int i = 0; i < 10; i++) {
-            String id = base + i;
-            UpdateOp up = new UpdateOp(id, true);
-            up.set("_id", id);
-            boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
-            assertTrue("document with " + id + " not created", success);
-            removeMe.add(id);
-        }
-
-        Set<String> result = getKeys(ds.query(Collection.NODES, base, base + "A", 5));
-        assertEquals(5, result.size());
-        assertTrue(result.contains(base + "4"));
-        assertFalse(result.contains(base + "5"));
-
-        result = getKeys(ds.query(Collection.NODES, base, base + "A", 20));
-        assertEquals(10, result.size());
-        assertTrue(result.contains(base + "0"));
-        assertTrue(result.contains(base + "9"));
-    }
-
-    private Set<String> getKeys(List<NodeDocument> docs) {
-        Set<String> result = new HashSet<String>();
-        for (NodeDocument doc : docs) {
-            result.add(doc.getId());
-        }
-        return result;
-    }
-
-    @Test
     public void testCreatePerfSmall() {
         createPerf(16);
     }
@@ -219,7 +130,7 @@
             up.set("_id", id);
             up.set("foo", pval);
             boolean success = super.ds.create(Collection.NODES, Collections.singletonList(up));
-            assertTrue("document with " + id + " not created", success);
+            assertTrue("document with " + id + " nit created", success);
             removeMe.add(id);
             cnt += 1;
         }
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/ConcurrentConflictTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/ConcurrentConflictTest.java
index 645195e..9493275 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/ConcurrentConflictTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/ConcurrentConflictTest.java
@@ -78,7 +78,6 @@
         concurrentUpdates(true);
     }
 
-    @Ignore("OAK-1788")
     @Test
     public void concurrentUpdates() throws Exception {
         concurrentUpdates(false);
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/ConcurrentDocumentStoreTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/ConcurrentDocumentStoreTest.java
deleted file mode 100644
index f0884d7..0000000
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/ConcurrentDocumentStoreTest.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.oak.plugins.document;
-
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ConcurrentDocumentStoreTest extends AbstractDocumentStoreTest {
-
-    static final Logger LOG = LoggerFactory.getLogger(ConcurrentDocumentStoreTest.class);
-
-    public ConcurrentDocumentStoreTest(DocumentStoreFixture dsf) {
-        super(dsf);
-    }
-
-    @Test
-    public void testConcurrentUpdate() throws Exception {
-        int workers = 20; // TODO: this test is going to fail if the number of
-                          // workers exceeds the number of retries done by
-                          // RDBDocumentStore
-        String id = this.getClass().getName() + ".testConcurrentUpdate";
-        UpdateOp up = new UpdateOp(id, true);
-        up.set("_id", id);
-        up.set("thread", Thread.currentThread().getName());
-        up.set("counter", 0L);
-        ds.create(Collection.NODES, Collections.singletonList(up));
-        super.removeMe.add(id);
-        List<Exception> exceptions = Collections.synchronizedList(new ArrayList<Exception>());
-        List<Thread> worker = new ArrayList<Thread>();
-        for (int i = 0; i < workers; i++) {
-            worker.add(new Thread(new Worker(id, false, exceptions)));
-        }
-        for (Thread t : worker) {
-            t.start();
-        }
-        for (Thread t : worker) {
-            t.join();
-        }
-        for (Exception e : exceptions) {
-            fail(e.toString());
-        }
-        Document d = ds.find(Collection.NODES, id);
-        String val = d.get("counter").toString();
-        org.junit.Assert.assertEquals("counter property not updated as expected", Integer.toString(workers), val);
-    }
-
-    @Test
-    public void testConcurrenCreateOrUpdate() throws Exception {
-        int workers = 8; // TODO: this test is going to fail if the number of
-                         // workers exceeds the number of retries done by
-                         // RDBDocumentStore
-        String id = this.getClass().getName() + ".testConcurrentCreateOrUpdate";
-        super.removeMe.add(id);
-        List<Exception> exceptions = Collections.synchronizedList(new ArrayList<Exception>());
-        List<Thread> worker = new ArrayList<Thread>();
-        for (int i = 0; i < workers; i++) {
-            worker.add(new Thread(new Worker(id, true, exceptions)));
-        }
-        for (Thread t : worker) {
-            t.start();
-        }
-        for (Thread t : worker) {
-            t.join();
-        }
-        for (Exception e : exceptions) {
-            fail(e.toString());
-        }
-        Document d = ds.find(Collection.NODES, id);
-        String val = d.get("counter").toString();
-        assertEquals("counter property not updated as expected", Integer.toString(workers), val);
-    }
-
-    private final class Worker implements Runnable {
-
-        private final String id;
-        private final boolean create;
-        private final List<Exception> exceptions;
-
-        Worker(String id, boolean create, List<Exception> exceptions) {
-            this.id = id;
-            this.create = create;
-            this.exceptions = exceptions;
-        }
-
-        @Override
-        public void run() {
-            try {
-                UpdateOp up = new UpdateOp(id, true);
-                up.set("_id", id);
-                up.set("thread", Thread.currentThread().getName());
-                up.increment("counter", 1L);
-                if (create) {
-                    ds.createOrUpdate(Collection.NODES, up);
-                } else {
-                    Document d = ds.findAndUpdate(Collection.NODES, up);
-                    assertTrue(d.isSealed());
-                }
-            } catch (Exception ex) {
-                LOG.error("trying to create/update", ex);
-                exceptions.add(ex);
-            }
-        }
-    }
-}
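
The TODO in the removed tests points at a real constraint: findAndUpdate and createOrUpdate retry a conditional write a fixed number of times, so enough contending workers can exhaust the budget. A minimal, self-contained sketch of that retry pattern, assuming a budget of 10 (BoundedRetry and its names are illustrative, not Oak API):

    import java.util.concurrent.atomic.AtomicLong;

    class BoundedRetry {
        // Illustrative budget; RDBDocumentStore's actual limit may differ.
        static final int MAX_RETRIES = 10;

        static long increment(AtomicLong counter) {
            for (int attempt = 0; attempt < MAX_RETRIES; attempt++) {
                long current = counter.get();
                // CAS-style conditional write: succeeds only if nobody raced us
                if (counter.compareAndSet(current, current + 1)) {
                    return current + 1;
                }
            }
            // with more contending workers than retries this becomes reachable,
            // which is exactly what the TODO above warns about
            throw new IllegalStateException("retries exhausted");
        }
    }
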
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoDocumentStoreLimitsTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoDocumentStoreLimitsTest.java
deleted file mode 100644
index 558a28b..0000000
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/MongoDocumentStoreLimitsTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.oak.plugins.document;
-
-import org.apache.jackrabbit.oak.api.CommitFailedException;
-import org.apache.jackrabbit.oak.plugins.document.util.Utils;
-import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
-import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
-import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
-import org.junit.Test;
-
-import com.google.common.base.Strings;
-
-import static junit.framework.Assert.assertNotNull;
-
-/**
- * Test for OAK-1589
- */
-public class MongoDocumentStoreLimitsTest extends AbstractMongoConnectionTest {
-
-    @Test
-    public void longName() throws Exception {
-        DocumentNodeStore ns = mk.getNodeStore();
-        NodeBuilder builder = ns.getRoot().builder();
-
-        builder.child("test");
-        ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
-
-        String longName = Strings.repeat("foo_", 10000);
-        String longPath = String.format("/test/%s", longName);
-
-        builder = ns.getRoot().builder();
-        builder.child("test").child(longName);
-
-        try {
-            ns.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
-        } catch (CommitFailedException e) {
-            // expected to fail
-            return;
-        }
-
-        // check that the document was created
-        // when no exception was thrown
-        String id = Utils.getIdFromPath(longPath);
-        NodeDocument doc = ns.getDocumentStore().find(Collection.NODES, id, 0);
-        assertNotNull(doc);
-    }
-}
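
The removed test exercised OAK-1589: document ids embed the full node path, so an extremely long node name yields an oversized id that MongoDB may reject, and the merge is expected to fail. The id derivation, using the same calls the test used (Strings is Guava, Utils is Oak's document util class):

    String longName = Strings.repeat("foo_", 10000);        // ~40k characters
    String id = Utils.getIdFromPath("/test/" + longName);   // id embeds the path
    // the resulting id is far larger than MongoDB indexes comfortably
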
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/RevisionTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/RevisionTest.java
index 912fa0e..af63018 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/RevisionTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/document/RevisionTest.java
@@ -16,26 +16,12 @@
  */
 package org.apache.jackrabbit.oak.plugins.document;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Queues;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.Uninterruptibles;
 import org.apache.jackrabbit.oak.plugins.document.Revision.RevisionComparator;
 import org.junit.Test;
 
@@ -235,35 +221,6 @@
         assertTrue(comp.compare(r1c2, r2c1) < 0);
     }
 
-    // OAK-1727
-    @Test
-    public void clusterCompare2() {
-        RevisionComparator comp = new RevisionComparator(1);
-
-        comp.add(Revision.fromString("r3-0-1"), Revision.fromString("r1-1-0"));
-
-        Revision r1 = Revision.fromString("r1-0-2");
-        Revision r2 = Revision.fromString("r4-0-2");
-
-        // cluster sync
-        Revision c1sync = Revision.fromString("r5-0-1");
-        comp.add(c1sync,  Revision.fromString("r2-0-0"));
-        Revision c2sync = Revision.fromString("r4-1-2");
-        comp.add(c2sync,  Revision.fromString("r2-1-0"));
-        Revision c3sync = Revision.fromString("r2-0-3");
-        comp.add(c3sync,  Revision.fromString("r2-1-0"));
-
-        assertTrue(comp.compare(r1, r2) < 0);
-        assertTrue(comp.compare(r2, c2sync) < 0);
-        // same seen-at revision, but clusterId 2 < 3
-        assertTrue(comp.compare(c2sync, c3sync) < 0);
-
-        // this means c3sync must come after r1 and r2,
-        // because r1 < r2 < c2sync < c3sync
-        assertTrue(comp.compare(r1, c3sync) < 0);
-        assertTrue(comp.compare(r2, c3sync) < 0);
-    }
-
     @Test
     public void revisionSeen() {
         RevisionComparator comp = new RevisionComparator(1);
@@ -297,109 +254,4 @@
         assertEquals(new Revision(0x30, 0, 0), comp.getRevisionSeen(r21));
     }
 
-    @Test
-    public void uniqueRevision2() throws Exception {
-        List<Thread> threads = new ArrayList<Thread>();
-        final AtomicBoolean stop = new AtomicBoolean();
-        final Set<Revision> set = Collections
-                .synchronizedSet(new HashSet<Revision>());
-        final Revision[] duplicate = new Revision[1];
-        for (int i = 0; i < 20; i++) {
-            Thread thread = new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    Revision[] last = new Revision[1024];
-                    while (!stop.get()) {
-                        for (Revision r : last) {
-                            set.remove(r);
-                        }
-                        for (int i = 0; i < last.length; i++) {
-                            last[i] = Revision.newRevision(1);
-                        }
-                        for (Revision r : last) {
-                            if (!set.add(r)) {
-                                duplicate[0] = r;
-                            }
-                        }
-                    }
-                }
-            });
-            thread.start();
-            threads.add(thread);
-        }
-        Thread.sleep(200);
-        stop.set(true);
-        for (Thread t : threads) {
-            t.join();
-        }
-        assertNull("Duplicate revision", duplicate[0]);
-    }
-
-    @Test
-    public void uniqueRevision() throws Exception {
-        //Revision.setClock(new Clock.Virtual());
-        final BlockingQueue<Revision> revisionQueue = Queues.newLinkedBlockingQueue();
-        int noOfThreads = 60;
-        final int noOfLoops = 1000;
-        List<Thread> workers = new ArrayList<Thread>();
-        final AtomicBoolean stop = new AtomicBoolean();
-        final CountDownLatch startLatch = new CountDownLatch(1);
-        final CountDownLatch stopLatch = new CountDownLatch(noOfThreads);
-        for (int i = 0; i < noOfThreads; i++) {
-            workers.add(new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    Uninterruptibles.awaitUninterruptibly(startLatch);
-                    for (int j = 0; j < noOfLoops && !stop.get(); j++) {
-                        revisionQueue.add(Revision.newRevision(1));
-                    }
-                    stopLatch.countDown();
-                }
-            }));
-        }
-
-        final List<Revision> duplicates = Lists.newArrayList();
-        final Set<Revision> seenRevs = Sets.newHashSet();
-        workers.add(new Thread(new Runnable() {
-            @Override
-            public void run() {
-                startLatch.countDown();
-
-                while (!stop.get()) {
-                    List<Revision> revs = Lists.newArrayList();
-                    Queues.drainUninterruptibly(revisionQueue, revs, 5, 100, TimeUnit.MILLISECONDS);
-                    record(revs);
-                }
-
-                List<Revision> revs = Lists.newArrayList();
-                revisionQueue.drainTo(revs);
-                record(revs);
-            }
-
-            private void record(List<Revision> revs) {
-                for (Revision rev : revs) {
-                    if (!seenRevs.add(rev)) {
-                        duplicates.add(rev);
-                    }
-                }
-
-                if (!duplicates.isEmpty()) {
-                    stop.set(true);
-                }
-            }
-        }));
-
-        for (Thread t : workers) {
-            t.start();
-        }
-
-        stopLatch.await();
-        stop.set(true);
-
-        for (Thread t : workers) {
-            t.join();
-        }
-        assertTrue(String.format("Duplicate rev seen %s %n Seen %s", duplicates, seenRevs), duplicates.isEmpty());
-    }
-
 }
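
For readers following the removed clusterCompare2 test: RevisionComparator orders revisions from different cluster nodes by the revision at which each was seen, falling back to cluster id on ties; the revision strings in the test data parse as r<timestamp>-<counter>-<clusterId>. The tie-break case the test asserted, reduced to its essence:

    RevisionComparator comp = new RevisionComparator(1);
    // c2sync and c3sync were both seen at r2-1-0 ...
    comp.add(Revision.fromString("r4-1-2"), Revision.fromString("r2-1-0"));
    comp.add(Revision.fromString("r2-0-3"), Revision.fromString("r2-1-0"));
    // ... so the comparison falls back to clusterId: 2 < 3
    assertTrue(comp.compare(Revision.fromString("r4-1-2"),
                            Revision.fromString("r2-0-3")) < 0);
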
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndexCostTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndexCostTest.java
index cb76941..e04b34c 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndexCostTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedIndexCostTest.java
@@ -49,7 +49,7 @@
      */
     private static class AlwaysIndexedOrderedPropertyIndex extends OrderedPropertyIndex {
         @Override
-        AlwaysIndexedLookup getLookup(NodeState root) {
+        PropertyIndexLookup getLookup(NodeState root) {
             return new AlwaysIndexedLookup(root);
         }
 
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndexQueryTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndexQueryTest.java
index 58ec326..0cc5591 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndexQueryTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/property/OrderedPropertyIndexQueryTest.java
@@ -19,7 +19,6 @@
 import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertFalse;
 import static junit.framework.Assert.assertTrue;
-import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
 import static org.apache.jackrabbit.JcrConstants.JCR_SYSTEM;
 import static org.apache.jackrabbit.JcrConstants.NT_UNSTRUCTURED;
 import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_NODE_TYPES;
@@ -33,17 +32,12 @@
 
 import javax.jcr.RepositoryException;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import org.apache.jackrabbit.JcrConstants;
 import org.apache.jackrabbit.oak.api.CommitFailedException;
 import org.apache.jackrabbit.oak.api.PropertyValue;
 import org.apache.jackrabbit.oak.api.ResultRow;
 import org.apache.jackrabbit.oak.api.Tree;
 import org.apache.jackrabbit.oak.api.Type;
-import org.apache.jackrabbit.oak.commons.PathUtils;
 import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
 import org.apache.jackrabbit.oak.plugins.index.IndexUpdateProvider;
 import org.apache.jackrabbit.oak.plugins.index.IndexUtils;
@@ -62,9 +56,13 @@
 import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 import org.apache.jackrabbit.oak.util.NodeUtil;
-import org.junit.Ignore;
 import org.junit.Test;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
 public class OrderedPropertyIndexQueryTest extends BasicOrderedPropertyIndexQueryTest {
     private static final EditorHook HOOK = new EditorHook(new IndexUpdateProvider(
         new OrderedPropertyIndexEditorProvider()));
@@ -162,7 +160,7 @@
         Tree test = rTree.addChild("test");
         Calendar start = midnightFirstJan2013();
         List<ValuePathTuple> nodes = addChildNodes(
-                generateOrderedDates(NUMBER_OF_NODES, direction, start), test, direction, Type.DATE);
+            generateOrderedDates(NUMBER_OF_NODES, direction, start), test, direction, Type.DATE);
         root.commit();
 
         Calendar searchForCalendar = (Calendar) start.clone();
@@ -243,14 +241,14 @@
         Tree test = rTree.addChild("test");
         Calendar start = midnightFirstJan2013();
         addChildNodes(
-                generateOrderedDates(NUMBER_OF_NODES, direction, start), test, direction, Type.DATE);
+            generateOrderedDates(NUMBER_OF_NODES, direction, start), test, direction, Type.DATE);
         root.commit();
 
         Calendar searchForCalendar = (Calendar) start.clone();
         searchForCalendar.add(Calendar.HOUR_OF_DAY, -36);
         String searchFor = ISO_8601_2000.format(searchForCalendar.getTime());
         Map<String, PropertyValue> filter = ImmutableMap.of(ORDERED_PROPERTY,
-                PropertyValues.newDate(searchFor));
+            PropertyValues.newDate(searchFor));
         Iterator<? extends ResultRow> results = executeQuery(
             String.format(query, ORDERED_PROPERTY, ORDERED_PROPERTY), SQL2, filter).getRows()
             .iterator();
@@ -314,9 +312,9 @@
         // querying
         Iterator<? extends ResultRow> results;
         String query = String.format(
-                "SELECT * from [nt:base] WHERE %s IS NOT NULL ORDER BY %s",
-                ORDERED_PROPERTY,
-                ORDERED_PROPERTY);
+            "SELECT * from [nt:base] WHERE %s IS NOT NULL ORDER BY %s",
+            ORDERED_PROPERTY,
+            ORDERED_PROPERTY);
         results = executeQuery(query, SQL2, null)
             .getRows().iterator();
         assertRightOrder(nodes, results);
@@ -324,41 +322,6 @@
         setTravesalEnabled(true);
     }
 
-    @Test @Ignore("OAK-1763")  // FIXME OAK-1763
-    public void orderByOnDouble() throws CommitFailedException, ParseException, RepositoryException {
-        setTravesalEnabled(false);
-
-        Tree test = root.getTree("/").addChild("test");
-
-        double v1 = 1.0E18;
-        double v2 = 2.0E17;
-        assertTrue(v2 <= v1);  // To be super sure ;-)
-
-        Tree child1 = test.addChild(String.valueOf(v1));
-        child1.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME);
-        child1.setProperty(ORDERED_PROPERTY, v1, Type.DOUBLE);
-
-        Tree child2 = test.addChild(String.valueOf(v2));
-        child2.setProperty(JCR_PRIMARYTYPE, NT_UNSTRUCTURED, Type.NAME);
-        child2.setProperty(ORDERED_PROPERTY, v2, Type.DOUBLE);
-
-        root.commit();
-
-        String query = String.format(
-                "SELECT * from [nt:base] WHERE %s IS NOT NULL ORDER BY %s",
-                ORDERED_PROPERTY,
-                ORDERED_PROPERTY);
-        Iterator<? extends ResultRow> results = executeQuery(query, SQL2, null).getRows().iterator();
-
-        assertTrue(results.hasNext());
-        double r1 = Double.valueOf(PathUtils.getName(results.next().getPath()));
-        assertTrue(results.hasNext());
-        double r2 = Double.valueOf(PathUtils.getName(results.next().getPath()));
-        assertTrue(r1 <= r2);
-
-        setTravesalEnabled(true);
-    }
-
     @Test
     public void orderByQueryNoWhere() throws CommitFailedException, ParseException {
         setTravesalEnabled(false);
@@ -384,32 +347,6 @@
         setTravesalEnabled(true);
     }
 
-    @Test @Ignore("OAK-1763")  // FIXME OAK-1763
-    public void orderByQueryOnSpecialChars() throws CommitFailedException, ParseException {
-        setTravesalEnabled(false);
-
-        // index automatically created by the framework:
-        // {@code createTestIndexNode()}
-
-        Tree rTree = root.getTree("/");
-        Tree test = rTree.addChild("test");
-        List<String> values = Lists.newArrayList("%", " ");
-        List<ValuePathTuple> nodes = addChildNodes(values, test,
-                OrderDirection.ASC, Type.STRING);
-        root.commit();
-
-        // querying
-        Iterator<? extends ResultRow> results;
-        String query = String.format(
-                "SELECT * from [nt:base] ORDER BY %s",
-                ORDERED_PROPERTY);
-        results = executeQuery(query, SQL2, null)
-                .getRows().iterator();
-        assertRightOrder(nodes, results);
-
-        setTravesalEnabled(true);
-    }
-
     @Test
     public void planOderByNoWhere() throws IllegalArgumentException, RepositoryException,
                                    CommitFailedException {
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilderTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilderTest.java
index f756692..dd3e2cd 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilderTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/plugins/memory/MemoryNodeBuilderTest.java
@@ -16,6 +16,18 @@
  */
 package org.apache.jackrabbit.oak.plugins.memory;
 
+import javax.annotation.Nonnull;
+
+import com.google.common.collect.ImmutableSet;
+
+import org.apache.jackrabbit.oak.api.PropertyState;
+import org.apache.jackrabbit.oak.spi.state.AbstractNodeState;
+import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
+import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
+import org.apache.jackrabbit.oak.spi.state.NodeState;
+import org.junit.Before;
+import org.junit.Test;
+
 import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertFalse;
 import static junit.framework.Assert.assertTrue;
@@ -23,32 +35,12 @@
 import static org.apache.jackrabbit.oak.api.Type.STRING;
 import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
 
-import java.util.Collection;
-
-import javax.annotation.Nonnull;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.jackrabbit.oak.api.PropertyState;
-import org.apache.jackrabbit.oak.spi.state.AbstractNodeState;
-import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
-import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
-import org.apache.jackrabbit.oak.spi.state.NodeState;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
 public class MemoryNodeBuilderTest {
 
-    private final NodeState base;
+    private NodeState base;
 
-    public MemoryNodeBuilderTest(NodeState base) {
-        this.base = base;
-    }
-
-    @Parameterized.Parameters
-    public static Collection<Object[]> fixtures() {
+    @Before
+    public void setUp() {
         NodeBuilder builder = EMPTY_NODE.builder();
         builder.setProperty("a", 1L);
         builder.setProperty("b", 2L);
@@ -56,11 +48,7 @@
         builder.child("x").child("q");
         builder.child("y");
         builder.child("z");
-        NodeState base = builder.getNodeState();
-        return ImmutableList.of(
-            new Object[] { base },
-            new Object[] { ModifiedNodeState.squeeze(base) }
-        );
+        base = builder.getNodeState();
     }
 
     @Test
@@ -214,41 +202,6 @@
     }
 
     @Test
-    public void testReplacedStatus() {
-        NodeBuilder root = base.builder();
-        NodeBuilder x = root.getChildNode("x");
-        x.setChildNode("new");
-        assertFalse(x.isReplaced());
-    }
-
-    @Test
-    public void testReplacedStatus2() {
-        NodeBuilder x = base.builder().getChildNode("x");
-        NodeBuilder q = x.getChildNode("q");
-        q.remove();
-        assertFalse(q.isReplaced());
-        x.setChildNode("q").setProperty("a", "b");
-        assertTrue(q.isReplaced());
-    }
-
-    @Test
-    public void testReplacedStatus3() {
-        NodeBuilder x = base.builder().getChildNode("x");
-        NodeBuilder q = x.getChildNode("q");
-        assertFalse(q.isReplaced());
-        x.setChildNode("q").setProperty("a", "b");
-        assertTrue(q.isReplaced());
-    }
-
-    @Test
-    public void removeParent() {
-        NodeBuilder x = base.builder().getChildNode("x");
-        NodeBuilder y = x.setChildNode("y");
-        x.remove();
-        assertFalse(x.exists());
-        // the child builder must also report non-existence once its parent is gone
-        assertFalse(y.exists());
-    }
-
-    @Test
     public void testRemovedStatus() {
         NodeBuilder root = base.builder();
         NodeBuilder x = root.child("x");
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImplTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImplTest.java
index caabeb2..0cc8623 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImplTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authentication/token/TokenProviderImplTest.java
@@ -23,10 +23,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
 import javax.jcr.AccessDeniedException;
 import javax.jcr.Credentials;
 import javax.jcr.GuestCredentials;
@@ -34,14 +30,11 @@
 
 import org.apache.jackrabbit.JcrConstants;
 import org.apache.jackrabbit.api.security.authentication.token.TokenCredentials;
-import org.apache.jackrabbit.oak.api.ContentSession;
 import org.apache.jackrabbit.oak.api.PropertyState;
-import org.apache.jackrabbit.oak.api.Root;
 import org.apache.jackrabbit.oak.api.Tree;
 import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.plugins.identifier.IdentifierManager;
 import org.apache.jackrabbit.oak.spi.security.authentication.ImpersonationCredentials;
-import org.apache.jackrabbit.oak.spi.security.authentication.token.TokenConfiguration;
 import org.apache.jackrabbit.oak.spi.security.authentication.token.TokenInfo;
 import org.apache.jackrabbit.oak.spi.security.authentication.token.TokenProvider;
 import org.apache.jackrabbit.oak.util.NodeUtil;
@@ -315,51 +308,6 @@
         }
     }
 
-    /**
-     * @see <a href="https://issues.apache.org/jira/browse/OAK-1697">OAK-1697</a>
-     */
-    @Test
-    public void testValidTokenCredentialsWithConflict() throws Exception {
-        ExecutorService pool = Executors.newFixedThreadPool(10);
-        List<ContentSession> sessions = new ArrayList<ContentSession>();
-
-        try {
-            TokenConfiguration tc = getSecurityProvider().getConfiguration(
-                    TokenConfiguration.class);
-            SimpleCredentials sc = (SimpleCredentials) getAdminCredentials();
-
-            List<TokenProvider> tokenProviders = new ArrayList<TokenProvider>();
-
-            for (int i = 0; i < 10; i++) {
-                ContentSession session = login(getAdminCredentials());
-                Root r = session.getLatestRoot();
-                tokenProviders.add(tc.getTokenProvider(r));
-                sessions.add(session);
-            }
-
-            ArrayList<DataFuture> list = new ArrayList<DataFuture>();
-
-            for (TokenProvider tokenProvider : tokenProviders) {
-                list.add(createDataFuture(pool, tokenProvider, sc.getUserID(),
-                        Collections.<String, Object> emptyMap()));
-            }
-
-            for (DataFuture df : list) {
-                assertNotNull(df.future.get());
-            }
-        } finally {
-            for (ContentSession session : sessions) {
-                if (session != null) {
-                    session.close();
-                }
-            }
-
-            if (pool != null) {
-                pool.shutdown();
-            }
-        }
-    }
-
     //--------------------------------------------------------------------------
     private static void assertTokenInfo(TokenInfo info, String userId) {
         assertNotNull(info);
@@ -382,23 +330,4 @@
         tree.setProperty(tokenTree.getProperty("rep:token.key"));
         tree.setProperty(tokenTree.getProperty("rep:token.exp"));
     }
-    
-    private static class DataFuture {
-        public Future<TokenInfo> future;
-
-        public DataFuture(Future<TokenInfo> future) {
-            super();
-            this.future = future;
-        }
-    }
-    
-    private DataFuture createDataFuture(ExecutorService pool, final TokenProvider tp, final String userId, final Map<String, ?> attributes) {
-        Future<TokenInfo> future = pool.submit(new Callable<TokenInfo>() {
-            @Override
-            public TokenInfo call() throws Exception {
-                return tp.createToken(userId, attributes);
-            }
-        });
-        return new DataFuture(future);
-    }
 }
\ No newline at end of file
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/accesscontrol/ACLTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/accesscontrol/ACLTest.java
index e0ec74c..0cebe87 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/accesscontrol/ACLTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/accesscontrol/ACLTest.java
@@ -800,11 +800,5 @@
         public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Tree tree) {
             throw new UnsupportedOperationException();
         }
-
-        @Nonnull
-        @Override
-        public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions) {
-            throw new UnsupportedOperationException();
-        }
     }
 }
\ No newline at end of file
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/evaluation/AbstractOakCoreTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/evaluation/AbstractOakCoreTest.java
index 246d755..cfd880c 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/evaluation/AbstractOakCoreTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/evaluation/AbstractOakCoreTest.java
@@ -69,9 +69,6 @@
     @Override
     public void after() throws Exception {
         try {
-            // revert uncommited changes
-            root.refresh();
-
             // clean up policies at the root node
             AccessControlManager acMgr = getAccessControlManager(root);
             AccessControlPolicy[] policies = acMgr.getPolicies("/");
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImplTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImplTest.java
index ee86b2f..5c77ffb 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImplTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/restriction/RestrictionProviderImplTest.java
@@ -94,7 +94,7 @@
         map.put(PropertyStates.createProperty(REP_NT_NAMES, ntNames, Type.NAMES), new NodeTypePattern(ntNames));
 
         NodeUtil tree = new NodeUtil(root.getTree("/")).getOrAddTree("testPath", JcrConstants.NT_UNSTRUCTURED);
-        Tree restrictions = tree.addChild(REP_RESTRICTIONS, NT_REP_RESTRICTIONS).getTree();
+        Tree restrictions = tree.addChild("restrictions", NT_REP_RESTRICTIONS).getTree();
 
         // test restrictions individually
         for (Map.Entry<PropertyState, RestrictionPattern> entry : map.entrySet()) {
@@ -115,54 +115,6 @@
     }
 
     @Test
-    public void testGetPatternForAllSupported() throws Exception {
-        Map<PropertyState, RestrictionPattern> map = newHashMap();
-        map.put(PropertyStates.createProperty(REP_GLOB, "/*/jcr:content"), GlobPattern.create("/testPath", "/*/jcr:content"));
-        List<String> ntNames = ImmutableList.of(JcrConstants.NT_FOLDER, JcrConstants.NT_LINKEDFILE);
-        map.put(PropertyStates.createProperty(REP_NT_NAMES, ntNames, Type.NAMES), new NodeTypePattern(ntNames));
-        List<String> prefixes = ImmutableList.of("rep", "jcr");
-        map.put(PropertyStates.createProperty(REP_PREFIXES, prefixes, Type.STRINGS), new PrefixPattern(prefixes));
-
-        NodeUtil tree = new NodeUtil(root.getTree("/")).getOrAddTree("testPath", JcrConstants.NT_UNSTRUCTURED);
-        Tree restrictions = tree.addChild(REP_RESTRICTIONS, NT_REP_RESTRICTIONS).getTree();
-        for (Map.Entry<PropertyState, RestrictionPattern> entry : map.entrySet()) {
-            restrictions.setProperty(entry.getKey());
-        }
-
-        RestrictionPattern pattern = provider.getPattern("/testPath", restrictions);
-        assertTrue(pattern instanceof CompositePattern);
-    }
-
-    @Test
-    public void testGetPatternFromRestrictions() throws Exception {
-        Map<PropertyState, RestrictionPattern> map = newHashMap();
-        map.put(PropertyStates.createProperty(REP_GLOB, "/*/jcr:content"), GlobPattern.create("/testPath", "/*/jcr:content"));
-        List<String> ntNames = ImmutableList.of(JcrConstants.NT_FOLDER, JcrConstants.NT_LINKEDFILE);
-        map.put(PropertyStates.createProperty(REP_NT_NAMES, ntNames, Type.NAMES), new NodeTypePattern(ntNames));
-        List<String> prefixes = ImmutableList.of("rep", "jcr");
-        map.put(PropertyStates.createProperty(REP_PREFIXES, prefixes, Type.STRINGS), new PrefixPattern(prefixes));
-
-        NodeUtil tree = new NodeUtil(root.getTree("/")).getOrAddTree("testPath", JcrConstants.NT_UNSTRUCTURED);
-        Tree restrictions = tree.addChild(REP_RESTRICTIONS, NT_REP_RESTRICTIONS).getTree();
-
-        // test restrictions individually
-        for (Map.Entry<PropertyState, RestrictionPattern> entry : map.entrySet()) {
-            restrictions.setProperty(entry.getKey());
-
-            RestrictionPattern pattern = provider.getPattern("/testPath", provider.readRestrictions("/testPath", tree.getTree()));
-            assertEquals(entry.getValue(), pattern);
-            restrictions.removeProperty(entry.getKey().getName());
-        }
-
-        // test combination on multiple restrictions
-        for (Map.Entry<PropertyState, RestrictionPattern> entry : map.entrySet()) {
-            restrictions.setProperty(entry.getKey());
-        }
-        RestrictionPattern pattern = provider.getPattern("/testPath", provider.readRestrictions("/testPath", tree.getTree()));
-        assertTrue(pattern instanceof CompositePattern);
-    }
-
-    @Test
     public void testValidateGlobRestriction() throws Exception {
         Tree t = new NodeUtil(root.getTree("/")).addChild("testTree", "nt:unstructured").getTree();
         String path = t.getPath();
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/commit/BackgroundObserverTest.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/commit/BackgroundObserverTest.java
index a889a03..b961fd6 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/commit/BackgroundObserverTest.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/commit/BackgroundObserverTest.java
@@ -40,7 +40,6 @@
 
 public class BackgroundObserverTest {
     private static final CommitInfo COMMIT_INFO = new CommitInfo("no-session", null);
-    public static final int CHANGE_COUNT = 1024;
 
     private final List<Runnable> assertions = Lists.newArrayList();
     private CountDownLatch doneCounter;
@@ -53,7 +52,7 @@
     public void concurrentObservers() throws InterruptedException {
         Observer observer = createCompositeObserver(newFixedThreadPool(16), 128);
 
-        for (int k = 0; k < CHANGE_COUNT; k++) {
+        for (int k = 0; k < 1024; k++) {
             contentChanged(observer, k);
         }
         done(observer);
@@ -91,11 +90,6 @@
     }
 
     private Observer createBackgroundObserver(ExecutorService executor) {
-        // Ensure the observation revision queue is sufficiently large to hold
-        // all revisions. Otherwise waiting for events might block since pending
-        // events would only be released on a subsequent commit. See OAK-1491
-        int queueLength = CHANGE_COUNT + 1;
-
         return new BackgroundObserver(new Observer() {
             // Need synchronised list here to maintain correct memory barrier
             // when this is passed on to done(List<Runnable>)
@@ -123,7 +117,7 @@
             private Long getP(NodeState previous) {
                 return previous.getProperty("p").getValue(Type.LONG);
             }
-        }, executor, queueLength);
+        }, executor, 1024);
     }
 
 }
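
The sizing comment deleted above (OAK-1491) is worth keeping in mind: if the observation queue cannot hold every pending revision, waiting for events may block until a later commit flushes the queue. The removed sizing rule, spelled out (delegate and executor stand for the observer and thread pool the test already builds):

    int changeCount = 1024;            // the loop bound now hard-coded above
    int queueLength = changeCount + 1; // one spare slot avoids blocking (OAK-1491)
    Observer observer = new BackgroundObserver(delegate, executor, queueLength);
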
diff --git a/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/TestProvider.java b/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/TestProvider.java
index 5254d03..d837b26 100644
--- a/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/TestProvider.java
+++ b/oak-core/src/test/java/org/apache/jackrabbit/oak/spi/security/authorization/restriction/TestProvider.java
@@ -17,7 +17,6 @@
 package org.apache.jackrabbit.oak.spi.security.authorization.restriction;
 
 import java.util.Map;
-import java.util.Set;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
@@ -46,17 +45,6 @@
         return (hasRestriction) ? new MatchingPattern() : RestrictionPattern.EMPTY;
     }
 
-    @Nonnull
-    @Override
-    public RestrictionPattern getPattern(@Nullable String oakPath, @Nonnull Set<Restriction> restrictions) {
-        for (Restriction r : restrictions) {
-            if (getSupportedRestrictions(oakPath).contains(r.getDefinition())) {
-                return new MatchingPattern();
-            }
-        }
-        return RestrictionPattern.EMPTY;
-    }
-
     private static final class MatchingPattern implements RestrictionPattern {
 
         @Override
diff --git a/oak-core/src/test/resources/org/apache/jackrabbit/oak/query/sql2_index.txt b/oak-core/src/test/resources/org/apache/jackrabbit/oak/query/sql2_index.txt
index 92d8faf..9c5e53d 100644
--- a/oak-core/src/test/resources/org/apache/jackrabbit/oak/query/sql2_index.txt
+++ b/oak-core/src/test/resources/org/apache/jackrabbit/oak/query/sql2_index.txt
@@ -24,12 +24,6 @@
 # * new tests are typically added on top, after the syntax docs
 # * use ascii characters only
 
-explain select *
-  from [nt:base]
-  where [jcr:uuid] like '%'
-[nt:base] as [nt:base] /* property jcr:uuid
-  where [nt:base].[jcr:uuid] like cast('%' as string) */
-
 explain select e.[jcr:path]
   from [nt:base] as a
   inner join [nt:base] as b on ischildnode(b, a)
diff --git a/oak-doc/pom.xml b/oak-doc/pom.xml
index f1902ab..f67636f 100644
--- a/oak-doc/pom.xml
+++ b/oak-doc/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>0.20-SNAPSHOT</version>
+    <version>1.0.0-SNAPSHOT</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-http/pom.xml b/oak-http/pom.xml
index e2a735d..38c5a11 100644
--- a/oak-http/pom.xml
+++ b/oak-http/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-it/mk/pom.xml b/oak-it/mk/pom.xml
index 9eb7429..101658f 100644
--- a/oak-it/mk/pom.xml
+++ b/oak-it/mk/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-it/osgi/pom.xml b/oak-it/osgi/pom.xml
index c8115e9..167a497 100644
--- a/oak-it/osgi/pom.xml
+++ b/oak-it/osgi/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-it/pom.xml b/oak-it/pom.xml
index e6c4cc6..56e1fa1 100644
--- a/oak-it/pom.xml
+++ b/oak-it/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-jcr/pom.xml b/oak-jcr/pom.xml
index 87f24ce..dda650b 100644
--- a/oak-jcr/pom.xml
+++ b/oak-jcr/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
@@ -296,7 +296,7 @@
     <dependency>
       <groupId>com.h2database</groupId>
       <artifactId>h2</artifactId>
-      <version>${h2.version}</version>
+      <version>1.3.175</version>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/Jcr.java b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/Jcr.java
index 23eee79..cbbc758 100644
--- a/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/Jcr.java
+++ b/oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/Jcr.java
@@ -56,7 +56,7 @@
 import org.apache.jackrabbit.oak.spi.state.NodeStore;
 
 public class Jcr {
-    public static final int DEFAULT_OBSERVATION_QUEUE_LENGTH = 1000;
+    private static final int DEFAULT_OBSERVATION_QUEUE_LENGTH = 1000;
 
     private final Oak oak;
 
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AbstractRepositoryTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AbstractRepositoryTest.java
index 27575c3..0f70446 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AbstractRepositoryTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AbstractRepositoryTest.java
@@ -53,7 +53,6 @@
     private NodeStore nodeStore;
     private Repository repository;
     private Session adminSession;
-    protected int observationQueueLength = Jcr.DEFAULT_OBSERVATION_QUEUE_LENGTH;
 
     /**
      * The system property "ns-fixtures" can be used to provide a
@@ -119,9 +118,7 @@
     protected Repository getRepository() {
         if (repository == null) {
             nodeStore = fixture.createNodeStore();
-            repository  = new Jcr(nodeStore)
-                    .withObservationQueueLength(observationQueueLength)
-                    .createRepository();
+            repository  = new Jcr(nodeStore).createRepository();
         }
         return repository;
     }
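
With DEFAULT_OBSERVATION_QUEUE_LENGTH now private in Jcr, the removed test hook could no longer compile against it. For reference, wiring a custom queue length through the builder looked like this (2048 is an arbitrary example value, not a default):

    Repository repository = new Jcr(nodeStore)
            .withObservationQueueLength(2048)
            .createRepository();
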
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AutoCreatedItemsTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AutoCreatedItemsTest.java
index 4fa98fb..3d9bc79 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AutoCreatedItemsTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/AutoCreatedItemsTest.java
@@ -16,16 +16,16 @@
  */
 package org.apache.jackrabbit.oak.jcr;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import javax.jcr.Node;
 import javax.jcr.Session;
 import javax.jcr.Value;
 
 import org.junit.Test;
 
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 /**
  * {@code AutoCreatedItemsTest} checks if auto-created nodes and properties
  * are added correctly as defined in the node type definition.
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/CompatibilityIssuesTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/CompatibilityIssuesTest.java
index f453987..dd58027 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/CompatibilityIssuesTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/CompatibilityIssuesTest.java
@@ -325,7 +325,7 @@
             session.getNode(testNodePath).setProperty("foo2","bar2");
             session.save();
 
-            latch.await(60, TimeUnit.SECONDS);
+            latch.await(10, TimeUnit.SECONDS);
 
             //Only one event is recorded for foo2 modification
             assertEquals(1,events.size());
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddIT.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddIT.java
index 2aa7ab2..0f43a32 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddIT.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddIT.java
@@ -31,6 +31,7 @@
 import javax.jcr.Session;
 
 import com.google.common.collect.Iterators;
+import org.junit.Assume;
 import org.junit.Test;
 
 /**
@@ -76,6 +77,8 @@
 
     @Test @SuppressWarnings("unchecked")
     public void addNodesSameParent() throws Exception {
+        // takes too long with RDBDocumentStore
+        Assume.assumeTrue(fixture != NodeStoreFixture.DOCUMENT_JDBC);
         List<Exception> exceptions = Collections.synchronizedList(
                 new ArrayList<Exception>());
         // use nt:unstructured to force conflicts on :childOrder property
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddNodesClusterIT.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddNodesClusterIT.java
index 5224c1b..e1cb609 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddNodesClusterIT.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddNodesClusterIT.java
@@ -22,7 +22,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
-import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
@@ -56,7 +55,6 @@
     private static final int NUM_CLUSTER_NODES = 3;
     private static final int NODE_COUNT = 100;
     private static final int LOOP_COUNT = 10;
-    private static final int WORKER_COUNT = 20;
     private static final String PROP_NAME = "testcount";
     private static final ScheduledExecutorService EXECUTOR = Executors.newSingleThreadScheduledExecutor();
 
@@ -115,56 +113,6 @@
     }
 
     @Test
-    public void addNodesConcurrent2() throws Exception {
-        for (int i = 0; i < NUM_CLUSTER_NODES; i++) {
-            DocumentMK mk = new DocumentMK.Builder()
-                    .setMongoDB(createConnection().getDB())
-                    .setClusterId(i + 1).open();
-            mks.add(mk);
-        }
-        final Map<String, Exception> exceptions = Collections.synchronizedMap(
-                new HashMap<String, Exception>());
-        final CountDownLatch latch = new CountDownLatch(1);
-        for (int i = 0; i < mks.size(); i++) {
-            DocumentMK mk = mks.get(i);
-            final Repository repo = new Jcr(mk.getNodeStore()).createRepository();
-            repos.add(repo);
-            for (int w = 0; w < WORKER_COUNT; w++) {
-                final String name = "Worker-" + (i + 1) + "-" + (w + 1);
-                workers.add(new Thread(new Runnable() {
-                    @Override
-                    public void run() {
-                        try {
-                            latch.await();
-                            Session session = createAdminSession(repo);
-                            Node node = session.getRootNode().addNode(name, "oak:Unstructured");
-                            for (int j = 0; j < NODE_COUNT; j++) {
-                                node.addNode("node" + j);
-                                session.save();
-                            }
-                        } catch (RepositoryException e) {
-                            exceptions.put(Thread.currentThread().getName(), e);
-                        } catch (InterruptedException e) {
-                            Thread.currentThread().interrupt();
-                        }
-                    }
-                }));
-            }
-        }
-        for (Thread t : workers) {
-            t.start();
-        }
-        latch.countDown();
-        for (Thread t : workers) {
-            t.join();
-        }
-        for (Map.Entry<String, Exception> entry : exceptions.entrySet()) {
-            // System.out.println("exception in thread " + entry.getKey());
-            throw entry.getValue();
-        }
-    }
-
-    @Test
     public void addNodes() throws Exception {
         for (int i = 0; i < 2; i++) {
             DocumentMK mk = new DocumentMK.Builder()
@@ -393,19 +341,16 @@
         @Override
         public void run() {
             try {
-                Session session = createAdminSession(repo);
+                Session session = repo.login(new SimpleCredentials(
+                        "admin", "admin".toCharArray()));
                 ensureIndex(session.getRootNode(), PROP_NAME);
+
                 String nodeName = "testroot-" + Thread.currentThread().getName();
                 createNodes(session, nodeName, LOOP_COUNT, NODE_COUNT, exceptions);
             } catch (Exception e) {
                 exceptions.put(Thread.currentThread().getName(), e);
             }
         }
-
-    }
-
-    private Session createAdminSession(Repository repository) throws RepositoryException {
-        return repository.login(new SimpleCredentials("admin", "admin".toCharArray()));
     }
 
     private void createNodes(Session session,
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddReferenceTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddReferenceTest.java
index 7c4bdd6..a084b21 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddReferenceTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddReferenceTest.java
@@ -32,6 +32,7 @@
 
 import com.google.common.collect.Iterators;
 import org.junit.After;
+import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -75,6 +76,7 @@
     @SuppressWarnings("unchecked")
     @Test
     public void addReferences() throws Exception {
+        Assume.assumeTrue(fixture != NodeStoreFixture.DOCUMENT_JDBC);  // FIXME OAK-1472
         List<Exception> exceptions = Collections.synchronizedList(new ArrayList<Exception>());
         List<Thread> worker = new ArrayList<Thread>();
         for (int i = 0; i < NUM_WORKERS; i++) {
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddRemoveIT.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddRemoveIT.java
index 794adab..62ff9ac 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddRemoveIT.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/ConcurrentAddRemoveIT.java
@@ -28,6 +28,7 @@
 import javax.jcr.RepositoryException;
 import javax.jcr.Session;
 
+import org.junit.Assume;
 import org.junit.Test;
 
 /**
@@ -46,6 +47,7 @@
 
     @Test
     public void concurrent() throws Exception {
+        Assume.assumeTrue(fixture != NodeStoreFixture.DOCUMENT_JDBC);  // FIXME OAK-1488
         List<Exception> exceptions = Collections.synchronizedList(new ArrayList<Exception>());
         Node test = getAdminSession().getRootNode().addNode("test");
         List<Thread> worker = new ArrayList<Thread>();
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/LargeOperationIT.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/LargeOperationIT.java
index a1ed77f..67c5698 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/LargeOperationIT.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/LargeOperationIT.java
@@ -50,8 +50,6 @@
 import javax.jcr.observation.EventIterator;
 import javax.jcr.observation.EventListener;
 
-import ch.qos.logback.classic.Level;
-import ch.qos.logback.classic.LoggerContext;
 import com.google.common.collect.Lists;
 import org.apache.commons.math3.distribution.BinomialDistribution;
 import org.apache.commons.math3.exception.MathIllegalArgumentException;
@@ -63,8 +61,6 @@
 import org.apache.jackrabbit.api.JackrabbitRepository;
 import org.apache.jackrabbit.oak.jcr.NodeStoreFixture.DocumentFixture;
 import org.apache.jackrabbit.oak.jcr.NodeStoreFixture.SegmentFixture;
-import org.apache.jackrabbit.oak.jcr.session.RefreshStrategy;
-import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore;
 import org.apache.jackrabbit.oak.plugins.segment.SegmentStore;
 import org.apache.jackrabbit.oak.plugins.segment.file.FileStore;
 import org.apache.jackrabbit.oak.spi.state.NodeStore;
@@ -161,14 +157,6 @@
 
     @Before
     public void setup() throws RepositoryException {
-        // Disable noisy logging we want to ignore for these tests
-        ((LoggerContext)LoggerFactory.getILoggerFactory())
-                .getLogger(DocumentNodeStore.class).setLevel(Level.ERROR);
-        ((LoggerContext)LoggerFactory.getILoggerFactory())
-                .getLogger("org.apache.jackrabbit.oak.jcr.observation.ChangeProcessor").setLevel(Level.ERROR);
-        ((LoggerContext)LoggerFactory.getILoggerFactory())
-                .getLogger(RefreshStrategy.class).setLevel(Level.ERROR);
-
         nodeStore = fixture.createNodeStore();
         repository  = new Jcr(nodeStore).createRepository();
         session = createAdminSession();
@@ -293,7 +281,7 @@
             executionTimes.add(t);
             LOG.info("Copying {} node took {} ns/node", scale, t);
         }
-        boolean knownIssue = fixture.getClass() == DocumentFixture.class;  // FIXME OAK-1698
+        boolean knownIssue = fixture.getClass() == DocumentFixture.class;  // FIXME OAK-1414
         assertOnLgn("large copy", scales, executionTimes, knownIssue);
     }
 
@@ -325,7 +313,7 @@
             executionTimes.add(t);
             LOG.info("Moving {} node took {} ns/node", scale, t);
         }
-        boolean knownIssue = fixture.getClass() == DocumentFixture.class;  // FIXME OAK-1698
+        boolean knownIssue = fixture.getClass() == DocumentFixture.class;  // FIXME OAK-1415
         assertOnLgn("large move", scales, executionTimes, knownIssue);
     }
 
@@ -362,8 +350,7 @@
             executionTimes.add(t);
             LOG.info("Adding 100 siblings next to {} siblings took {} ns/node", scale, t);
         }
-        boolean knownIssue = fixture.getClass() == DocumentFixture.class;  // FIXME OAK-1698
-        assertOnLgn("many siblings", scales, executionTimes, knownIssue);
+        assertOnLgn("many siblings", scales, executionTimes, false);
     }
 
     /**
@@ -401,8 +388,7 @@
                 } catch (Exception ignore) {}
             }
         }
-        boolean knownIssue = fixture.getClass() == DocumentFixture.class;  // FIXME OAK-1698
-        assertOnLgn("large number of pending events", scales, executionTimes, knownIssue);
+        assertOnLgn("large number of pending events", scales, executionTimes, false);
     }
 
     @Test
@@ -424,8 +410,7 @@
                 executionTimes.add(t);
                 LOG.info("Adding {} nodes took {} ns/node", scale, t);
             }
-            boolean knownIssue = fixture.getClass() == DocumentFixture.class;  // FIXME OAK-1698
-            assertOnLgn("slow listeners", scales, executionTimes, knownIssue);
+            assertOnLgn("slow listeners", scales, executionTimes, false);
         } finally {
             delayedEventHandling.stop();
             result.get();
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/NodeStoreFixture.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/NodeStoreFixture.java
index 3732ee9..abd7183 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/NodeStoreFixture.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/NodeStoreFixture.java
@@ -19,11 +19,14 @@
 package org.apache.jackrabbit.oak.jcr;
 
 import java.io.Closeable;
+import java.io.File;
 import java.io.IOException;
 import java.util.UUID;
 
 import javax.sql.DataSource;
 
+import com.mongodb.DB;
+
 import org.apache.jackrabbit.mk.core.MicroKernelImpl;
 import org.apache.jackrabbit.oak.kernel.KernelNodeStore;
 import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
@@ -36,8 +39,6 @@
 import org.apache.jackrabbit.oak.spi.blob.BlobStore;
 import org.apache.jackrabbit.oak.spi.state.NodeStore;
 
-import com.mongodb.DB;
-
 /**
  * NodeStore fixture for parametrized tests.
  */
@@ -50,7 +51,7 @@
         public NodeStore createNodeStore() {
             return new CloseableNodeStore(new DocumentMK.Builder().open());
         }
-
+        
         @Override
         public NodeStore createNodeStore(int clusterNodeId) {
             MongoConnection connection;
@@ -80,21 +81,20 @@
     public static final NodeStoreFixture DOCUMENT_NS = createDocumentFixture("mongodb://localhost:27017/oak");
 
     public static final NodeStoreFixture DOCUMENT_JDBC = new NodeStoreFixture() {
-
-        private DataSource ds;
-
         @Override
         public NodeStore createNodeStore() {
             String id = UUID.randomUUID().toString();
-            this.ds = RDBDataSourceFactory.forJdbcUrl("jdbc:h2:mem:" + id, "sa", "");
-            return new DocumentMK.Builder().setRDBConnection(this.ds).getNodeStore();
+            String folder = (new File("target")).isDirectory() ? "target/" : "";
+            DataSource ds = RDBDataSourceFactory.forJdbcUrl("jdbc:h2:file:" + folder + id + ";MVCC=true", "sa", "");
+            return new DocumentMK.Builder().setRDBConnection(ds).getNodeStore();
         }
 
         @Override
         public NodeStore createNodeStore(int clusterNodeId) {
             try {
-                this.ds = RDBDataSourceFactory.forJdbcUrl("jdbc:h2:mem:oaknodes-" + clusterNodeId, "sa", "");
-                return new DocumentMK.Builder().setRDBConnection(this.ds).getNodeStore();
+                String folder = (new File("target")).isDirectory() ? "target/" : "";
+                DataSource ds = RDBDataSourceFactory.forJdbcUrl("jdbc:h2:file:" + folder + "oaknodes-" + clusterNodeId, "sa", "");
+                return new DocumentMK.Builder().setRDBConnection(ds).getNodeStore();
             } catch (Exception e) {
                 return null;
             }
@@ -105,13 +105,6 @@
             if (nodeStore instanceof DocumentNodeStore) {
                 ((DocumentNodeStore) nodeStore).dispose();
             }
-            if (this.ds instanceof Closeable) {
-                try {
-                    ((Closeable)this.ds).close();
-                } catch (IOException ex) {
-                    throw new RuntimeException(ex);
-                }
-            }
         }
     };
 
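Note: the DOCUMENT_JDBC fixture above now creates a file-backed H2 database under target/ (when present) instead of an in-memory one, and no longer keeps the DataSource in a field. A minimal standalone sketch of the same setup, assuming H2's JdbcDataSource is on the classpath; the RDBDataSourceFactory call in the diff is the actual Oak entry point:

    import java.io.File;
    import java.util.UUID;
    import javax.sql.DataSource;
    import org.h2.jdbcx.JdbcDataSource;

    public class H2FileDataSourceSketch {
        // Mirrors the fixture: store the database under target/ when running
        // from a Maven build, otherwise in the working directory.
        public static DataSource create() {
            String folder = new File("target").isDirectory() ? "target/" : "";
            String id = UUID.randomUUID().toString();
            JdbcDataSource ds = new JdbcDataSource();
            // MVCC=true enables multi-version concurrency control in H2 1.x
            ds.setURL("jdbc:h2:file:" + folder + id + ";MVCC=true");
            ds.setUser("sa");
            ds.setPassword("");
            return ds;
        }
    }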
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/OrderableNodesTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/OrderableNodesTest.java
index 25cfc2f..8533329 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/OrderableNodesTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/OrderableNodesTest.java
@@ -29,7 +29,6 @@
 import static junit.framework.Assert.assertEquals;
 import static junit.framework.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 public class OrderableNodesTest extends AbstractRepositoryTest {
 
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/RepositoryTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/RepositoryTest.java
index 081cc9c..02b4163 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/RepositoryTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/RepositoryTest.java
@@ -18,15 +18,6 @@
  */
 package org.apache.jackrabbit.oak.jcr;
 
-import static java.util.Arrays.asList;
-import static org.apache.jackrabbit.commons.JcrUtils.getChildNodes;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
-
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -38,7 +29,6 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
-
 import javax.jcr.Binary;
 import javax.jcr.GuestCredentials;
 import javax.jcr.ImportUUIDBehavior;
@@ -79,6 +69,15 @@
 import org.junit.Ignore;
 import org.junit.Test;
 
+import static java.util.Arrays.asList;
+import static org.apache.jackrabbit.commons.JcrUtils.getChildNodes;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
 public class RepositoryTest extends AbstractRepositoryTest {
     private static final String TEST_NODE = "test_node";
     private static final String TEST_PATH = '/' + TEST_NODE;
@@ -568,20 +567,6 @@
     }
 
     @Test
-    public void testIsNew() throws RepositoryException, InterruptedException {
-        Session session = getAdminSession();
-        Node root = session.getRootNode();
-        Node node1 = root.addNode("node1");
-        session.save();
-
-        node1.remove();
-        Node node2 = root.addNode("node2");
-        assertTrue("The Node is just added", node2.isNew());
-        Node node1Again = root.addNode("node1");
-        assertTrue("The Node is just added but has a remove in same commit", node1Again.isNew());
-    }
-
-    @Test
     public void testAddNodeWithExpandedName() throws RepositoryException {
         Session session = getAdminSession();
 
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationRefreshTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationRefreshTest.java
index d450d36..269f92e 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationRefreshTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationRefreshTest.java
@@ -78,10 +78,6 @@
 
     @Before
     public void setup() throws RepositoryException {
-        // Ensure the observation revision queue is sufficiently large to hold
-        // all revisions. Otherwise waiting for events might block since pending
-        // events would only be released on a subsequent commit. See OAK-1491
-        observationQueueLength = 1000000;
         Session session = getAdminSession();
 
         NodeTypeManager ntMgr = session.getWorkspace().getNodeTypeManager();
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationTest.java
index bdc412a..ba7d07f 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/observation/ObservationTest.java
@@ -77,11 +77,7 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 
-@RunWith(Parameterized.class)
-// Don't run "Parallelized" as this causes tests to timeout in "weak" environments
 public class ObservationTest extends AbstractRepositoryTest {
     public static final int ALL_EVENTS = NODE_ADDED | NODE_REMOVED | NODE_MOVED | PROPERTY_ADDED |
             PROPERTY_REMOVED | PROPERTY_CHANGED | PERSIST;
@@ -89,7 +85,7 @@
     private static final String REFERENCEABLE_NODE = "\"referenceable\"";
     private static final String TEST_PATH = '/' + TEST_NODE;
     private static final String TEST_TYPE = "mix:test";
-    public static final int TIME_OUT = 60;
+    public static final int TIME_OUT = 4;
 
     private Session observingSession;
     private ObservationManager observationManager;
@@ -464,7 +460,7 @@
         }).get(10, TimeUnit.SECONDS);
 
         // Make sure we see no more events
-        assertFalse(noEvents.wait(4, TimeUnit.SECONDS));
+        assertFalse(noEvents.wait(TIME_OUT, TimeUnit.SECONDS));
     }
 
     @Test
@@ -931,6 +927,10 @@
                 Futures.allAsList(expected).get(time, timeUnit);
             }
             catch (TimeoutException e) {
+                long dt = System.nanoTime() - t0;
+                // TODO remove again once OAK-1491 is fixed
+                assertTrue("Spurious wake-up after " + dt,
+                        dt > 0.8*TimeUnit.NANOSECONDS.convert(time, timeUnit));
                 for (Expectation exp : expected) {
                     if (!exp.isDone()) {
                         missing.add(exp);
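Note: the added catch block above treats a TimeoutException as genuine only if most of the requested wait actually elapsed, flagging spurious wake-ups until OAK-1491 is fixed. A standalone sketch of that guard, assuming t0 was taken with System.nanoTime() before the wait; the 0.8 factor follows the diff:

    import java.util.concurrent.TimeUnit;

    class TimeoutGuard {
        // Throws if the wait returned well before the requested timeout,
        // i.e. the "timeout" was really a spurious wake-up.
        static void assertRealTimeout(long t0Nanos, long time, TimeUnit unit) {
            long dt = System.nanoTime() - t0Nanos;
            if (dt < 0.8 * unit.toNanos(time)) {
                throw new AssertionError("Spurious wake-up after " + dt + " ns");
            }
        }
    }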
diff --git a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryTest.java b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryTest.java
index c6b89cc..288b8ac 100644
--- a/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryTest.java
+++ b/oak-jcr/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryTest.java
@@ -81,7 +81,7 @@
         // disable the nodetype index
         Node nodeTypeIndex = root.getNode("oak:index").getNode("nodetype");
         nodeTypeIndex.setProperty("declaringNodeTypes", new String[] {
-            }, PropertyType.NAME);
+        }, PropertyType.NAME);
 
         // add 10 nodes
         Node test = root.addNode("test");
@@ -108,29 +108,9 @@
             }
             buff.append(it.nextNode().getPath());
         }
+        
         assertEquals("/test/test9, /test/test8, /test/test7, /test/test6, /test/test5, /test/test4, /test/test3, /test/test2, /test/test1, /test/test0", 
                 buff.toString());
-        
-        RowIterator rit;
-        
-        r = session.getWorkspace().getQueryManager()
-                .createQuery("explain " + query, "xpath").execute();
-        rit = r.getRows();
-        assertEquals("[nt:base] as [a] /* ordered order by lastMod ancestor 1 " + 
-                "where ([a].[jcr:primaryType] = cast('oak:Unstructured' as string)) " + 
-                "and (isdescendantnode([a], [/test])) */", rit.nextRow().getValue("plan").getString());
-
-        query = "/jcr:root/test//*[@jcr:primaryType='oak:Unstructured' " + 
-                "and  content/@lastMod > '2001-02-01']";
-        r = session.getWorkspace().getQueryManager()
-                .createQuery("explain " + query, "xpath").execute();
-        rit = r.getRows();
-        assertEquals("[nt:base] as [a] /* ordered lastMod > 2001-02-01 " + 
-                "where (([a].[jcr:primaryType] = cast('oak:Unstructured' as string)) " + 
-                "and ([a].[content/lastMod] > cast('2001-02-01' as string))) " + 
-                "and (isdescendantnode([a], [/test])) */", 
-                rit.nextRow().getValue("plan").getString());
-        
     }
     
     @Test
diff --git a/oak-lucene/pom.xml b/oak-lucene/pom.xml
index 1289f6d..72c414f 100644
--- a/oak-lucene/pom.xml
+++ b/oak-lucene/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java
index 571e2b4..5c3cd6a 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexNode.java
@@ -24,8 +24,6 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 import org.apache.jackrabbit.oak.spi.state.ReadOnlyBuilder;
@@ -76,7 +74,7 @@
 
     private final IndexSearcher searcher;
 
-    private final ReadWriteLock lock = new ReentrantReadWriteLock();
+    private int refcount = 0;
 
     private boolean closed = false;
 
@@ -97,33 +95,27 @@
         return definition;
     }
 
-    IndexSearcher getSearcher() {
+    synchronized IndexSearcher acquireSearcher() {
+        checkState(!closed);
+        refcount++;
         return searcher;
     }
 
-    boolean acquire() {
-        lock.readLock().lock();
-        if (closed) {
-            lock.readLock().unlock();
-            return false;
-        } else {
-            return true;
+    synchronized void releaseSearcher() throws IOException {
+        refcount--;
+        if (closed && refcount == 0) {
+            reallyClose();
         }
     }
 
-    void release() {
-        lock.readLock().unlock();
+    synchronized void close() throws IOException {
+        closed = true;
+        if (refcount == 0) {
+            reallyClose();
+        }
     }
 
-    void close() throws IOException {
-        lock.writeLock().lock();
-        try {
-            checkState(!closed);
-            closed = true;
-        } finally {
-            lock.writeLock().unlock();
-        }
-
+    private void reallyClose() throws IOException {
         try {
             reader.close();
         } finally {
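Note: the IndexNode rewrite above swaps the read/write lock for a reference count: acquireSearcher() increments it, releaseSearcher() decrements it, and the underlying reader is only really closed once close() has been requested and the count drops to zero. A minimal sketch of that pattern with illustrative names, not the Oak API:

    import java.io.Closeable;
    import java.io.IOException;

    class RefCountedResource {
        private final Closeable resource;
        private int refcount = 0;
        private boolean closed = false;

        RefCountedResource(Closeable resource) {
            this.resource = resource;
        }

        // Hand out the resource; refuse once close() has been requested.
        synchronized Closeable acquire() {
            if (closed) {
                throw new IllegalStateException("already closed");
            }
            refcount++;
            return resource;
        }

        // The last releaser performs the deferred close.
        synchronized void release() throws IOException {
            refcount--;
            if (closed && refcount == 0) {
                resource.close();
            }
        }

        // Only marks the instance closed; the real close happens when
        // the count reaches zero.
        synchronized void close() throws IOException {
            closed = true;
            if (refcount == 0) {
                resource.close();
            }
        }
    }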
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java
index 8af912a..6e4b430 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/IndexTracker.java
@@ -16,16 +16,9 @@
  */
 package org.apache.jackrabbit.oak.plugins.index.lucene;
 
-import static com.google.common.base.Preconditions.checkState;
-import static com.google.common.base.Predicates.in;
-import static com.google.common.base.Predicates.not;
-import static com.google.common.base.Predicates.notNull;
 import static com.google.common.collect.Lists.newArrayList;
 import static com.google.common.collect.Lists.newArrayListWithCapacity;
-import static com.google.common.collect.Maps.filterKeys;
-import static com.google.common.collect.Maps.filterValues;
 import static com.google.common.collect.Maps.newHashMap;
-import static java.util.Collections.emptyMap;
 import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
 import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.TYPE_PROPERTY_NAME;
 import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.TYPE_LUCENE;
@@ -46,7 +39,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 
 class IndexTracker {
@@ -57,12 +49,9 @@
 
     private NodeState root = EMPTY_NODE;
 
-    private volatile Map<String, IndexNode> indices = emptyMap();
+    private final Map<String, IndexNode> indices = newHashMap();
 
     synchronized void close() {
-        Map<String, IndexNode> indices = this.indices;
-        this.indices = emptyMap();
-
         for (Map.Entry<String, IndexNode> entry : indices.entrySet()) {
             try {
                 entry.getValue().close();
@@ -70,71 +59,44 @@
                 log.error("Failed to close the Lucene index at " + entry.getKey(), e);
             }
         }
+        indices.clear();
     }
 
     synchronized void update(NodeState root) {
-        Map<String, IndexNode> original = indices;
-        final Map<String, IndexNode> updates = newHashMap();
-
-        List<Editor> editors = newArrayListWithCapacity(original.size());
-        for (Map.Entry<String, IndexNode> entry : original.entrySet()) {
-            final String path = entry.getKey();
-            final String name = entry.getValue().getName();
-
+        List<Editor> editors = newArrayListWithCapacity(indices.size());
+        for (final String path : indices.keySet()) {
             List<String> elements = newArrayList();
             Iterables.addAll(elements, PathUtils.elements(path));
             elements.add(INDEX_DEFINITIONS_NAME);
-            elements.add(name);
+            elements.add(indices.get(path).getName());
             editors.add(new SubtreeEditor(new DefaultEditor() {
                 @Override
                 public void leave(NodeState before, NodeState after) {
+                    IndexNode index = indices.remove(path);
                     try {
-                        // TODO: Use DirectoryReader.openIfChanged()
-                        IndexNode index = IndexNode.open(name, after);
-                        updates.put(path, index); // index can be null
+                        index.close();
+                    } catch (IOException e) {
+                        log.error("Failed to close Lucene index at " + path, e);
+                    }
+
+                    try {
+                        index = IndexNode.open(index.getName(), after);
+                        if (index != null) {
+                            indices.put(path, index);
+                        }
                     } catch (IOException e) {
                         log.error("Failed to open Lucene index at " + path, e);
                     }
                 }
             }, elements.toArray(new String[elements.size()])));
         }
-
         EditorDiff.process(CompositeEditor.compose(editors), this.root, root);
         this.root = root;
-
-        if (!updates.isEmpty()) {
-            indices = ImmutableMap.<String, IndexNode>builder()
-                    .putAll(filterKeys(original, not(in(updates.keySet()))))
-                    .putAll(filterValues(updates, notNull()))
-                    .build();
-
-            for (String path : updates.keySet()) {
-                IndexNode index = original.get(path);
-                try {
-                    index.close();
-                } catch (IOException e) {
-                    log.error("Failed to close Lucene index at " + path, e);
-                }
-            }
-        }
     }
 
-    IndexNode acquireIndexNode(String path) {
-        IndexNode index = indices.get(path);
-        if (index != null && index.acquire()) {
-            return index;
-        } else {
-            return findIndexNode(path);
-        }
-    }
-
-    private synchronized IndexNode findIndexNode(String path) {
-        // Retry the lookup from acquireIndexNode now that we're
-        // synchronized. The acquire() call is guaranteed to succeed
-        // since the close() method is also synchronized.
+    synchronized IndexNode getIndexNode(String path) {
         IndexNode index = indices.get(path);
         if (index != null) {
-            checkState(index.acquire());
             return index;
         }
 
@@ -150,11 +112,7 @@
                 if (TYPE_LUCENE.equals(node.getString(TYPE_PROPERTY_NAME))) {
                     index = IndexNode.open(child.getName(), node);
                     if (index != null) {
-                        checkState(index.acquire());
-                        indices = ImmutableMap.<String, IndexNode>builder()
-                                .putAll(indices)
-                                .put(path, index)
-                                .build();
+                        indices.put(path, index);
                         return index;
                     }
                 }
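Note: after this change every IndexTracker operation runs under the tracker's own monitor: indices is a plain HashMap, update() closes and reopens affected entries in place, and getIndexNode() lazily opens missing ones. A minimal sketch of that guarded-registry shape, with illustrative types:

    import java.io.Closeable;
    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    class GuardedRegistry<V extends Closeable> {
        // One monitor (the instance itself) protects all access to the map.
        private final Map<String, V> entries = new HashMap<String, V>();

        // Close the old entry before storing its replacement, as update() does.
        synchronized void update(String key, V replacement) throws IOException {
            V old = entries.remove(key);
            if (old != null) {
                old.close();
            }
            if (replacement != null) {
                entries.put(key, replacement);
            }
        }

        synchronized V get(String key) {
            return entries.get(key);
        }

        synchronized void close() throws IOException {
            for (V value : entries.values()) {
                value.close();
            }
            entries.clear();
        }
    }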
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
index 7107bb3..8b65b54 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/LuceneIndex.java
@@ -177,39 +177,35 @@
 
     @Override
     public double getCost(Filter filter, NodeState root) {
-        IndexNode index = tracker.acquireIndexNode("/");
-        if (index == null) { // unusable index
+        if (tracker.getIndexNode("/") == null) {
+            // unusable index
             return Double.POSITIVE_INFINITY;
         }
-        try {
-            FullTextExpression ft = filter.getFullTextConstraint();
-            if (ft == null) {
-                // no full-text condition: don't use this index,
-                // as there might be a better one
-                return Double.POSITIVE_INFINITY;
-            }
-            Set<String> relPaths = getRelativePaths(ft);
-            if (relPaths.size() > 1) {
-                LOG.warn("More than one relative parent for query " + filter.getQueryStatement());
-                // there are multiple "parents", as in
-                // "contains(a/x, 'hello') and contains(b/x, 'world')"
-                return new MultiLuceneIndex(filter, root, relPaths).getCost();
-            }
-            String parent = relPaths.iterator().next();
-            if (parent.isEmpty()) {
-                // no relative properties
-                return 10;
-            }
-            // all relative properties have the same "parent", as in
-            // "contains(a/x, 'hello') and contains(a/y, 'world')" or
-            // "contains(a/x, 'hello') or contains(a/*, 'world')"
-            // TODO: proper cost calculation
-            // we assume this will cause more read operations,
-            // as we need to read the node and then the parent
-            return 15;
-        } finally {
-            index.release();
+        FullTextExpression ft = filter.getFullTextConstraint();
+        if (ft == null) {
+            // no full-text condition: don't use this index,
+            // as there might be a better one
+            return Double.POSITIVE_INFINITY;
         }
+        Set<String> relPaths = getRelativePaths(ft);
+        if (relPaths.size() > 1) {
+            LOG.warn("More than one relative parent for query " + filter.getQueryStatement());
+            // there are multiple "parents", as in
+            // "contains(a/x, 'hello') and contains(b/x, 'world')"
+            return new MultiLuceneIndex(filter, root, relPaths).getCost();
+        }
+        String parent = relPaths.iterator().next();
+        if (parent.isEmpty()) {
+            // no relative properties
+            return 10;
+        }
+        // all relative properties have the same "parent", as in
+        // "contains(a/x, 'hello') and contains(a/y, 'world')" or
+        // "contains(a/x, 'hello') or contains(a/*, 'world')"
+        // TODO: proper cost calculation
+        // we assume this will cause more read operations,
+        // as we need to read the node and then the parent
+        return 15;
     }
 
     /**
@@ -259,36 +255,35 @@
 
     @Override
     public String getPlan(Filter filter, NodeState root) {
-        IndexNode index = tracker.acquireIndexNode("/");
+        IndexNode index = tracker.getIndexNode("/");
         checkState(index != null, "The Lucene index is not available");
-        try {
-            FullTextExpression ft = filter.getFullTextConstraint();
-            Set<String> relPaths = getRelativePaths(ft);
-            if (relPaths.size() > 1) {
-                return new MultiLuceneIndex(filter, root, relPaths).getPlan();
-            }
-            String parent = relPaths.size() == 0 ? "" : relPaths.iterator().next();
-            // we only restrict non-full-text conditions if there is
-            // no relative property in the full-text constraint
-            boolean nonFullTextConstraints = parent.isEmpty();
-            String plan = getQuery(filter, null, nonFullTextConstraints, analyzer, index.getDefinition()) + " ft:(" + ft + ")";
-            if (!parent.isEmpty()) {
-                plan += " parent:" + parent;
-            }
-            return plan;
-        } finally {
-            index.release();
+
+        FullTextExpression ft = filter.getFullTextConstraint();
+        Set<String> relPaths = getRelativePaths(ft);
+        if (relPaths.size() > 1) {
+            return new MultiLuceneIndex(filter, root, relPaths).getPlan();
         }
+        String parent = relPaths.size() == 0 ? "" : relPaths.iterator().next();
+        // we only restrict non-full-text conditions if there is
+        // no relative property in the full-text constraint
+        boolean nonFullTextConstraints = parent.isEmpty();
+        String plan = getQuery(filter, null, nonFullTextConstraints, analyzer, index.getDefinition()) + " ft:(" + ft + ")";
+        if (!parent.isEmpty()) {
+            plan += " parent:" + parent;
+        }
+        return plan;
     }
 
     @Override
     public Cursor query(final Filter filter, final NodeState root) {
+        final IndexNode index = tracker.getIndexNode("/");
+        checkState(index != null, "The Lucene index is not available");
+
         FullTextExpression ft = filter.getFullTextConstraint();
         Set<String> relPaths = getRelativePaths(ft);
         if (relPaths.size() > 1) {
             return new MultiLuceneIndex(filter, root, relPaths).query();
         }
-
         final String parent = relPaths.size() == 0 ? "" : relPaths.iterator().next();
         // we only restrict non-full-text conditions if there is
         // no relative property in the full-text constraint
@@ -343,14 +338,14 @@
              * @return true if any document is loaded
              */
             private boolean loadDocs() {
+                IndexNode indexNode = null;
+                IndexSearcher searcher = null;
                 ScoreDoc lastDocToRecord = null;
-
-                IndexNode indexNode = tracker.acquireIndexNode("/");
-                checkState(indexNode != null);
                 try {
-                    IndexSearcher searcher = indexNode.getSearcher();
+                    indexNode = acquire();
+                    searcher = indexNode.acquireSearcher();
                     Query query = getQuery(filter, searcher.getIndexReader(),
-                            nonFullTextConstraints, analyzer, indexNode.getDefinition());
+                            nonFullTextConstraints, analyzer, index.getDefinition());
                     TopDocs docs;
                     if (lastDoc != null) {
                         docs = searcher.searchAfter(lastDoc, query, LUCENE_QUERY_BATCH_SIZE);
@@ -368,15 +363,28 @@
                 } catch (IOException e) {
                     LOG.warn("query via {} failed.", LuceneIndex.this, e);
                 } finally {
-                    indexNode.release();
+                    release(indexNode, searcher);
                 }
-
                 if (lastDocToRecord != null) {
                     this.lastDoc = lastDocToRecord;
                 }
-
                 return !queue.isEmpty();
             }
+
+            private IndexNode acquire() {
+                return tracker.getIndexNode("/");
+            }
+
+            private void release(IndexNode indexNode, IndexSearcher searcher) {
+                try {
+                    if (searcher != null) {
+                        indexNode.releaseSearcher();
+                    }
+                } catch (IOException e) {
+                    LOG.warn("Error occurred while releasing/closing the " +
+                            "IndexSearcher", e);
+                }
+            }
         };
         return new LucenePathCursor(itr, settings);
     }
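Note: loadDocs() above pages through results with IndexSearcher.searchAfter, carrying the last ScoreDoc from one batch into the next, and acquires/releases the index node around each batch. A self-contained sketch of the pagination loop against a plain Lucene 4.x index; the batch size is an assumption standing in for LUCENE_QUERY_BATCH_SIZE:

    import java.io.IOException;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.Directory;

    class SearchAfterSketch {
        // Stand-in for LUCENE_QUERY_BATCH_SIZE; the real value is an Oak constant.
        static final int BATCH_SIZE = 50;

        static int countAllHits(Directory dir) throws IOException {
            DirectoryReader reader = DirectoryReader.open(dir);
            try {
                IndexSearcher searcher = new IndexSearcher(reader);
                Query query = new MatchAllDocsQuery();
                ScoreDoc lastDoc = null;
                int total = 0;
                while (true) {
                    // First batch uses search(); later batches resume after
                    // the last ScoreDoc seen, as loadDocs() does above.
                    TopDocs docs = (lastDoc == null)
                            ? searcher.search(query, BATCH_SIZE)
                            : searcher.searchAfter(lastDoc, query, BATCH_SIZE);
                    if (docs.scoreDocs.length == 0) {
                        break;
                    }
                    for (ScoreDoc doc : docs.scoreDocs) {
                        total++;
                        lastDoc = doc;
                    }
                }
                return total;
            } finally {
                reader.close();
            }
        }
    }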
diff --git a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
index 5d0d214..f7149b4 100644
--- a/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
+++ b/oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/OakDirectory.java
@@ -133,14 +133,7 @@
         return lockFactory;
     }
 
-    /**
-     * Size of the blob entries to which the Lucene files are split.
-     * Set to higher than the 4kB inline limit for the BlobStore, but
-     * just below the 16+kB inline limit in the SegmentMK. This way the
-     * Lucene index gets stored in the SegmentMK segments for best performance
-     * even when an external Data/BlobStore is being used for normal binaries.
-     */
-    private static final int BLOB_SIZE = 16 * 1024;
+    private static final int BLOB_SIZE = 32 * 1024; // > blob inline limit
 
     private static class OakIndexFile {
 
diff --git a/oak-mk-api/pom.xml b/oak-mk-api/pom.xml
index 1f8623a..4e18a80 100644
--- a/oak-mk-api/pom.xml
+++ b/oak-mk-api/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-mk-perf/README.md b/oak-mk-perf/README.md
deleted file mode 100644
index bd012d9..0000000
--- a/oak-mk-perf/README.md
+++ /dev/null
@@ -1,90 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-  -->
-
-This module contains performance tests for microkernel instances.
-
-Usage
------
-
-The tests can be launched locally by invoking the remote profile directly from the pom file, using
-the following commands:
-
-    mvn clean test -Premote -Poakmk   - for launching the tests against the oak microkernel or
-    mvn clean test -Premote -Pmongomk - for launching the tests against the mongodb microkernel.
-        
-The tests can also be launched remotely, for example on a mongodb cluster. In this case the pom
-file uploads the tests to the remote machine, runs them, and collects the results. Use the
-following commands to run the tests remotely:
-    
-    mvn clean process-test-classes -Plocal [-Pmongomk | -Poakmk]  \
-    -Dremotehost=<remotehost> -Dpass=<ssh-password for the remote machine>
-
-The test environment (mongodb cluster)
---------------------------------------
-    
-For measuring the performance of the microkernel, I created a mongodb cluster in the Amazon cloud
-with the following components:
-
-* 2 shards (each also a replica set), with 2 nodes each installed on different
-  platforms
-* 3 configuration servers, all of them installed on one platform
-* 1 mongos instance
-        
-Sharding is enabled and I'm using the following shard key: {"path": 1, "revId": 1}. I also
-changed the chunk size from 64 MB to 8 MB.
-
-
-Tests
------
-
-All the tests below were launched remotely in the Amazon cloud for both types of microkernel. The
-tests are all executed on the platform where the mongos instance is installed.
-        
-* `MkAddNodesDifferentStructuresTest.testWriteNodesSameLevel`: Creates 100000 nodes, all having the
-  same parent node. All the nodes are added in a single microkernel commit.
-* `MkAddNodesDifferentStructuresTest.testWriteNodes10Children`: Creates 100000 nodes, in a pyramid
-  tree structure. All of the nodes have 10 children. All the nodes are added in a single microkernel
-  commit.
-* `MkAddNodesDifferentStructuresTest.testWriteNodes100Children`: Creates 100000 nodes, in a pyramid
-  tree structure. All of the nodes have 100 children. All the nodes are added in a single
-  microkernel commit.
-* `MkAddNodesDifferentStructuresTest.testWriteNodes1000Children`: Creates 100000 nodes, in a pyramid
-  tree structure. All of the nodes have 1000 children. All the nodes are added in a single
-  microkernel commit.
-* `MkAddNodesDifferentStructuresTest.testWriteNodes1Child`: Creates 100 nodes, each node having
-  only one child, so each of them is on a different tree level. All the nodes are added in a single
-  microkernel commit.
-* `MkAddNodesMultipleCommitsTest.testWriteNodesAllNodes1Commit`: Create 10000 nodes, in a pyramid
-  tree structure. All of the nodes have 100 children. Only one microkernel commit is performed for
-  adding the nodes.
-* `MkAddNodesMultipleCommitsTest.testWriteNodes50NodesPerCommit`: Create 10000 nodes, in a pyramid
-  tree structure. All of the nodes have 100 children. The nodes are added in chunks of 50 nodes per
-  commit.
-* `MkAddNodesMultipleCommitsTest.testWriteNodes1000NodesPerCommit`: Create 10000 nodes, in a pyramid
-  tree structure. All of the nodes have 100 children. The nodes are added in chunks of 1000 nodes
-  per commit.
-* `MkAddNodesMultipleCommitsTest.testWriteNodes1NodePerCommit`: Create 10000 nodes, in a pyramid
-  tree structure. All of the nodes have 100 children. Each node is individually added.
-* `MKAddNodesRelativePathTest.testWriteNodesSameLevel`: Create 1000 nodes, all on the same level.
-  Each node is individually added (in a separate commit). Each node is added using the relative
-  paths in the microkernel commit method.
-* `MKAddNodesRelativePathTest.testWriteNodes10Children`: Create 1000 nodes, each of them having
-  exactly 10 children. Each node is individually added (in a separate commit). Each node is added
-  using the relative paths in the microkernel commit method.
-* `MKAddNodesRelativePathTest.testWriteNodes100Children`: Create 1000 nodes, each of them having
-  exactly 100 children. Each node is individually added (in a separate commit). Each node is added
-  using the relative paths in the microkernel commit method.
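Note: the removed README describes flat and pyramid test structures in which every node has a fixed number of children. A small illustrative sketch that enumerates paths for such a pyramid (children >= 1); this is not the removed oak-mk-perf code, which builds microkernel commit diffs instead:

    import java.util.ArrayDeque;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Queue;

    class PyramidPaths {
        // Breadth-first enumeration: each dequeued parent gets `children`
        // children until `total` node paths have been produced.
        static List<String> build(int children, int total) {
            List<String> paths = new ArrayList<String>();
            Queue<String> queue = new ArrayDeque<String>();
            queue.add(""); // the root "/"
            while (paths.size() < total) {
                String parent = queue.remove();
                for (int i = 0; i < children && paths.size() < total; i++) {
                    String path = parent + "/N" + paths.size();
                    paths.add(path);
                    queue.add(path);
                }
            }
            return paths;
        }
    }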
diff --git a/oak-mk-perf/pom.xml b/oak-mk-perf/pom.xml
deleted file mode 100644
index cb19eca..0000000
--- a/oak-mk-perf/pom.xml
+++ /dev/null
@@ -1,139 +0,0 @@
-<?xml version="1.0"?>
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor 
-    license agreements. See the NOTICE file distributed with this work for additional 
-    information regarding copyright ownership. The ASF licenses this file to 
-    You under the Apache License, Version 2.0 (the "License"); you may not use 
-    this file except in compliance with the License. You may obtain a copy of 
-    the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
-    by applicable law or agreed to in writing, software distributed under the 
-    License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
-    OF ANY KIND, either express or implied. See the License for the specific 
-    language governing permissions and limitations under the License. -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.jackrabbit</groupId>
-        <artifactId>jackrabbit-oak</artifactId>
-        <version>0.20-SNAPSHOT</version>
-    </parent>
-    <artifactId>oak-mk-perf</artifactId>
-    <name>oak-mk-perf</name>
-    <url>http://maven.apache.org</url>
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    </properties>
-    <profiles>
-        <profile>
-            <id>local</id>
-            <build>
-                <plugins>
-
-                    <plugin>
-                        <artifactId>maven-dependency-plugin</artifactId>
-                        <configuration>
-                            <outputDirectory>
-                                ${project.build.outputDirectory}</outputDirectory>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <id>unpack-dependencies</id>
-                                <phase>process-resources</phase>
-                                <goals>
-                                    <goal>unpack-dependencies</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-
-                    <plugin>
-                        <groupId>org.evolvis.maven.plugins.remote-testing</groupId>
-                        <artifactId>remote-testing-plugin</artifactId>
-                        <version>0.6</version>
-                        <configuration>
-                            <testMachine>${remotehost}</testMachine>
-                            <username>qe1</username>
-                            <password>${pass}</password>
-                            <display>0</display>
-                            <!--<keyFile>/home/rogoz/private-key-file</keyFile> -->
-                            <remoteFolder>/home/qe1/tests/</remoteFolder>
-                            <pomfile>${basedir}/remotePom.xml</pomfile>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <id>remote testing</id>
-                                <goals>
-                                    <goal>clean</goal>
-                                    <goal>test</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <dependencies>
-                        <dependency>
-                            <groupId>org.apache.maven.surefire</groupId>
-                            <artifactId>surefire-junit47</artifactId>
-                            <version>2.12.3</version>
-                        </dependency>
-                    </dependencies>
-                    <argLine>-Xmx2024m</argLine>
-                    <includes>
-                        <include>**/*Test.java</include>
-                    </includes>
-                    <systemPropertyVariables>
-                        <mk.type>${mktype}</mk.type>
-                    </systemPropertyVariables>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-    <dependencies>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-
-        </dependency>
-        <dependency>
-            <groupId>org.apache.jackrabbit</groupId>
-            <artifactId>oak-mk</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.h2database</groupId>
-            <artifactId>h2</artifactId>
-            <version>${h2.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.cedarsoft.commons</groupId>
-            <artifactId>test-utils</artifactId>
-            <version>5.0.9</version>
-            <exclusions>
-                <exclusion>
-                    <artifactId>maven-cobertura-plugin</artifactId>
-                    <groupId>maven-plugins</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>maven-findbugs-plugin</artifactId>
-                    <groupId>maven-plugins</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
-    <pluginRepositories>
-        <pluginRepository>
-            <id>maven-repo.evolvis.org</id>
-            <url>http://maven-repo.evolvis.org/releases/</url>
-        </pluginRepository>
-    </pluginRepositories>
-
-</project>
diff --git a/oak-mk-perf/remotePom.xml b/oak-mk-perf/remotePom.xml
deleted file mode 100644
index a553e6d..0000000
--- a/oak-mk-perf/remotePom.xml
+++ /dev/null
@@ -1,57 +0,0 @@
-<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor 
-    license agreements. See the NOTICE file distributed with this work for additional 
-    information regarding copyright ownership. The ASF licenses this file to 
-    You under the Apache License, Version 2.0 (the "License"); you may not use 
-    this file except in compliance with the License. You may obtain a copy of 
-    the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required 
-    by applicable law or agreed to in writing, software distributed under the 
-    License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS 
-    OF ANY KIND, either express or implied. See the License for the specific 
-    language governing permissions and limitations under the License. -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <groupId>org.apache.jackrabbit</groupId>
-    <artifactId>remote-oak-mk-perf</artifactId>
-    <version>0.0.1-SNAPSHOT</version>
-    <packaging>jar</packaging>
-
-    <name>remote-sharedcloud-oak-performance</name>
-    <url>http://maven.apache.org</url>
-
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    </properties>
-    <profiles>
-        <profile>
-            <id>remote</id>
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.apache.maven.plugins</groupId>
-                        <artifactId>maven-surefire-plugin</artifactId>
-                        <version>2.10</version>
-                        <dependencies>
-                            <dependency>
-                                <groupId>org.apache.maven.surefire</groupId>
-                                <artifactId>surefire-junit47</artifactId>
-                                <version>2.12.3</version>
-                            </dependency>
-                        </dependencies>
-                        <configuration>
-                            <systemPropertyVariables>
-                                <mk.type>${env.mktype}</mk.type>
-                            </systemPropertyVariables>
-                            <testFailureIgnore>false</testFailureIgnore>
-                            <argLine>-Xmx4056m</argLine>
-                            <includes>
-                                <include>**/*Test.java</include>
-                            </includes>
-                        </configuration>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-</project>
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/ConcurrentAddNodes1Commit.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/ConcurrentAddNodes1Commit.java
deleted file mode 100644
index 71c15ef..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/ConcurrentAddNodes1Commit.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.scenarios;
-
-import java.util.ArrayList;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.tasks.GenericWriteTask;
-import org.apache.jackrabbit.mk.util.Chronometer;
-import org.apache.jackrabbit.mk.util.MicroKernelOperation;
-
-public class ConcurrentAddNodes1Commit {
-
-    public static void concurentWritingFlatStructure(
-            ArrayList<MicroKernel> mks, int mkNumber, int nodesNumber,
-            Chronometer chronometer) throws InterruptedException {
-
-        ArrayList<GenericWriteTask> tasks = new ArrayList<GenericWriteTask>();
-        String diff;
-        for (int i = 0; i < mkNumber; i++) {
-            diff = MicroKernelOperation.buildPyramidDiff("/", 0, 0,
-                    nodesNumber, "N" + i + "N", new StringBuilder()).toString();
-            tasks.add(new GenericWriteTask(mks.get(i), diff, 0));
-            System.out.println("The diff size is " + diff.getBytes().length);
-        }
-
-        ExecutorService threadExecutor = Executors.newFixedThreadPool(mkNumber);
-        chronometer.start();
-        for (GenericWriteTask genericWriteTask : tasks) {
-            threadExecutor.execute(genericWriteTask);
-        }
-        threadExecutor.shutdown();
-        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
-        chronometer.stop();
-        System.out.println("Total time is " + chronometer.getSeconds());
-    }
-
-    public static void concurentWritingPyramid1(ArrayList<MicroKernel> mks,
-            int mkNumber, int nodesNumber, Chronometer chronometer)
-            throws InterruptedException {
-
-        ArrayList<GenericWriteTask> tasks = new ArrayList<GenericWriteTask>();
-        String diff;
-        for (int i = 0; i < mkNumber; i++) {
-            diff = MicroKernelOperation.buildPyramidDiff("/", 0, 10,
-                    nodesNumber, "N" + i + "N", new StringBuilder()).toString();
-            tasks.add(new GenericWriteTask(mks.get(i), diff, 0));
-            System.out.println("The diff size is " + diff.getBytes().length);
-        }
-
-        ExecutorService threadExecutor = Executors.newFixedThreadPool(mkNumber);
-        chronometer.start();
-        for (GenericWriteTask genericWriteTask : tasks) {
-            threadExecutor.execute(genericWriteTask);
-        }
-        threadExecutor.shutdown();
-        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
-        chronometer.stop();
-        System.out.println("Total time is " + chronometer.getSeconds());
-    }
-
-    public static void concurentWritingPyramid2(ArrayList<MicroKernel> mks,
-            int mkNumber, int nodesNumber, Chronometer chronometer)
-            throws InterruptedException {
-
-        ArrayList<GenericWriteTask> tasks = new ArrayList<GenericWriteTask>();
-        String diff;
-        for (int i = 0; i < mkNumber; i++) {
-            diff = MicroKernelOperation.buildPyramidDiff("/", 0, 100,
-                    nodesNumber, "N" + i + "N", new StringBuilder()).toString();
-            tasks.add(new GenericWriteTask(mks.get(i), diff, 0));
-            System.out.println("The diff size is " + diff.getBytes().length);
-        }
-
-        ExecutorService threadExecutor = Executors.newFixedThreadPool(mkNumber);
-        chronometer.start();
-        for (GenericWriteTask genericWriteTask : tasks) {
-            threadExecutor.execute(genericWriteTask);
-        }
-        threadExecutor.shutdown();
-        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
-        chronometer.stop();
-        System.out.println("Total time is " + chronometer.getSeconds());
-    }
-
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/ConcurrentAddNodesMultipleCommits.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/ConcurrentAddNodesMultipleCommits.java
deleted file mode 100644
index 756a17e..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/ConcurrentAddNodesMultipleCommits.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.scenarios;
-
-import java.util.ArrayList;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.tasks.GenericWriteTask;
-import org.apache.jackrabbit.mk.util.Chronometer;
-import org.apache.jackrabbit.mk.util.MicroKernelOperation;
-
-public class ConcurrentAddNodesMultipleCommits {
-
-    public static void concurentWritingFlatStructure(
-            ArrayList<MicroKernel> mks, int mkNumber, long nodesNumber,
-            int numberOfNodesPerCommit, Chronometer chronometer)
-            throws InterruptedException {
-
-        int children = 0;
-        ArrayList<GenericWriteTask> tasks = new ArrayList<GenericWriteTask>();
-        String diff;
-        for (int i = 0; i < mkNumber; i++) {
-            diff = MicroKernelOperation.buildPyramidDiff("/", 0, children,
-                    nodesNumber, "N" + i + "N", new StringBuilder()).toString();
-            tasks.add(new GenericWriteTask(mks.get(i), diff,
-                    numberOfNodesPerCommit));
-            System.out.println("The diff size is " + diff.getBytes().length);
-        }
-
-        ExecutorService threadExecutor = Executors.newFixedThreadPool(mkNumber);
-        chronometer.start();
-        for (GenericWriteTask genericWriteTask : tasks) {
-            threadExecutor.execute(genericWriteTask);
-        }
-        threadExecutor.shutdown();
-        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
-        chronometer.stop();
-        System.out.println("Total time is " + chronometer.getSeconds());
-    }
-
-    public static void concurentWritingPyramid1(ArrayList<MicroKernel> mks,
-            int mkNumber, long nodesNumber, int numberOfNodesPerCommit,
-            Chronometer chronometer) throws InterruptedException {
-        int children = 10;
-        ArrayList<GenericWriteTask> tasks = new ArrayList<GenericWriteTask>();
-        String diff;
-        for (int i = 0; i < mkNumber; i++) {
-            diff = MicroKernelOperation.buildPyramidDiff("/", 0, children,
-                    nodesNumber, "N" + i + "N", new StringBuilder()).toString();
-            tasks.add(new GenericWriteTask(mks.get(i), diff,
-                    numberOfNodesPerCommit));
-            System.out.println("The diff size is " + diff.getBytes().length);
-        }
-
-        ExecutorService threadExecutor = Executors.newFixedThreadPool(mkNumber);
-        chronometer.start();
-        for (GenericWriteTask genericWriteTask : tasks) {
-            threadExecutor.execute(genericWriteTask);
-        }
-        threadExecutor.shutdown();
-        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
-        chronometer.stop();
-        System.out.println("Total time is " + chronometer.getSeconds());
-    }
-
-    public static void concurentWritingPyramid2(ArrayList<MicroKernel> mks,
-            int mkNumber, long nodesNumber, int numberOfNodesPerCommit,
-            Chronometer chronometer) throws InterruptedException {
-        int children = 100;
-        ArrayList<GenericWriteTask> tasks = new ArrayList<GenericWriteTask>();
-        String diff;
-        for (int i = 0; i < mkNumber; i++) {
-
-            diff = MicroKernelOperation.buildPyramidDiff("/", 0, children,
-                    nodesNumber, "N" + i + "N", new StringBuilder()).toString();
-
-            tasks.add(new GenericWriteTask(mks.get(i), diff,
-                    numberOfNodesPerCommit));
-
-        }
-
-        ExecutorService threadExecutor = Executors.newFixedThreadPool(mkNumber);
-        chronometer.start();
-        for (GenericWriteTask genericWriteTask : tasks) {
-            threadExecutor.execute(genericWriteTask);
-        }
-        threadExecutor.shutdown();
-        threadExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
-        chronometer.stop();
-        System.out.println("Total time is " + chronometer.getSeconds());
-    }
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicroKernelMultipleCommits.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicroKernelMultipleCommits.java
deleted file mode 100644
index 903a8e1..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicroKernelMultipleCommits.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.scenarios;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Chronometer;
-import org.apache.jackrabbit.mk.util.Committer;
-
-public class MicroKernelMultipleCommits {
-
-    public static void writeNodesAllNodes1Commit(MicroKernel mk, String diff,
-            Chronometer chronometer) {
-
-        Committer commiter = new Committer();
-        chronometer.start();
-        commiter.addNodes(mk, diff, 0);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodesAllNodes1Commit is "
-                + chronometer.getSeconds());
-    }
-
-    public static void writeNodes1NodePerCommit(MicroKernel mk, String diff,
-            Chronometer chronometer) {
-
-        Committer commiter = new Committer();
-        chronometer.start();
-        commiter.addNodes(mk, diff, 1);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes1NodePerCommit is "
-                + chronometer.getSeconds());
-    }
-
-    public static void writeNodes50NodesPerCommit(MicroKernel mk, String diff,
-            Chronometer chronometer) {
-
-        Committer commiter = new Committer();
-        chronometer.start();
-        commiter.addNodes(mk, diff, 50);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes50NodesPerCommit is "
-                + chronometer.getSeconds());
-    }
-
-    public static void writeNodes1000NodesPerCommit(MicroKernel mk,
-            String diff, Chronometer chronometer) {
-
-        Committer commiter = new Committer();
-        chronometer.start();
-        commiter.addNodes(mk, diff, 1000);
-        chronometer.stop();
-        System.out
-                .println("Total time for testWriteNodes1000NodesPerCommit is "
-                        + chronometer.getSeconds());
-    }
-
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicroKernelRelativePath.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicroKernelRelativePath.java
deleted file mode 100644
index 9d28563..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicroKernelRelativePath.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.scenarios;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Chronometer;
-import org.apache.jackrabbit.mk.util.Committer;
-
-public class MicroKernelRelativePath {
-
-    public static void writeNodesSameLevel(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix)
-            throws Exception {
-        Committer commiter = new Committer();
-        chronometer.start();
-        commiter.addPyramidStructure(mk, "/", 0, 0, nodesNumber, nodeNamePrefix);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodesSameLevel is "
-                + chronometer.getSeconds());
-    }
-
-    public static void writeNodes10Children(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix) {
-        Committer commiter = new Committer();
-        chronometer.start();
-
-        commiter.addPyramidStructure(mk, "/", 0, 10, nodesNumber,
-                nodeNamePrefix);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes10Children is "
-                + chronometer.getSeconds());
-    }
-
-    public static void writeNodes100Children(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix) {
-        Committer commiter = new Committer();
-        chronometer.start();
-        commiter.addPyramidStructure(mk, "/", 0, 100, nodesNumber,
-                nodeNamePrefix);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes100Children is "
-                + chronometer.getSeconds());
-    }
-}
\ No newline at end of file
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicrokernelDifferentStructures.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicrokernelDifferentStructures.java
deleted file mode 100644
index ec82b18..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/scenarios/MicrokernelDifferentStructures.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.scenarios;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Chronometer;
-import org.apache.jackrabbit.mk.util.Committer;
-import org.apache.jackrabbit.mk.util.MicroKernelOperation;
-
-public class MicrokernelDifferentStructures {
-
-    /**
-     * Tree structure:
-     * <p>
-     * rootNode (/)
-     * <p>
-     * N0 N1... Nn-1 Nn
-     */
-
-    public static void writeNodesSameLevel(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix) {
-
-        String diff = MicroKernelOperation.buildPyramidDiff("/", 0, 0,
-                nodesNumber, nodeNamePrefix, new StringBuilder()).toString();
-        Committer committer = new Committer();
-        chronometer.start();
-        committer.addNodes(mk, diff, 0);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodesSameLevel is "
-                + chronometer.getSeconds());
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * rootNode (/)
-     * <p>
-     * N0
-     * <p>
-     * N1
-     * <p>
-     * N2
-     * <p>
-     * N3
-     */
-    public static void writeNodes1Child(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix) {
-
-        String diff = MicroKernelOperation.buildPyramidDiff("/", 0, 1,
-                nodesNumber, nodeNamePrefix, new StringBuilder()).toString();
-        Committer committer = new Committer();
-        chronometer.start();
-        committer.addNodes(mk, diff, 0);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes1Child is "
-                + chronometer.getSeconds());
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * Number of nodes per <b>level</b> = 10^(<b>level</b>).
-     * <p>
-     * Each node has 10 children.
-     */
-    public static void writeNodes10Children(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix) {
-
-        String diff = MicroKernelOperation.buildPyramidDiff("/", 0, 10,
-                nodesNumber, nodeNamePrefix, new StringBuilder()).toString();
-        Committer committer = new Committer();
-        chronometer.start();
-        committer.addNodes(mk, diff, 0);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes10Children is "
-                + chronometer.getSeconds());
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * Number of nodes per <b>level</b> = 100^(<b>level</b>).
-     * <p>
-     * Each node has 100 children.
-     */
-    public static void writeNodes100Children(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix) {
-
-        String diff = MicroKernelOperation.buildPyramidDiff("/", 0, 100,
-                nodesNumber, nodeNamePrefix, new StringBuilder()).toString();
-        Committer committer = new Committer();
-        chronometer.start();
-        committer.addNodes(mk, diff, 0);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes100Children is "
-                + chronometer.getSeconds());
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * Number of nodes per <b>level</b> = 1000^(<b>level</b>).
-     * <p>
-     * Each node has 1000 children.
-     */
-    public static void writeNodes1000Children(MicroKernel mk,
-            Chronometer chronometer, long nodesNumber, String nodeNamePrefix) {
-
-        String diff = MicroKernelOperation.buildPyramidDiff("/", 0, 1000,
-                nodesNumber, nodeNamePrefix, new StringBuilder()).toString();
-        Committer committer = new Committer();
-        chronometer.start();
-        committer.addNodes(mk, diff, 0);
-        chronometer.stop();
-        System.out.println("Total time for testWriteNodes1000Children is "
-                + chronometer.getSeconds());
-    }
-}
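
For reference, the pyramid layout used by the scenarios above gives node
index i the children i*k+1 .. i*k+k, where k is numberOfChildren, and the
recursion stops once an index reaches nodesNumber. A small standalone
sketch (hypothetical class, not part of this module) that prints the
resulting paths:

    public class PyramidLayoutDemo {

        // Print this node's path, then recurse into its k children.
        static void print(String path, int index, int k, long nodesNumber) {
            if (index >= nodesNumber) {
                return;
            }
            System.out.println(path + "N" + index);
            for (int i = 1; i <= k; i++) {
                print(path + "N" + index + "/", index * k + i, k, nodesNumber);
            }
        }

        public static void main(String[] args) {
            // k = 2, 7 nodes: /N0, /N0/N1, /N0/N1/N3, /N0/N1/N4, /N0/N2, ...
            print("/", 0, 2, 7);
        }
    }
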
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/tasks/GenericWriteTask.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/tasks/GenericWriteTask.java
deleted file mode 100644
index a952216..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/tasks/GenericWriteTask.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.tasks;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Committer;
-
-public class GenericWriteTask implements Runnable {
-
-    MicroKernel mk;
-    Committer committer;
-    String diff;
-    int nodesPerCommit;
-
-    public GenericWriteTask(MicroKernel mk, String diff, int nodesPerCommit) {
-        this.mk = mk;
-        this.diff = diff;
-        this.nodesPerCommit = nodesPerCommit;
-        committer = new Committer();
-    }
-
-    @Override
-    public void run() {
-        committer.addNodes(mk, diff, nodesPerCommit);
-    }
-}
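
The concurrent scenario classes that drive GenericWriteTask (for example
ConcurrentAddNodes1Commit) are not part of this diff, so the driver below
is only a sketch. It assumes the mks, diff and chronometer fields provided
by ConcurrentMicroKernelTestBase and a caller that declares
InterruptedException:

    ArrayList<Thread> workers = new ArrayList<Thread>();
    for (MicroKernel mk : mks) {
        workers.add(new Thread(new GenericWriteTask(mk, diff, 50)));
    }
    chronometer.start();
    for (Thread t : workers) {
        t.start();
    }
    for (Thread t : workers) {
        t.join(); // wait for all commits before stopping the clock
    }
    chronometer.stop();
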
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/ConcurrentMicroKernelTestBase.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/ConcurrentMicroKernelTestBase.java
deleted file mode 100644
index 0c8ffda..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/ConcurrentMicroKernelTestBase.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.testing;
-
-import java.util.ArrayList;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Chronometer;
-import org.apache.jackrabbit.mk.util.Configuration;
-import org.apache.jackrabbit.mk.util.MicroKernelConfigProvider;
-import org.junit.Before;
-import org.junit.BeforeClass;
-
-public class ConcurrentMicroKernelTestBase {
-    public static int mkNumber = 3;
-    public ArrayList<MicroKernel> mks;
-    public Chronometer chronometer;
-    static MicroKernelInitializer initializator;
-    static Configuration conf;
-
-    /**
-     * Loads the corresponding microkernel initialization class and the
-     * microkernel configuration. The method searches for the <b>mk.type</b>
-     * system property in order to initialize the proper microkernel. By
-     * default, the Oak microkernel is instantiated.
-     * 
-     * @throws Exception
-     */
-    @BeforeClass
-    public static void beforeSuite() throws Exception {
-
-        initializator = new OakMicroKernelInitializer();
-        System.out.println("Tests will run against ***"
-                + initializator.getType() + "***");
-        conf = MicroKernelConfigProvider.readConfig();
-    }
-
-    /**
-     * Creates a microkernel collection with {@code mkNumber} microkernels.
-     * 
-     * @throws Exception
-     */
-    @Before
-    public void beforeTest() throws Exception {
-        mks = new MicroKernelCollection(initializator, conf, mkNumber)
-                .getMicroKernels();
-        chronometer = new Chronometer();
-    }
-
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelCollection.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelCollection.java
deleted file mode 100644
index 995c924..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelCollection.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.testing;
-
-import java.util.ArrayList;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Configuration;
-
-/**
- * Represents a collection of microkernels.
- */
-public class MicroKernelCollection {
-    ArrayList<MicroKernel> mks;
-
-    /**
-     * Initializes a collection of microkernels. All microkernels share the
-     * same configuration.
-     * 
-     * @param initializator
-     *            The initialization class of a particular microkernel type.
-     * @param conf
-     *            The microkernel configuration data.
-     * @param size
-     *            The number of microkernels to create.
-     * @throws Exception
-     */
-    public MicroKernelCollection(MicroKernelInitializer initializator,
-            Configuration conf, int size) throws Exception {
-        mks = initializator.init(conf, size);
-    }
-
-    /**
-     * Returns the microkernels in this collection.
-     * 
-     * @return A list of initialized microkernels.
-     */
-    public ArrayList<MicroKernel> getMicroKernels() {
-        return mks;
-    }
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelInitializer.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelInitializer.java
deleted file mode 100644
index 0e89e00..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelInitializer.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.testing;
-
-import java.util.ArrayList;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Configuration;
-
-/**
- * Interface for microkernel initialization.
- */
-public interface MicroKernelInitializer {
-    public ArrayList<MicroKernel> init(Configuration conf, int mksNumber)
-            throws Exception;
-
-    public String getType();
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelTestBase.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelTestBase.java
deleted file mode 100644
index 4340edf..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/MicroKernelTestBase.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.testing;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.util.Chronometer;
-import org.apache.jackrabbit.mk.util.Configuration;
-import org.apache.jackrabbit.mk.util.MicroKernelConfigProvider;
-import org.junit.Before;
-import org.junit.BeforeClass;
-
-/**
- * Base class for tests that use a single microkernel instance.
- */
-public abstract class MicroKernelTestBase {
-
-    protected static MicroKernelInitializer initializator;
-    public MicroKernel mk;
-    public static Configuration conf;
-    public Chronometer chronometer;
-
-    /**
-     * Loads the corresponding microkernel initialization class and the
-     * microkernel configuration. The method searches for the <b>mk.type</b>
-     * system property in order to initialize the proper microkernel. By
-     * default, the Oak microkernel is instantiated.
-     * 
-     * @throws Exception
-     */
-    @BeforeClass
-    public static void beforeSuite() throws Exception {
-        initializator = new OakMicroKernelInitializer();
-        System.out.println("Tests will run against ***"
-                + initializator.getType() + "***");
-        conf = MicroKernelConfigProvider.readConfig();
-    }
-
-    /**
-     * Creates a microkernel collection with only one microkernel.
-     * 
-     * @throws Exception
-     */
-    @Before
-    public void beforeTest() throws Exception {
-
-        mk = (new MicroKernelCollection(initializator, conf, 1))
-                .getMicroKernels().get(0);
-        chronometer = new Chronometer();
-    }
-
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/OakMicroKernelInitializer.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/OakMicroKernelInitializer.java
deleted file mode 100644
index 40eaf12..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/testing/OakMicroKernelInitializer.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.testing;
-
-import java.util.ArrayList;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-import org.apache.jackrabbit.mk.core.MicroKernelImpl;
-import org.apache.jackrabbit.mk.core.Repository;
-import org.apache.jackrabbit.mk.util.Configuration;
-
-/**
- * Initializes a {@code MicroKernelImpl}. A new {@code Repository} is created for
- * each initialization.
- */
-public class OakMicroKernelInitializer implements MicroKernelInitializer {
-
-    @Override
-    public ArrayList<MicroKernel> init(Configuration conf, int mksNumber)
-            throws Exception {
-        ArrayList<MicroKernel> mks = new ArrayList<MicroKernel>();
-        Repository rep = new Repository(conf.getStoragePath()
-                + System.currentTimeMillis());
-        rep.init();
-        for (int i = 0; i < mksNumber; i++) {
-            mks.add(new MicroKernelImpl(rep));
-        }
-        return mks;
-    }
-
-    @Override
-    public String getType() {
-        return "Oak Microkernel";
-    }
-
-}
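
A short usage sketch, assuming a Configuration obtained from
MicroKernelConfigProvider.readConfig(). All returned instances share one
Repository, so the concurrent tests exercise a single underlying store:

    MicroKernelInitializer initializer = new OakMicroKernelInitializer();
    // Three MicroKernelImpl instances backed by the same Repository.
    ArrayList<MicroKernel> mks = initializer.init(conf, 3);
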
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Chronometer.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Chronometer.java
deleted file mode 100644
index fdd1af8..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Chronometer.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-public final class Chronometer {
-    private long begin, end;
-
-    public void start() {
-        begin = System.currentTimeMillis();
-    }
-
-    public void stop() {
-        end = System.currentTimeMillis();
-    }
-
-    public long getTime() {
-        return end - begin;
-    }
-
-    public long getMilliseconds() {
-        return end - begin;
-    }
-
-    public double getSeconds() {
-        return (end - begin) / 1000.0;
-    }
-
-    public double getMinutes() {
-        return (end - begin) / 60000.0;
-    }
-
-    public double getHours() {
-        return (end - begin) / 3600000.0;
-    }
-}
\ No newline at end of file
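
A minimal usage sketch; doWork() stands in for a hypothetical workload.
Note that the timer wraps System.currentTimeMillis(), so its resolution is
whole milliseconds; System.nanoTime() would be the usual choice for
finer-grained measurements:

    Chronometer chronometer = new Chronometer();
    chronometer.start();
    doWork(); // hypothetical workload under measurement
    chronometer.stop();
    System.out.println(chronometer.getSeconds() + " s ("
            + chronometer.getMilliseconds() + " ms)");
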
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Committer.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Committer.java
deleted file mode 100644
index 569c1eb..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Committer.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-import org.apache.jackrabbit.mk.api.MicroKernel;
-
-public class Committer {
-
-    /**
-     * Add nodes to the repository.
-     * 
-     * @param mk
-     *            The microkernel that is performing the action.
-     * @param diff
-     *            The diff that is committed. All nodes must be defined by
-     *            their absolute path.
-     * @param nodesPerCommit
-     *            Number of nodes per commit.
-     */
-    public void addNodes(MicroKernel mk, String diff, int nodesPerCommit) {
-
-        if (nodesPerCommit == 0) {
-            mk.commit("", diff.toString(), null, "");
-            return;
-        }
-        String[] string = diff.split(System.getProperty("line.separator"));
-        int i = 0;
-        StringBuilder finalCommit = new StringBuilder();
-        for (String line : string) {
-            finalCommit.append(line);
-            i++;
-            if (i == nodesPerCommit) {
-                mk.commit("", finalCommit.toString(), null, "");
-                finalCommit.setLength(0);
-                i = 0;
-            }
-        }
-        if (finalCommit.length() > 0)
-            mk.commit("", finalCommit.toString(), null, "");
-    }
-
-    /**
-     * Adds an empty node to the repository.
-     * 
-     * @param mk
-     *            Microkernel that is performing the action.
-     * @param parentPath
-     *            The path of the parent node.
-     * @param name
-     *            Name of the node.
-     */
-    public void addNode(MicroKernel mk, String parentPath, String name) {
-        mk.commit(parentPath, "+\"" + name + "\" : {} \n", null, "");
-    }
-
-    /**
-     * Recursively builds a pyramid tree structure. Each node is added in a
-     * separate commit.
-     * 
-     * @param mk
-     *            Microkernel used for adding nodes.
-     * @param startingPoint
-     *            The path where the node will be added.
-     * @param index
-     *            The index of the node to add at this level.
-     * @param numberOfChildren
-     *            Number of children per level.
-     * @param nodesNumber
-     *            The total number of nodes.
-     * @param nodePrefixName
-     *            The node's name prefix. The complete node name is
-     *            prefix + index.
-     */
-    public void addPyramidStructure(MicroKernel mk, String startingPoint,
-            int index, int numberOfChildren, long nodesNumber,
-            String nodePrefixName) {
-        // if all the nodes are on the same level
-        if (numberOfChildren == 0) {
-            for (long i = 0; i < nodesNumber; i++) {
-                addNode(mk, startingPoint, nodePrefixName + i);
-                // System.out.println("Created node " + i);
-            }
-            return;
-        }
-        if (index >= nodesNumber)
-            return;
-        addNode(mk, startingPoint, nodePrefixName + index);
-        for (int i = 1; i <= numberOfChildren; i++) {
-            if (!startingPoint.endsWith("/"))
-                startingPoint = startingPoint + "/";
-            addPyramidStructure(mk, startingPoint + nodePrefixName + index,
-                    index * numberOfChildren + i, numberOfChildren,
-                    nodesNumber, nodePrefixName);
-        }
-
-    }
-}
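
A sketch of the batching behaviour of addNodes, assuming an initialized
MicroKernel mk (for example from MicroKernelTestBase). With five add
operations and nodesPerCommit = 2, the diff reaches the microkernel as
three commits (2 + 2 + 1). addNodes splits on the platform line separator,
so the diff must be joined with the same separator:

    String sep = System.getProperty("line.separator");
    StringBuilder diff = new StringBuilder();
    for (int i = 0; i < 5; i++) {
        diff.append("+\"/node").append(i).append("\" : {}").append(sep);
    }
    new Committer().addNodes(mk, diff.toString(), 2); // commits 2 + 2 + 1
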
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Configuration.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Configuration.java
deleted file mode 100644
index c141f57..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/Configuration.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-import java.util.Properties;
-
-public class Configuration {
-
-    private static final String MK_TYPE = "mk.type";
-    private static final String HOST = "hostname";
-    private static final String MONGO_PORT = "mongo.port";
-    private static final String STORAGE_PATH = "storage.path";
-    private static final String DATABASE = "mongo.database";
-
-    private final Properties properties;
-
-    public Configuration(Properties properties) {
-        this.properties = properties;
-    }
-
-    public String getMkType() {
-        return properties.getProperty(MK_TYPE);
-    }
-
-    public String getHost() {
-        return properties.getProperty(HOST);
-    }
-
-    public int getMongoPort() {
-        return Integer.parseInt(properties.getProperty(MONGO_PORT));
-    }
-
-    public String getStoragePath() {
-        return properties.getProperty(STORAGE_PATH);
-    }
-
-    public String getMongoDatabase() {
-
-        return properties.getProperty(DATABASE);
-    }
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/MicroKernelConfigProvider.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/MicroKernelConfigProvider.java
deleted file mode 100644
index 92f2003..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/MicroKernelConfigProvider.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-import java.io.InputStream;
-import java.util.Properties;
-
-public class MicroKernelConfigProvider {
-
-    /**
-     * Reads the microkernel configuration from the given classpath resource.
-     * 
-     * @param resourcePath
-     *            The classpath location of the configuration file.
-     * @return The parsed configuration.
-     * @throws Exception
-     */
-    public static Configuration readConfig(String resourcePath)
-            throws Exception {
-
-        InputStream is = MicroKernelConfigProvider.class
-                .getResourceAsStream(resourcePath);
-
-        Properties properties = new Properties();
-        properties.load(is);
-        is.close();
-        return new Configuration(properties);
-    }
-
-    /**
-     * Reads the microkernel configuration from the default config.cfg resource.
-     * 
-     * @return The parsed configuration.
-     * @throws Exception
-     */
-    public static Configuration readConfig() throws Exception {
-
-        InputStream is = MicroKernelConfigProvider.class
-                .getResourceAsStream("/config.cfg");
-
-        Properties properties = new Properties();
-        properties.load(is);
-        // System.out.println(properties.toString());
-        is.close();
-        return new Configuration(properties);
-    }
-
-}
diff --git a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/MicroKernelOperation.java b/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/MicroKernelOperation.java
deleted file mode 100644
index 42d3603..0000000
--- a/oak-mk-perf/src/main/java/org/apache/jackrabbit/mk/util/MicroKernelOperation.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.util;
-
-/**
- * Utility methods for building node structures.
- */
-public class MicroKernelOperation {
-
-    /**
-     * Builds a diff representing a pyramid node structure.
-     * 
-     * @param startingPoint
-     *            The path where the first node will be added.
-     * @param index
-     *            The index of the node to add at this level.
-     * @param numberOfChildren
-     *            The number of children that each node must have.
-     * @param nodesNumber
-     *            Total number of nodes.
-     * @param nodePrefixName
-     *            The node name prefix.
-     * @param diff
-     *            The builder in which the diff is accumulated. Pass an empty
-     *            StringBuilder to create a new structure.
-     * @return The builder containing the generated diff.
-     */
-    public static StringBuilder buildPyramidDiff(String startingPoint,
-            int index, int numberOfChildren, long nodesNumber,
-            String nodePrefixName, StringBuilder diff) {
-        if (numberOfChildren == 0) {
-            for (long i = 0; i < nodesNumber; i++)
-                diff.append(addNodeToDiff(startingPoint, nodePrefixName + i));
-            return diff;
-        }
-        if (index >= nodesNumber)
-            return diff;
-        diff.append(addNodeToDiff(startingPoint, nodePrefixName + index));
-        // System.out.println("Create node "+ index);
-        for (int i = 1; i <= numberOfChildren; i++) {
-            if (!startingPoint.endsWith("/"))
-                startingPoint = startingPoint + "/";
-            buildPyramidDiff(startingPoint + nodePrefixName + index, index
-                    * numberOfChildren + i, numberOfChildren, nodesNumber,
-                    nodePrefixName, diff);
-        }
-        return diff;
-    }
-
-    private static String addNodeToDiff(String startingPoint, String nodeName) {
-        if (!startingPoint.endsWith("/"))
-            startingPoint = startingPoint + "/";
-
-        return ("+\"" + startingPoint + nodeName + "\" : {\"key\":\"00000000000000000000\"} \n");
-    }
-}
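
For example, a three-node pyramid with two children per node yields the
following diff (the "key" property is what addNodeToDiff generates for
every node, each line ending with a space and a newline):

    StringBuilder diff = MicroKernelOperation.buildPyramidDiff(
            "/", 0, 2, 3, "N", new StringBuilder());
    System.out.print(diff);
    // +"/N0" : {"key":"00000000000000000000"}
    // +"/N0/N1" : {"key":"00000000000000000000"}
    // +"/N0/N2" : {"key":"00000000000000000000"}
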
diff --git a/oak-mk-perf/src/main/resources/config.cfg b/oak-mk-perf/src/main/resources/config.cfg
deleted file mode 100644
index d3aa433..0000000
--- a/oak-mk-perf/src/main/resources/config.cfg
+++ /dev/null
@@ -1,22 +0,0 @@
-#########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-mk.type=oak
-hostname=localhost
-mongo.port=27017
-mongo.database=test
-storage.path=target/mk-tck-repo
\ No newline at end of file
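
A sketch of how this file is consumed at test setup time (see
MicroKernelConfigProvider and Configuration above); the printed values
correspond to the defaults listed here:

    Configuration conf = MicroKernelConfigProvider.readConfig();
    System.out.println(conf.getMkType());      // "oak"
    System.out.println(conf.getHost());        // "localhost"
    System.out.println(conf.getMongoPort());   // 27017
    System.out.println(conf.getStoragePath()); // "target/mk-tck-repo"
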
diff --git a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MKAddNodesRelativePathTest.java b/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MKAddNodesRelativePathTest.java
deleted file mode 100644
index 343288b..0000000
--- a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MKAddNodesRelativePathTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.tests;
-
-import org.apache.jackrabbit.mk.scenarios.MicroKernelRelativePath;
-import org.apache.jackrabbit.mk.testing.MicroKernelTestBase;
-import org.junit.Test;
-
-/**
- * Measures the time needed for writing nodes in different tree structures.
- * Each node is committed separately, and each commit uses the relative path
- * of the parent node.
- */
-
-public class MKAddNodesRelativePathTest extends MicroKernelTestBase {
-
-    static String nodeNamePrefix = "N";
-    static int nodesNumber = 1000;
-
-    @Test
-    public void testWriteNodesSameLevel() throws Exception {
-        MicroKernelRelativePath.writeNodesSameLevel(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-
-    @Test
-    public void testWriteNodes10Children() {
-        MicroKernelRelativePath.writeNodes10Children(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-
-    @Test
-    public void testWriteNodes100Children() {
-        MicroKernelRelativePath.writeNodes100Children(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-}
diff --git a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkAddNodesDifferentStructuresTest.java b/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkAddNodesDifferentStructuresTest.java
deleted file mode 100644
index 173ab03..0000000
--- a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkAddNodesDifferentStructuresTest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.tests;
-
-import org.apache.jackrabbit.mk.scenarios.MicrokernelDifferentStructures;
-import org.apache.jackrabbit.mk.testing.MicroKernelTestBase;
-import org.junit.Test;
-
-/**
- * Measures the time needed for writing nodes in different tree structures.
- * All the nodes are added in a single commit.
- */
-public class MkAddNodesDifferentStructuresTest extends MicroKernelTestBase {
-
-    static long nodesNumber = 100;
-    static String nodeNamePrefix = "N";
-
-    /**
-     * Tree structure:
-     * <p>
-     * rootNode (/)
-     * <p>
-     * N0 N1... Nn-1 Nn
-     */
-    @Test
-    public void testWriteNodesSameLevel() {
-        MicrokernelDifferentStructures.writeNodesSameLevel(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * rootNode (/)
-     * <p>
-     * N0
-     * <p>
-     * N1
-     * <p>
-     * N2
-     * <p>
-     * N3
-     */
-    @Test
-    public void testWriteNodes1Child() {
-        MicrokernelDifferentStructures.writeNodes1Child(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * Number of nodes per <b>level</b> = 10^(<b>level</b>).
-     * <p>
-     * Each node has 10 children.
-     */
-    @Test
-    public void testWriteNodes10Children() {
-        MicrokernelDifferentStructures.writeNodes10Children(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * Number of nodes per <b>level</b> = 100^(<b>level</b>).
-     * <p>
-     * Each node has 100 children.
-     */
-    @Test
-    public void testWriteNodes100Children() {
-        MicrokernelDifferentStructures.writeNodes100Children(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-
-    /**
-     * Tree structure:
-     * <p>
-     * Number of nodes per <b>level</b> = 1000^(<b>level</b>).
-     * <p>
-     * Each node has 1000 children.
-     */
-    @Test
-    public void testWriteNodes1000Children() {
-        MicrokernelDifferentStructures.writeNodes1000Children(mk, chronometer,
-                nodesNumber, nodeNamePrefix);
-    }
-}
diff --git a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkAddNodesMultipleCommitsTest.java b/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkAddNodesMultipleCommitsTest.java
deleted file mode 100644
index 6a03cbe..0000000
--- a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkAddNodesMultipleCommitsTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.tests;
-
-import org.apache.jackrabbit.mk.util.MicroKernelOperation;
-import org.apache.jackrabbit.mk.scenarios.MicroKernelMultipleCommits;
-import org.apache.jackrabbit.mk.testing.MicroKernelTestBase;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- * Measures the time needed for writing the same node structure in one or
- * multiple commit steps.
- * <p>
- * Tree structure:
- * <p>
- * Number of nodes per <b>level</b> = 10^(<b>level</b>).
- * <p>
- * Each node has 10 children.
- */
-
-public class MkAddNodesMultipleCommitsTest extends MicroKernelTestBase {
-
-    static String diff;
-    static int nodesNumber = 1000;
-    static String nodeNamePrefix = "N";
-
-    @BeforeClass
-    public static void prepareDiff() {
-        diff = MicroKernelOperation.buildPyramidDiff("/", 0, 10, nodesNumber,
-                nodeNamePrefix, new StringBuilder()).toString();
-    }
-
-    @Test
-    public void testWriteNodesAllNodes1Commit() {
-        MicroKernelMultipleCommits.writeNodesAllNodes1Commit(mk, diff,
-                chronometer);
-    }
-
-    @Test
-    public void testWriteNodes1NodePerCommit() {
-        MicroKernelMultipleCommits.writeNodes1NodePerCommit(mk, diff,
-                chronometer);
-    }
-
-    @Test
-    public void testWriteNodes50NodesPerCommit() {
-        MicroKernelMultipleCommits.writeNodes50NodesPerCommit(mk, diff,
-                chronometer);
-    }
-
-    @Test
-    public void testWriteNodes1000NodesPerCommit() {
-        MicroKernelMultipleCommits.writeNodes1000NodesPerCommit(mk, diff,
-                chronometer);
-    }
-
-}
diff --git a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkConcurrentAddNodes1CommitTest.java b/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkConcurrentAddNodes1CommitTest.java
deleted file mode 100644
index 0d4cfa6..0000000
--- a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkConcurrentAddNodes1CommitTest.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.tests;
-
-import org.apache.jackrabbit.mk.scenarios.ConcurrentAddNodes1Commit;
-import org.apache.jackrabbit.mk.testing.ConcurrentMicroKernelTestBase;
-import org.junit.Test;
-
-import com.cedarsoft.test.utils.CatchAllExceptionsRule;
-
-/**
- * Test class for microkernel concurrent writing. All the nodes are added in a
- * single commit.
- */
-
-public class MkConcurrentAddNodes1CommitTest extends
-        ConcurrentMicroKernelTestBase {
-
-    // nodes for each worker
-    int nodesNumber = 100;
-
-    /*
-     * Disabled: @Rule public CatchAllExceptionsRule catchAllExceptionsRule =
-     *         new CatchAllExceptionsRule();
-     */
-    @Test
-    public void testConcurentWritingFlatStructure() throws InterruptedException {
-
-        ConcurrentAddNodes1Commit.concurentWritingFlatStructure(mks, 3,
-                nodesNumber, chronometer);
-    }
-
-    @Test
-    public void testConcurentWritingPyramid1() throws InterruptedException {
-
-        ConcurrentAddNodes1Commit.concurentWritingPyramid1(mks, 3, nodesNumber,
-                chronometer);
-    }
-
-    @Test
-    public void testConcurentWritingPyramid2() throws InterruptedException {
-
-        ConcurrentAddNodes1Commit.concurentWritingPyramid2(mks, 3, nodesNumber,
-                chronometer);
-    }
-}
diff --git a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkConcurrentAddNodesMultipleCommitTest.java b/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkConcurrentAddNodesMultipleCommitTest.java
deleted file mode 100644
index f673a25..0000000
--- a/oak-mk-perf/src/test/java/org/apache/jackrabbit/mk/tests/MkConcurrentAddNodesMultipleCommitTest.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.jackrabbit.mk.tests;
-
-import org.apache.jackrabbit.mk.scenarios.ConcurrentAddNodesMultipleCommits;
-import org.apache.jackrabbit.mk.testing.ConcurrentMicroKernelTestBase;
-
-import org.junit.Test;
-
-/**
- * Test class for microkernel concurrent writing. Each worker adds its nodes
- * in batches of {@code numberOfNodesPerCommit} nodes per commit.
- */
-
-public class MkConcurrentAddNodesMultipleCommitTest extends
-        ConcurrentMicroKernelTestBase {
-
-    // nodes for each worker
-    int nodesNumber = 1000;
-    int numberOfNodesPerCommit = 10;
-
-    /*
-     * Disabled: @Rule public CatchAllExceptionsRule catchAllExceptionsRule =
-     *         new CatchAllExceptionsRule();
-     */
-    @Test
-    public void testConcurentWritingFlatStructure() throws InterruptedException {
-        ConcurrentAddNodesMultipleCommits.concurentWritingFlatStructure(mks,
-                mkNumber, nodesNumber, numberOfNodesPerCommit, chronometer);
-    }
-
-    @Test
-    public void testConcurentWritingPyramid1() throws InterruptedException {
-        ConcurrentAddNodesMultipleCommits.concurentWritingPyramid1(mks,
-                mkNumber, nodesNumber, numberOfNodesPerCommit, chronometer);
-
-    }
-
-    @Test
-    public void testConcurentWritingPyramid2() throws InterruptedException {
-        ConcurrentAddNodesMultipleCommits.concurentWritingPyramid2(mks,
-                mkNumber, nodesNumber, numberOfNodesPerCommit, chronometer);
-    }
-
-}
diff --git a/oak-mk-remote/pom.xml b/oak-mk-remote/pom.xml
index c40298c..a384650 100644
--- a/oak-mk-remote/pom.xml
+++ b/oak-mk-remote/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-mk/pom.xml b/oak-mk/pom.xml
index b67c383..acedeb5 100644
--- a/oak-mk/pom.xml
+++ b/oak-mk/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
@@ -114,7 +114,7 @@
     <dependency>
       <groupId>com.h2database</groupId>
       <artifactId>h2</artifactId>
-      <version>${h2.version}</version>
+      <version>1.3.176</version>
       <optional>true</optional>
     </dependency>
 
diff --git a/oak-parent/pom.xml b/oak-parent/pom.xml
index 7258b67..996f8f7 100644
--- a/oak-parent/pom.xml
+++ b/oak-parent/pom.xml
@@ -30,7 +30,7 @@
   <groupId>org.apache.jackrabbit</groupId>
   <artifactId>oak-parent</artifactId>
   <name>Oak Parent POM</name>
-  <version>1.1-SNAPSHOT</version>
+  <version>1.0.0</version>
   <packaging>pom</packaging>
 
   <properties>
@@ -41,7 +41,7 @@
     <project.reporting.outputEncoding>
       ${project.build.sourceEncoding}
     </project.reporting.outputEncoding>
-    <jackrabbit.version>2.8-SNAPSHOT</jackrabbit.version>
+    <jackrabbit.version>2.8.0</jackrabbit.version>
     <mongo.host>127.0.0.1</mongo.host>
     <mongo.port>27017</mongo.port>
     <mongo.db>MongoMKDB</mongo.db>
@@ -57,7 +57,6 @@
     <slf4j.api.version>1.7.6</slf4j.api.version>
     <slf4j.version>1.7.6</slf4j.version> <!-- sync with logback version -->
     <logback.version>1.1.0</logback.version>
-    <h2.version>1.3.176</h2.version>
   </properties>
 
   <issueManagement>
@@ -455,29 +454,19 @@
       </dependencies>
     </profile>
     <profile>
-      <id>rdb-h2</id>
-      <dependencies>
-        <dependency>
-          <groupId>com.h2database</groupId>
-          <artifactId>h2</artifactId>
-          <version>${h2.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
       <!-- requires local copy of DB2 JDBC drivers -->
       <!-- run with -Ddb2.jdbc=foldername where foldername contains the 2 JARs-->
       <id>rdb-db2</id>
       <dependencies>
         <dependency>
-          <groupId>com.ibm.db2.jcc</groupId>
+          <groupId>com.ibm.db2.jcc</groupId>
           <artifactId>db2</artifactId>
           <version>1.0</version>
           <scope>system</scope>
           <systemPath>${db2.jdbc}/db2jcc4.jar</systemPath>
         </dependency>
         <dependency>
-          <groupId>com.ibm.db2.jcc</groupId>
+          <groupId>com.ibm.db2.jcc</groupId>
           <artifactId>db2-license</artifactId>
           <version>1.0</version>
           <scope>system</scope>
@@ -495,4 +484,10 @@
       </properties>
     </profile>
   </profiles>
+
+  <scm>
+    <connection>scm:svn:http://svn.apache.org/repos/asf/jackrabbit/oak/tags/jackrabbit-oak-1.0.0/oak-parent</connection>
+    <developerConnection>scm:svn:https://svn.apache.org/repos/asf/jackrabbit/oak/tags/jackrabbit-oak-1.0.0/oak-parent</developerConnection>
+    <url>http://svn.apache.org/viewvc/jackrabbit/oak/tags/jackrabbit-oak-1.0.0/oak-parent</url>
+  </scm>
 </project>
diff --git a/oak-pojosr/pom.xml b/oak-pojosr/pom.xml
index bcc3920..8840391 100644
--- a/oak-pojosr/pom.xml
+++ b/oak-pojosr/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
diff --git a/oak-run/README.md b/oak-run/README.md
index b10e4c1..6963d0d 100644
--- a/oak-run/README.md
+++ b/oak-run/README.md
@@ -83,19 +83,17 @@
The optional fixture argument allows you to specify the repository
implementation to be used. The following fixtures are currently supported:
 
-| Fixture       | Description                                           |
-|---------------|-------------------------------------------------------|
-| Jackrabbit    | Jackrabbit with the default embedded Derby  bundle PM |
-| Oak-Memory    | Oak with default in-memory storage                    |
-| Oak-MemoryNS  | Oak with default in-memory NodeStore                  |
-| Oak-MemoryMK  | Oak with default in-memory MicroKernel                |
-| Oak-Mongo     | Oak with the default Mongo backend                    |
-| Oak-Mongo-FDS | Oak with the default Mongo backend and FileDataStore  |
-| Oak-MongoNS   | Oak with the Mongo NodeStore                          |
-| Oak-MongoMK   | Oak with the Mongo MicroKernel                        |
-| Oak-Tar       | Oak with the Tar backend (aka Segment NodeStore)      |
-| Oak-Tar-FDS   | Oak with the Tar backend and FileDataStore            |
-| Oak-H2        | Oak with the MK using embedded H2 database            |
+| Fixture      | Description                                           |
+|--------------|-------------------------------------------------------|
+| Jackrabbit   | Jackrabbit with the default embedded Derby bundle PM  |
+| Oak-Memory   | Oak with default in-memory storage                    |
+| Oak-MemoryNS | Oak with default in-memory NodeStore                  |
+| Oak-MemoryMK | Oak with default in-memory MicroKernel                |
+| Oak-Mongo    | Oak with the default Mongo backend                    |
+| Oak-MongoNS  | Oak with the Mongo NodeStore                          |
+| Oak-MongoMK  | Oak with the Mongo MicroKernel                        |
+| Oak-Tar      | Oak with the Tar backend (aka Segment NodeStore)      |
+| Oak-H2       | Oak with the MK using embedded H2 database            |
 
 
 Depending on the fixture the following options are available:
@@ -150,13 +148,7 @@
 cache size in MongoMK and the default H2 MK, and the segment cache
 size in SegmentMK.
 
-The `--concurrency` levels can be specified as comma separated list of values,
-eg: `--concurrency 1,4,8`, which will execute the same test with the number of
-respective threads. Note that the `beforeSuite()` and `afterSuite()` are executed
-before and after the concurrency loop. eg. in the example above, the execution order
-is: `beforeSuite()`, 1x `runTest()`, 4x `runTest()`, 8x `runTest()`, `afterSuite()`.
-Tests that create their own background threads, should be executed with
-`--concurrency 1` which is the default.
+The `--concurrency` levels can be specified as a comma-separated list of
+values, e.g. `--concurrency 1,4,8`, which will execute the same test with
+each of the given numbers of threads. Note that `beforeSuite()` and
+`afterSuite()` are executed before and after the concurrency loop; in the
+example above, the execution order is: `beforeSuite()`, 1x `runTest()`,
+4x `runTest()`, 8x `runTest()`, `afterSuite()`. Tests that create their
+own background threads should be executed with `--concurrency 1`, which
+is the default.
 
 You can use extra JVM options like `-Xmx` settings to better control the
 benchmark environment. It's also possible to attach the JVM to a
@@ -186,18 +178,17 @@
 
 Finally the benchmark runner supports the following repository fixtures:
 
-| Fixture       | Description                                           |
-|---------------|-------------------------------------------------------|
-| Jackrabbit    | Jackrabbit with the default embedded Derby  bundle PM |
-| Oak-Memory    | Oak with default in-memory storage                    |
-| Oak-MemoryNS  | Oak with default in-memory NodeStore                  |
-| Oak-MemoryMK  | Oak with default in-memory MicroKernel                |
-| Oak-Mongo     | Oak with the default Mongo backend                    |
-| Oak-Mongo-FDS | Oak with the default Mongo backend and FileDataStore  |
-| Oak-MongoNS   | Oak with the Mongo NodeStore                          |
-| Oak-MongoMK   | Oak with the Mongo MicroKernel                        |
-| Oak-Tar       | Oak with the Tar backend (aka Segment NodeStore)      |
-| Oak-H2        | Oak with the MK using embedded H2 database            |
+| Fixture      | Description                                           |
+|--------------|-------------------------------------------------------|
+| Jackrabbit   | Jackrabbit with the default embedded Derby bundle PM  |
+| Oak-Memory   | Oak with default in-memory storage                    |
+| Oak-MemoryNS | Oak with default in-memory NodeStore                  |
+| Oak-MemoryMK | Oak with default in-memory MicroKernel                |
+| Oak-Mongo    | Oak with the default Mongo backend                    |
+| Oak-MongoNS  | Oak with the Mongo NodeStore                          |
+| Oak-MongoMK  | Oak with the Mongo MicroKernel                        |
+| Oak-Tar      | Oak with the Tar backend (aka Segment NodeStore)      |
+| Oak-H2       | Oak with the MK using embedded H2 database            |
 
 
 Once started, the benchmark runner will execute each listed test case
diff --git a/oak-run/pom.xml b/oak-run/pom.xml
index f46a61f..e36e6a6 100644
--- a/oak-run/pom.xml
+++ b/oak-run/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
@@ -53,9 +53,6 @@
                 <includes>
                   <include>*</include>
                 </includes>
-                <excludes>
-                  <exclude>${lucene.exclude}</exclude>
-                </excludes>
               </artifactSet>
               <filters>
                 <filter>
@@ -139,18 +136,13 @@
     </dependency>
     <dependency>
       <groupId>org.apache.jackrabbit</groupId>
-      <artifactId>oak-lucene</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.jackrabbit</groupId>
       <artifactId>oak-mk-remote</artifactId>
       <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>com.h2database</groupId>
       <artifactId>h2</artifactId>
-      <version>${h2.version}</version>
+      <version>1.3.175</version>
     </dependency>
     <dependency>
       <groupId>org.mongodb</groupId>
@@ -165,12 +157,6 @@
       <groupId>org.apache.jackrabbit</groupId>
       <artifactId>jackrabbit-core</artifactId>
       <version>${jackrabbit.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.lucene</groupId>
-          <artifactId>lucene-core</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.jackrabbit</groupId>
@@ -183,11 +169,6 @@
       <version>2.0</version>
     </dependency>
     <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-compress</artifactId>
-      <version>1.8</version>
-    </dependency>
-    <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-servlet</artifactId>
       <version>${jetty.version}</version>
@@ -220,64 +201,4 @@
     </dependency>
     
   </dependencies>
-
-  <profiles>
-    <profile>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <id>oak-run-jr2</id>
-      <properties>
-        <lucene.exclude>org.apache.jackrabbit:oak-lucene</lucene.exclude>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.lucene</groupId>
-          <artifactId>lucene-core</artifactId>
-          <version>3.6.0</version>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>oak-run-oak</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <properties>
-        <!--
-        Lucene classes are already part of oak-lucene so exclude
-        them in maven-shade-plugin. Otherwise it causes duplicate
-        classes warning
-        -->
-        <lucene.exclude>org.apache.lucene:*</lucene.exclude>
-      </properties>
-      <dependencies>
-        <!--
-         oak-lucene embeds the Lucene jar. However when running in IDE
-         the IDE use the module classpath. So need to explicitly list the
-         lucene jars
-        -->
-        <dependency>
-          <groupId>org.apache.lucene</groupId>
-          <artifactId>lucene-core</artifactId>
-          <version>${lucene.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.lucene</groupId>
-          <artifactId>lucene-analyzers-common</artifactId>
-          <version>${lucene.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.lucene</groupId>
-          <artifactId>lucene-queryparser</artifactId>
-          <version>${lucene.version}</version>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.lucene</groupId>
-          <artifactId>lucene-queries</artifactId>
-          <version>${lucene.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
 </project>
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java
index bd22696..489cf8a 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/AbstractTest.java
@@ -39,7 +39,7 @@
 /**
  * Abstract base class for individual performance benchmarks.
  */
-abstract class AbstractTest<T> extends Benchmark implements CSVResultGenerator {
+abstract class AbstractTest extends Benchmark implements CSVResultGenerator {
 
     /**
      * A random string to guarantee concurrently running tests don't overwrite
@@ -133,7 +133,7 @@
         }
         for (RepositoryFixture fixture : fixtures) {
             try {
-                Repository[] cluster = createRepository(fixture);
+                Repository[] cluster = fixture.setUpCluster(1);
                 try {
                     runTest(fixture, cluster[0], concurrencyLevels);
                 } finally {
@@ -208,17 +208,14 @@
 
         @Override
         public void run() {
-            T context = null;
             try {
-                context = prepareThreadExecutionContext();
                 while (running) {
-                    statistics.addValue(execute(context));
+                    statistics.addValue(execute());
                 }
             } catch (Exception e) {
                 e.printStackTrace();
-            } finally {
-                disposeThreadExecutionContext(context);
             }
         }
     }
 
@@ -242,8 +239,7 @@
             for (Thread t: threads) {
                 t.start();
             }
-
-            //System.out.printf("Started %d threads%n", threads.size());
+            System.out.printf("Started %d threads%n", threads.size());
 
             // Run test iterations, and capture the execution times
             long runtimeEnd = System.currentTimeMillis() + RUNTIME;
@@ -282,23 +278,6 @@
             afterTest();
         }
     }
-
-    private long execute(T executionContext) throws Exception {
-        if(executionContext == null){
-            return execute();
-        }
-
-        beforeTest(executionContext);
-        try {
-            long start = System.currentTimeMillis();
-            // System.out.println("execute " + this);
-            runTest(executionContext);
-            return System.currentTimeMillis() - start;
-        } finally {
-            afterTest(executionContext);
-        }
-    }
-
     /**
      * Cleans up after this performance benchmark.
      *
@@ -355,36 +334,6 @@
     protected void afterSuite() throws Exception {
     }
 
-    /**
-     * Invoked before the thread starts. If the test later requires
-     * some thread local context e.g. JCR session per thread then sub
-     * classes can return a context instance. That instance would be
-     * passed as part of runTest call
-     *
-     * @return context instance to be used for runTest call for the
-     * current thread
-     */
-    protected T prepareThreadExecutionContext() {
-        return null;
-    }
-
-    protected void disposeThreadExecutionContext(T context) {
-
-    }
-
-    protected void afterTest(T executionContext) {
-
-    }
-
-    protected void runTest(T executionContext)  throws Exception {
-        throw new IllegalStateException("If thread execution context is used then subclass must " +
-                "override this method");
-    }
-
-    protected void beforeTest(T executionContext) {
-
-    }
-
     protected void failOnRepositoryVersions(String... versions)
             throws RepositoryException {
         String repositoryVersion =
@@ -486,11 +435,4 @@
         threads.add(thread);
     }
 
-    /**
-     * Customize the repository creation process by custom fixture handling
-     */
-    protected Repository[] createRepository(RepositoryFixture fixture) throws Exception {
-        return fixture.setUpCluster(1);
-    }
-
 }
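
For readers comparing the two APIs: the hunks above revert `AbstractTest` to
its non-generic form, dropping the per-thread execution context hooks. A
minimal sketch of a benchmark written against the removed 1.1.x-style API
(the class name and the use of `loginWriter()` are illustrative assumptions,
not code from this repository):

    import javax.jcr.Session;

    class ExampleSessionTest extends AbstractTest<ExampleSessionTest.TestContext> {

        class TestContext {
            final Session session = loginWriter(); // one writer session per thread
        }

        @Override
        protected TestContext prepareThreadExecutionContext() {
            return new TestContext(); // called once per worker thread, before its loop
        }

        @Override
        protected void runTest(TestContext ctx) throws Exception {
            ctx.session.getRootNode().getNodes(); // uses the thread's own session
        }

        @Override
        protected void disposeThreadExecutionContext(TestContext ctx) {
            ctx.session.logout(); // called once per worker thread, after its loop
        }

        @Override
        protected void runTest() throws Exception {
            runTest(new TestContext()); // single-threaded fallback path
        }
    }

The deleted `FullTextSearchTest` below follows exactly this pattern, carrying
a JCR session and a prepared word list per thread.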
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java
index 51fe103..d76c6c2 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/BenchmarkRunner.java
@@ -55,14 +55,9 @@
                 .defaultsTo("64".equals(System.getProperty("sun.arch.data.model")));
         OptionSpec<Integer> cache = parser.accepts("cache", "cache size (MB)")
                 .withRequiredArg().ofType(Integer.class).defaultsTo(100);
-        OptionSpec<Integer> fdsCache = parser.accepts("blobCache", "cache size (MB)")
-                .withRequiredArg().ofType(Integer.class).defaultsTo(32);
-        OptionSpec<File> wikipedia = parser
-                .accepts("wikipedia", "Wikipedia dump").withRequiredArg()
-                .ofType(File.class);
-        OptionSpec<Boolean> withStorage = parser
-                .accepts("storage", "Index storage enabled").withOptionalArg()
-                .ofType(Boolean.class);
+        OptionSpec<File> wikipedia =
+                parser.accepts("wikipedia", "Wikipedia dump")
+                .withRequiredArg().ofType(File.class);
         OptionSpec<Boolean> runAsAdmin = parser.accepts("runAsAdmin", "Run test using admin session")
                 .withRequiredArg().ofType(Boolean.class).defaultsTo(Boolean.FALSE);
         OptionSpec<Integer> itemsToRead = parser.accepts("itemsToRead", "Number of items to read")
@@ -77,7 +72,7 @@
                         .defaultsTo(Boolean.FALSE);
         OptionSpec<File> csvFile = parser.accepts("csvFile", "File to write a CSV version of the benchmark data.")
                 .withOptionalArg().ofType(File.class);
-        OptionSpec<Boolean> flatStructure = parser.accepts("flatStructure", "Whether the test should use a flat structure or not.")
+        OptionSpec<Boolean> flatStructure = parser.accepts("flatStructure", "Whether user/group should be set up with a flat structure or not.")
                 .withOptionalArg().ofType(Boolean.class).defaultsTo(Boolean.FALSE);
         OptionSpec<Integer> numberOfUsers = parser.accepts("numberOfUsers")
                 .withOptionalArg().ofType(Integer.class).defaultsTo(10000);
@@ -94,12 +89,6 @@
                         host.value(options), port.value(options),
                         dbName.value(options), dropDBAfterTest.value(options),
                         cacheSize * MB),
-                OakRepositoryFixture.getMongoWithFDS(
-                        host.value(options), port.value(options),
-                        dbName.value(options), dropDBAfterTest.value(options),
-                        cacheSize * MB,
-                        base.value(options),
-                        fdsCache.value(options)),
                 OakRepositoryFixture.getMongoNS(
                         host.value(options), port.value(options),
                         dbName.value(options), dropDBAfterTest.value(options),
@@ -109,8 +98,6 @@
                         dbName.value(options), dropDBAfterTest.value(options),
                         cacheSize * MB),
                 OakRepositoryFixture.getTar(
-                        base.value(options), 256, cacheSize, mmap.value(options)),
-                OakRepositoryFixture.getTarWithBlobStore(
                         base.value(options), 256, cacheSize, mmap.value(options))
         };
         Benchmark[] allBenchmarks = new Benchmark[] {
@@ -144,10 +131,7 @@
             new CreateManyNodesTest(),
             new UpdateManyChildNodesTest(),
             new TransientManyChildNodesTest(),
-            new WikipediaImport(
-                    wikipedia.value(options),
-                    flatStructure.value(options),
-                    report.value(options)),
+            new WikipediaImport(wikipedia.value(options)),
             new CreateNodesBenchmark(),
             new ManyNodes(),
             new ObservationTest(),
@@ -216,11 +200,7 @@
                     flatStructure.value(options)),
             new GetPrincipalTest(
                     numberOfUsers.value(options),
-                    flatStructure.value(options)),
-            new FullTextSearchTest(
-                    wikipedia.value(options),
-                    flatStructure.value(options),
-                    report.value(options), withStorage.value(options))
+                    flatStructure.value(options))
         };
 
         Set<String> argset = Sets.newHashSet(options.nonOptionArguments());
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/FullTextSearchTest.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/FullTextSearchTest.java
deleted file mode 100644
index e0c336c..0000000
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/FullTextSearchTest.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.jackrabbit.oak.benchmark;
-
-import static com.google.common.collect.Lists.newArrayList;
-import static com.google.common.collect.Sets.newHashSet;
-
-import java.io.File;
-import java.util.List;
-import java.util.Random;
-import java.util.Set;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.jcr.Node;
-import javax.jcr.Repository;
-import javax.jcr.Session;
-import javax.jcr.query.Query;
-import javax.jcr.query.QueryManager;
-import javax.jcr.query.QueryResult;
-import javax.jcr.query.RowIterator;
-
-import org.apache.jackrabbit.oak.benchmark.wikipedia.WikipediaImport;
-import org.apache.jackrabbit.oak.fixture.JcrCustomizer;
-import org.apache.jackrabbit.oak.fixture.OakRepositoryFixture;
-import org.apache.jackrabbit.oak.fixture.RepositoryFixture;
-import org.apache.jackrabbit.oak.jcr.Jcr;
-import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexEditorProvider;
-import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexProvider;
-import org.apache.jackrabbit.oak.plugins.index.lucene.util.LuceneInitializerHelper;
-import org.apache.jackrabbit.oak.spi.commit.Observer;
-import org.apache.jackrabbit.oak.spi.query.QueryIndexProvider;
-
-public class FullTextSearchTest extends AbstractTest<FullTextSearchTest.TestContext> {
-
-    /**
-     * Pattern used to find words and other searchable tokens within the
-     * imported Wikipedia pages.
-     */
-    private static final Pattern WORD_PATTERN =
-            Pattern.compile("\\p{LD}{3,}");
-
-    private int maxSampleSize = 100;
-
-    private final WikipediaImport importer;
-
-    private final Set<String> sampleSet = newHashSet();
-
-    private final Random random = new Random(42); //fixed seed
-
-    private int count = 0;
-
-    private int maxRowsToFetch = Integer.getInteger("maxRowsToFetch",100);
-
-    private TestContext defaultContext;
-
-    /**
-     * null means true; true means true
-     */
-    private Boolean storageEnabled;
-
-    public FullTextSearchTest(File dump, boolean flat, boolean doReport, Boolean storageEnabled) {
-        this.importer = new WikipediaImport(dump, flat, doReport) {
-            @Override
-            protected void pageAdded(String title, String text) {
-                count++;
-                if (count % 100 == 0
-                        && sampleSet.size() < maxSampleSize
-                        && text != null) {
-                    List<String> words = newArrayList();
-
-                    Matcher matcher = WORD_PATTERN.matcher(text);
-                    while (matcher.find()) {
-                        words.add(matcher.group());
-                    }
-
-                    if (!words.isEmpty()) {
-                        sampleSet.add(words.get(words.size() / 2));
-                    }
-                }
-            }
-        };
-        this.storageEnabled = storageEnabled;
-    }
-
-    @Override
-    public void beforeSuite() throws Exception {
-        random.setSeed(42);
-        sampleSet.clear();
-        count = 0;
-
-        importer.importWikipedia(loginWriter());
-        Thread.sleep(10); // allow some time for the indexer to catch up
-
-        defaultContext = new TestContext();
-    }
-
-    @Override
-    protected TestContext prepareThreadExecutionContext() {
-        return new TestContext();
-    }
-
-    @Override
-    protected void runTest() throws Exception {
-        runTest(defaultContext);
-    }
-
-    @SuppressWarnings("deprecation")
-    @Override
-    protected void runTest(TestContext ec)  throws Exception {
-        QueryManager qm = ec.session.getWorkspace().getQueryManager();
-        // TODO verify why "order by jcr:score()" accounts for what looks
-        // like > 20% of the perf lost in Collections.sort
-        for (String word : ec.words) {
-            Query q = qm.createQuery("//*[jcr:contains(@text, '" + word + "')] ", Query.XPATH);
-            QueryResult r = q.execute();
-            RowIterator it = r.getRows();
-            for (int rows = 0; it.hasNext() && rows < maxRowsToFetch; rows++) {
-                Node n = it.nextRow().getNode();
-                ec.hash += n.getProperty("text").getString().hashCode();
-                ec.hash += n.getProperty("title").getString().hashCode();
-            }
-        }
-    }
-
-    class TestContext {
-        final Session session = loginWriter();
-        final String[] words = getRandomWords();
-        int hash = 0; // summary variable to prevent JIT compiler tricks
-    }
-
-    private String[] getRandomWords() {
-        List<String> samples = newArrayList(sampleSet);
-        String[] words = new String[100];
-        for (int i = 0; i < words.length; i++) {
-            words[i] = samples.get(random.nextInt(samples.size()));
-        }
-        return words;
-    }
-
-    @Override
-    protected Repository[] createRepository(RepositoryFixture fixture) throws Exception {
-        if (fixture instanceof OakRepositoryFixture) {
-            return ((OakRepositoryFixture) fixture).setUpCluster(1, new JcrCustomizer() {
-                @Override
-                public Jcr customize(Jcr jcr) {
-                    LuceneIndexProvider provider = new LuceneIndexProvider();
-                    jcr.with((QueryIndexProvider) provider)
-                       .with((Observer) provider)
-                       .with(new LuceneIndexEditorProvider())
-                       .with(new LuceneInitializerHelper("luceneGlobal", storageEnabled));
-                    return jcr;
-                }
-            });
-        }
-        return super.createRepository(fixture);
-    }
-
-}
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/wikipedia/WikipediaImport.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/wikipedia/WikipediaImport.java
index b7e1118..3e1266d 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/wikipedia/WikipediaImport.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/benchmark/wikipedia/WikipediaImport.java
@@ -17,16 +17,12 @@
 package org.apache.jackrabbit.oak.benchmark.wikipedia;
 
 import static com.google.common.base.Preconditions.checkState;
-import static java.lang.Math.min;
 
-import java.io.BufferedInputStream;
 import java.io.File;
-import java.io.FileInputStream;
 
 import javax.jcr.Node;
 import javax.jcr.NodeIterator;
 import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
 import javax.jcr.Session;
 import javax.jcr.SimpleCredentials;
 import javax.xml.stream.XMLInputFactory;
@@ -34,8 +30,6 @@
 import javax.xml.stream.XMLStreamReader;
 import javax.xml.transform.stream.StreamSource;
 
-import org.apache.commons.compress.compressors.CompressorStreamFactory;
-import org.apache.jackrabbit.commons.JcrUtils;
 import org.apache.jackrabbit.oak.benchmark.Benchmark;
 import org.apache.jackrabbit.oak.fixture.RepositoryFixture;
 import org.apache.jackrabbit.util.Text;
@@ -44,14 +38,8 @@
 
     private final File dump;
 
-    private final boolean doReport;
-
-    private final boolean flat;
-
-    public WikipediaImport(File dump, boolean flat, boolean doReport) {
+    public WikipediaImport(File dump) {
         this.dump = dump;
-        this.flat = flat;
-        this.doReport = doReport;
     }
 
     @Override
@@ -87,49 +75,27 @@
                 new SimpleCredentials("admin", "admin".toCharArray()));
         try {
             int before = importWikipedia(session);
-            int after = new Traversal().traverse(session);
+            int after = traverseWikipedia(session);
             checkState(before == after, "Import vs. traverse mismatch");
         } finally {
             session.logout();
         }
     }
 
-    public int importWikipedia(Session session) throws Exception {
+    private int importWikipedia(Session session) throws Exception {
         long start = System.currentTimeMillis();
         int count = 0;
         int code = 0;
 
-        if(doReport) {
-            System.out.format("Importing %s...%n", dump);
-        }
-
-        String type = "nt:unstructured";
-        if (session.getWorkspace().getNodeTypeManager().hasNodeType("oak:Unstructured")) {
-            type = "oak:Unstructured";
-        }
-        Node wikipedia = session.getRootNode().addNode("wikipedia", type);
-
-        int levels = 0;
-        if (!flat) {
-            // calculate the number of levels needed, based on the rough
-            // estimate that the average XML size of a page is about 1kB
-            for (long pages = dump.length() / 1024; pages > 256; pages /= 256) {
-                levels++;
-            }
-        }
+        System.out.format("Importing %s...%n", dump);
+        Node wikipedia = session.getRootNode().addNode(
+                "wikipedia", "oak:Unstructured");
 
         String title = null;
         String text = null;
         XMLInputFactory factory = XMLInputFactory.newInstance();
-        StreamSource source;
-        if (dump.getName().endsWith(".xml")) {
-            source = new StreamSource(dump);
-        } else {
-            CompressorStreamFactory csf = new CompressorStreamFactory();
-            source = new StreamSource(csf.createCompressorInputStream(
-                    new BufferedInputStream(new FileInputStream(dump))));
-        }
-        XMLStreamReader reader = factory.createXMLStreamReader(source);
+        XMLStreamReader reader =
+                factory.createXMLStreamReader(new StreamSource(dump));
         while (reader.hasNext()) {
             switch (reader.next()) {
             case XMLStreamConstants.START_ELEMENT:
@@ -142,34 +108,18 @@
             case XMLStreamConstants.END_ELEMENT:
                 if ("page".equals(reader.getLocalName())) {
                     String name = Text.escapeIllegalJcrChars(title);
-                    Node parent = wikipedia;
-                    if (levels > 0) {
-                        int n = name.length();
-                        for (int i = 0; i < levels; i++) {
-                            int hash = name.substring(min(i, n)).hashCode();
-                            parent = JcrUtils.getOrAddNode(
-                                    parent, String.format("%02x", hash & 0xff));
-                        }
-                    }
-                    Node page = parent.addNode(name);
+                    Node page = wikipedia.addNode(name);
                     page.setProperty("title", title);
                     page.setProperty("text", text);
                     code += title.hashCode();
                     code += text.hashCode();
                     count++;
                     if (count % 1000 == 0) {
-                        if (!flat) {
-                            session.save();
-                        }
-                        if (doReport) {
-                            long millis = System.currentTimeMillis() - start;
-                            System.out.format(
-                                    "Added %d pages in %d seconds (%.2fms/page)%n",
-                                    count, millis / 1000, (double) millis / count);
-                        }
+                        long millis = System.currentTimeMillis() - start;
+                        System.out.format(
+                                "Added %d pages in %d seconds (%.2fms/page)%n",
+                                count, millis / 1000, (double) millis / count);
                     }
-
-                    pageAdded(title, text);
                 }
                 break;
             }
@@ -177,61 +127,40 @@
 
         session.save();
 
-        if (doReport) {
-            long millis = System.currentTimeMillis() - start;
-            System.out.format(
-                    "Imported %d pages in %d seconds (%.2fms/page)%n",
-                    count, millis / 1000, (double) millis / count);
-        }
-
+        long millis = System.currentTimeMillis() - start;
+        System.out.format(
+                "Imported %d pages in %d seconds (%.2fms/page)%n",
+                count, millis / 1000, (double) millis / count);
         return code;
     }
 
-    protected void pageAdded(String title, String text) {
-    }
+    private int traverseWikipedia(Session session) throws Exception {
+        long start = System.currentTimeMillis();
+        int count = 0;
+        int code = 0;
 
-    private class Traversal {
+        System.out.format("Traversing imported pages...%n");
+        Node wikipedia = session.getNode("/wikipedia");
 
-        private final long start = System.currentTimeMillis();
-        private int count = 0;
-        private int code = 0;
-
-        private int traverse(Session session) throws Exception {
-            System.out.format("Traversing imported pages...%n");
-            Node wikipedia = session.getNode("/wikipedia");
-
-            traverse(wikipedia);
-
-            if (doReport) {
+        NodeIterator pages = wikipedia.getNodes();
+        while (pages.hasNext()) {
+            Node page = pages.nextNode();
+            code += page.getProperty("title").getString().hashCode();
+            code += page.getProperty("text").getString().hashCode();
+            count++;
+            if (count % 1000 == 0) {
                 long millis = System.currentTimeMillis() - start;
                 System.out.format(
-                        "Traversed %d pages in %d seconds (%.2fms/page)%n",
+                        "Read %d pages in %d seconds (%.2fms/page)%n",
                         count, millis / 1000, (double) millis / count);
             }
-
-            return code;
         }
 
-        private void traverse(Node parent) throws RepositoryException {
-            NodeIterator pages = parent.getNodes();
-            while (pages.hasNext()) {
-                Node page = pages.nextNode();
-
-                code += page.getProperty("title").getString().hashCode();
-                code += page.getProperty("text").getString().hashCode();
-
-                count++;
-                if (count % 1000 == 0 && doReport) {
-                    long millis = System.currentTimeMillis() - start;
-                    System.out.format(
-                            "Read %d pages in %d seconds (%.2fms/page)%n",
-                            count, millis / 1000, (double) millis / count);
-                }
-
-                traverse(page);
-            }
-        }
-
+        long millis = System.currentTimeMillis() - start;
+        System.out.format(
+                "Traversed %d pages in %d seconds (%.2fms/page)%n",
+                count, millis / 1000, (double) millis / count);
+        return code;
     }
 
 }
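
The `levels` computation removed from `importWikipedia` above sized a fixed
256-way bucket hierarchy from the dump size, using the rough estimate of
about 1 kB of XML per page. A standalone sketch of that arithmetic with a
hypothetical 1 GB dump (the figure is illustrative only):

    public class LevelsDemo {
        public static void main(String[] args) {
            long dumpBytes = 1L << 30; // hypothetical 1 GB dump, ~1,048,576 pages
            int levels = 0;
            for (long pages = dumpBytes / 1024; pages > 256; pages /= 256) {
                levels++; // one more 256-way level of "%02x" buckets needed
            }
            // pages shrinks 1,048,576 -> 4,096 -> 16, so the loop runs twice:
            // two bucket levels, i.e. up to 65,536 leaves of roughly 16 pages.
            System.out.println(levels); // prints 2
        }
    }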
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/JcrCustomizer.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/JcrCustomizer.java
deleted file mode 100644
index 07dc1ca..0000000
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/JcrCustomizer.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.jackrabbit.oak.fixture;
-
-import org.apache.jackrabbit.oak.jcr.Jcr;
-
-public interface JcrCustomizer {
-    JcrCustomizer DEFAULT = new JcrCustomizer() {
-        @Override
-        public Jcr customize(Jcr jcr) {
-            return jcr;
-        }
-    };
-
-    Jcr customize(Jcr jcr);
-}
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java
index 1397e7d..b0871e0 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakFixture.java
@@ -22,8 +22,6 @@
 import com.google.common.collect.Maps;
 import org.apache.commons.io.FileUtils;
 import org.apache.jackrabbit.core.data.DataStore;
-import org.apache.jackrabbit.core.data.DataStoreException;
-import org.apache.jackrabbit.core.data.FileDataStore;
 import org.apache.jackrabbit.mk.api.MicroKernel;
 import org.apache.jackrabbit.mk.core.MicroKernelImpl;
 import org.apache.jackrabbit.oak.Oak;
@@ -33,7 +31,6 @@
 import org.apache.jackrabbit.oak.plugins.blob.datastore.DataStoreBlobStore;
 import org.apache.jackrabbit.oak.plugins.document.DocumentMK;
 import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection;
-import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
 import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
 import org.apache.jackrabbit.oak.plugins.segment.SegmentNodeStore;
 import org.apache.jackrabbit.oak.plugins.segment.SegmentStore;
@@ -47,13 +44,11 @@
     public static final String OAK_MEMORY_MK = "Oak-MemoryMK";
 
     public static final String OAK_MONGO = "Oak-Mongo";
-    public static final String OAK_MONGO_FDS = "Oak-Mongo-FDS";
     public static final String OAK_MONGO_NS = "Oak-MongoNS";
     public static final String OAK_MONGO_MK = "Oak-MongoMK";
 
     public static final String OAK_H2 = "Oak-H2";
     public static final String OAK_TAR = "Oak-Tar";
-    public static final String OAK_TAR_FDS = "Oak-Tar-FDS";
 
 
     private final String name;
@@ -61,11 +56,7 @@
 
     protected OakFixture(String name) {
         this.name = name;
-        this.unique = getUniqueDatabaseName(name);
-    }
-    
-    private static String getUniqueDatabaseName(String name) {
-        return String.format("%s-%d", name, System.currentTimeMillis());
+        this.unique = String.format("%s-%d", name, System.currentTimeMillis());
     }
 
     public abstract Oak getOak(int clusterId) throws Exception;
@@ -129,52 +120,30 @@
     public static OakFixture getMongo(String host, int port, String database,
                                       boolean dropDBAfterTest, long cacheSize) {
         return getMongo(OAK_MONGO, false, host, port, database,
-                dropDBAfterTest, cacheSize, false, null,0);
+                dropDBAfterTest, cacheSize);
     }
 
     public static OakFixture getMongoMK(String host, int port, String database,
                                         boolean dropDBAfterTest, long cacheSize) {
         return getMongo(OAK_MONGO_MK, true, host, port, database,
-                dropDBAfterTest, cacheSize, false, null, 0);
+                dropDBAfterTest, cacheSize);
     }
 
     public static OakFixture getMongoNS(String host, int port, String database,
                                         boolean dropDBAfterTest, long cacheSize) {
         return getMongo(OAK_MONGO_NS, false, host, port, database,
-                dropDBAfterTest, cacheSize, false, null, 0);
+                dropDBAfterTest, cacheSize);
     }
 
     public static OakFixture getMongo(String name, final boolean useMk, final String host,
-                                      final int port, String database,
-                                      final boolean dropDBAfterTest, final long cacheSize,
-                                      final boolean useFileDataStore,
-                                      final File base,
-                                      final int fdsCacheInMB) {
-        if (database == null) {
-            database = getUniqueDatabaseName(name);
-        }
-        String uri = "mongodb://" + host + ":" + port + "/" + database;
-        return getMongo(name, uri, useMk, dropDBAfterTest, cacheSize, useFileDataStore, base, fdsCacheInMB);
-    }
-
-    public static OakFixture getMongo(final String name, final String uri,
-                                      final boolean useMk,
-                                      final boolean dropDBAfterTest, final long cacheSize,
-                                      final boolean useFileDataStore,
-                                      final File base, final int fdsCacheInMB) {
+                                      final int port, final String database,
+                                      final boolean dropDBAfterTest, final long cacheSize) {
         return new OakFixture(name) {
+            private String dbName = database != null ? database : unique;
             private DocumentMK[] kernels;
             private BlobStore blobStore;
-            private File blobStoreDir;
 
             private BlobStore getBlobStore() {
-                if(useFileDataStore){
-                    FileDataStore fds = new FileDataStore();
-                    fds.setMinRecordLength(4092);
-                    blobStoreDir = new File(base, "datastore"+unique);
-                    fds.init(blobStoreDir.getAbsolutePath());
-                    return new DataStoreBlobStore(fds, true, fdsCacheInMB);
-                }
 
                 try {
                     String className = System.getProperty("dataStore");
@@ -208,7 +177,7 @@
 
             @Override
             public Oak getOak(int clusterId) throws Exception {
-                MongoConnection mongo = new MongoConnection(uri);
+                MongoConnection mongo = new MongoConnection(host, port, dbName);
                 BlobStore blobStore = getBlobStore();
                 DocumentMK.Builder mkBuilder = new DocumentMK.Builder().
                         setMongoDB(mongo.getDB()).
@@ -232,7 +201,7 @@
                 Oak[] cluster = new Oak[n];
                 kernels = new DocumentMK[cluster.length];
                 for (int i = 0; i < cluster.length; i++) {
-                    MongoConnection mongo = new MongoConnection(uri);
+                    MongoConnection mongo = new MongoConnection(host, port, dbName);
                     BlobStore blobStore = getBlobStore();
                     DocumentMK.Builder mkBuilder = new DocumentMK.Builder().
                             setMongoDB(mongo.getDB()).
@@ -261,7 +230,7 @@
                 if (dropDBAfterTest) {
                     try {
                         MongoConnection mongo =
-                                new MongoConnection(uri);
+                                new MongoConnection(host, port, dbName);
                         mongo.getDB().dropDatabase();
                         mongo.close();
                         if (blobStore instanceof CloudBlobStore) {
@@ -274,19 +243,16 @@
                     } catch (Exception e) {
                         throw new RuntimeException(e);
                     }
-                    FileUtils.deleteQuietly(blobStoreDir);
                 }
             }
         };
     }
 
     public static OakFixture getTar(
-            final String name, final File base, final int maxFileSizeMB, final int cacheSizeMB,
-            final boolean memoryMapping, final boolean useBlobStore) {
-        return new OakFixture(name) {
+            final File base, final int maxFileSizeMB, final int cacheSizeMB,
+            final boolean memoryMapping) {
+        return new OakFixture(OAK_TAR) {
             private SegmentStore[] stores;
-            private BlobStore[] blobStores = new BlobStore[0];
-            private String blobStoreDir = "datastore"+unique;
 
             @Override
             public Oak getOak(int clusterId) throws Exception {
@@ -298,20 +264,9 @@
             public Oak[] setUpCluster(int n) throws Exception {
                 Oak[] cluster = new Oak[n];
                 stores = new FileStore[cluster.length];
-                if (useBlobStore) {
-                    blobStores = new BlobStore[cluster.length];
-                }
-
                 for (int i = 0; i < cluster.length; i++) {
-                    BlobStore blobStore = null;
-                    if (useBlobStore) {
-                        blobStore = createBlobStore();
-                        blobStores[i] = blobStore;
-                    }
-
-                    stores[i] = new FileStore(blobStore,
+                    stores[i] = new FileStore(
                             new File(base, unique),
-                            EmptyNodeState.EMPTY_NODE,
                             maxFileSizeMB, cacheSizeMB, memoryMapping);
                     cluster[i] = new Oak(new SegmentNodeStore(stores[i]));
                 }
@@ -322,30 +277,11 @@
                 for (SegmentStore store : stores) {
                     store.close();
                 }
-                for(BlobStore blobStore : blobStores){
-                    if(blobStore instanceof DataStore){
-                        try {
-                            ((DataStore) blobStore).close();
-                        } catch (DataStoreException e) {
-                            e.printStackTrace();
-                        }
-                    }
-                }
                 FileUtils.deleteQuietly(new File(base, unique));
-                FileUtils.deleteQuietly(new File(base, blobStoreDir));
-            }
-
-            private BlobStore createBlobStore(){
-                FileDataStore fds = new FileDataStore();
-                fds.setMinRecordLength(4092);
-                fds.init(new File(base, blobStoreDir).getAbsolutePath());
-                return new DataStoreBlobStore(fds);
             }
         };
     }
 
-
-
     public static OakFixture getH2MK(final File base, final long cacheSize) {
         return new OakFixture(OAK_H2) {
             private MicroKernelImpl[] kernels;
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java
index 3e86212..c8acfa1 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/fixture/OakRepositoryFixture.java
@@ -47,45 +47,29 @@
 
     public static RepositoryFixture getMongo(String host, int port, String database,
                                              boolean dropDBAfterTest, long cacheSize) {
-        return getMongo(OakFixture.OAK_MONGO, false, host, port, database, dropDBAfterTest, cacheSize, false, null, 0);
-    }
-
-    public static RepositoryFixture getMongoWithFDS(String host, int port, String database,
-                                             boolean dropDBAfterTest, long cacheSize,
-                                             final File base, int fdsCacheInMB) {
-        return getMongo(OakFixture.OAK_MONGO_FDS, false, host, port, database,
-                dropDBAfterTest, cacheSize, true, base, fdsCacheInMB);
+        return getMongo(OakFixture.OAK_MONGO, false, host, port, database, dropDBAfterTest, cacheSize);
     }
 
     public static RepositoryFixture getMongoMK(String host, int port, String database,
                                                boolean dropDBAfterTest, long cacheSize) {
-        return getMongo(OakFixture.OAK_MONGO_MK, true, host, port, database, dropDBAfterTest, cacheSize, false, null, 0);
+        return getMongo(OakFixture.OAK_MONGO_MK, true, host, port, database, dropDBAfterTest, cacheSize);
     }
 
     public static RepositoryFixture getMongoNS(String host, int port, String database,
                                                boolean dropDBAfterTest, long cacheSize) {
-        return getMongo(OakFixture.OAK_MONGO_NS, false, host, port, database, dropDBAfterTest, cacheSize, false, null, 0);
+        return getMongo(OakFixture.OAK_MONGO_NS, false, host, port, database, dropDBAfterTest, cacheSize);
     }
 
     private static RepositoryFixture getMongo(String name, boolean useMK,
                                               String host, int port, String database,
-                                              boolean dropDBAfterTest, long cacheSize,
-                                              final boolean useFileDataStore,
-                                              final File base,
-                                              final int fdsCacheInMB) {
-        return new OakRepositoryFixture(OakFixture.getMongo(name, useMK, host, port, database, dropDBAfterTest,
-                cacheSize, useFileDataStore, base, fdsCacheInMB));
+                                              boolean dropDBAfterTest, long cacheSize) {
+        return new OakRepositoryFixture(OakFixture.getMongo(name, useMK, host, port, database, dropDBAfterTest, cacheSize));
     }
-    
+
     public static RepositoryFixture getTar(File base, int maxFileSizeMB, int cacheSizeMB, boolean memoryMapping) {
-        return new OakRepositoryFixture(OakFixture.getTar(OakFixture.OAK_TAR ,base, maxFileSizeMB, cacheSizeMB, memoryMapping, false));
+        return new OakRepositoryFixture(OakFixture.getTar(base, maxFileSizeMB, cacheSizeMB, memoryMapping));
     }
 
-    public static RepositoryFixture getTarWithBlobStore(File base, int maxFileSizeMB, int cacheSizeMB, boolean memoryMapping) {
-        return new OakRepositoryFixture(OakFixture.getTar(OakFixture.OAK_TAR_FDS,base, maxFileSizeMB, cacheSizeMB, memoryMapping, true));
-    }
-
-
     private final OakFixture oakFixture;
     private Repository[] cluster;
 
@@ -100,14 +84,10 @@
 
     @Override
     public final Repository[] setUpCluster(int n) throws Exception {
-        return setUpCluster(n,JcrCustomizer.DEFAULT);
-    }
-
-    public Repository[] setUpCluster(int n, JcrCustomizer customizer) throws Exception {
         Oak[] oaks = oakFixture.setUpCluster(n);
         cluster = new Repository[oaks.length];
         for (int i = 0; i < oaks.length; i++) {
-            cluster[i] = customizer.customize(new Jcr(oaks[i])).createRepository();
+            cluster[i] = new Jcr(oaks[i]).createRepository();
         }
         return cluster;
     }
diff --git a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java
index 07aac6e..91ec749 100644
--- a/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java
+++ b/oak-run/src/main/java/org/apache/jackrabbit/oak/run/Main.java
@@ -22,7 +22,6 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Queue;
@@ -88,7 +87,7 @@
 
         Mode mode = Mode.SERVER;
         if (args.length > 0) {
-            mode = Mode.valueOf(args[0].toUpperCase(Locale.ENGLISH));
+            mode = Mode.valueOf(args[0].toUpperCase());
             String[] tail = new String[args.length - 1];
             System.arraycopy(args, 1, tail, 0, tail.length);
             args = tail;
@@ -179,6 +178,7 @@
                             dataCount++;
                             dataSize += segment.size();
                             idmap.put(id, segment.getReferencedIds());
+                            System.out.println(id + " -> " + idmap.get(id));
                         } else if (id.isBulkSegmentId()) {
                             bulkCount++;
                             bulkSize += id.getSegment().size();
@@ -246,15 +246,10 @@
                                 path = matcher.group(3);
                             }
                             NodeState node = new SegmentNodeState(id);
-                            System.out.println("/ (" + id + ") -> " + node);
+                            System.out.println("/ -> " + node);
                             for (String name : PathUtils.elements(path)) {
                                 node = node.getChildNode(name);
-                                RecordId nid = null;
-                                if (node instanceof SegmentNodeState) {
-                                    nid = ((SegmentNodeState) node).getRecordId();
-                                }
-                                System.out.println(
-                                        "  " + name  + " (" + nid + ") -> " + node);
+                                System.out.println(" " + name  + " -> " + node);
                             }
                         }
                     }
@@ -384,7 +379,7 @@
             if (baseFile == null) {
                 throw new IllegalArgumentException("Required argument base missing.");
             }
-            oakFixture = OakFixture.getTar(OakFixture.OAK_TAR, baseFile, 256, cacheSize, mmap.value(options), false);
+            oakFixture = OakFixture.getTar(baseFile, 256, cacheSize, mmap.value(options));
         } else if (fix.equals(OakFixture.OAK_H2)) {
             File baseFile = base.value(options);
             if (baseFile == null) {
diff --git a/oak-solr-core/pom.xml b/oak-solr-core/pom.xml
index a454e0b..4405887 100644
--- a/oak-solr-core/pom.xml
+++ b/oak-solr-core/pom.xml
@@ -23,7 +23,7 @@
     <parent>
         <groupId>org.apache.jackrabbit</groupId>
         <artifactId>oak-parent</artifactId>
-        <version>1.1-SNAPSHOT</version>
+        <version>1.0.0</version>
         <relativePath>../oak-parent/pom.xml</relativePath>
     </parent>
 
@@ -34,38 +34,8 @@
 
     <properties>
         <known.issues>
-            org.apache.jackrabbit.core.query.FulltextQueryTest#testMultipleOrExpressions                   <!-- different ranking -->
-            org.apache.jackrabbit.core.query.FulltextQueryTest#testMultiByte                               <!-- wildcards with multi byte support -->
-            org.apache.jackrabbit.core.query.JoinTest#testJoinWithOR4                                      <!-- OAK-955 -->
-            org.apache.jackrabbit.core.query.JoinTest#testJoinWithOR5                                      <!-- OAK-955 -->
-            org.apache.jackrabbit.core.query.SQL2NodeLocalNameTest#testLowerLocalNameOrContains            <!-- OAK-957 -->
-            org.apache.jackrabbit.core.query.SQL2NodeLocalNameTest#testUpperLocalNameOrContains            <!-- OAK-957 -->
-            org.apache.jackrabbit.core.query.FnNameQueryTest#testLikeWithPrefix                            <!-- OAK-328 -->
-            org.apache.jackrabbit.core.query.ShareableNodeTest#testName                                    <!-- OAK-118 -->
-            org.apache.jackrabbit.core.query.ShareableNodeTest#testPathConstraint                          <!-- OAK-118 -->
-            org.apache.jackrabbit.core.query.SelectClauseTest#testSameNameSiblingSQL                       <!-- OAK-203 -->
-            org.apache.jackrabbit.core.query.ChildAxisQueryTest#testRelationQuery                          <!-- OAK-203 -->
-            org.apache.jackrabbit.core.query.ChildAxisQueryTest#testRelationQueryDeep                      <!-- OAK-203 -->
-            org.apache.jackrabbit.core.query.ChildAxisQueryTest#testMultiRelation                          <!-- OAK-203 -->
-            org.apache.jackrabbit.core.query.ChildAxisQueryTest#testLike                                   <!-- OAK-203 -->
-            org.apache.jackrabbit.core.query.ChildAxisQueryTest#testContains                               <!-- OAK-203 -->
-            org.apache.jackrabbit.core.query.ChildAxisQueryTest#testStarNameTest                           <!-- OAK-203 -->
-            org.apache.jackrabbit.core.query.ChildAxisQueryTest#testNotIsDescendantNodeQuery               <!-- OAK-203? -->
-            org.apache.jackrabbit.core.query.SQL2PathEscapingTest                                          <!-- ? -->
-            org.apache.jackrabbit.core.query.UpperLowerCaseQueryTest                                       <!-- ? -->
-            org.apache.jackrabbit.core.query.SimpleQueryTest                                               <!-- ? -->
-            org.apache.jackrabbit.core.query.XPathAxisTest                                                 <!-- ? -->
-            org.apache.jackrabbit.core.query.DerefTest                                                     <!-- ? -->
-            org.apache.jackrabbit.core.query.ParentNodeTest                                                <!-- ? -->
-            org.apache.jackrabbit.core.query.QueryResultTest                                               <!-- ? -->
-            org.apache.jackrabbit.core.query.ExcerptTest#testMoreTextDotsAtEnd                             <!-- OAK-318 -->
-            org.apache.jackrabbit.core.query.ExcerptTest#testMoreTextDotsAtStart                           <!-- OAK-318 -->
-            org.apache.jackrabbit.core.query.ExcerptTest#testMoreTextDotsAtStartAndEnd                     <!-- OAK-318 -->
-            org.apache.jackrabbit.core.query.ExcerptTest#testPunctuationStartsFragment                     <!-- OAK-318 -->
-            org.apache.jackrabbit.core.query.ExcerptTest#testPunctuationStartsFragmentEndsWithDots         <!-- OAK-318 -->
-            org.apache.jackrabbit.core.query.ExcerptTest#testPreferPhrase                                  <!-- OAK-318 -->
-            org.apache.jackrabbit.core.query.SQL2OuterJoinTest                                             <!-- ? -->
-            org.apache.jackrabbit.core.query.MixinTest                                                     <!-- ? -->
+            org.apache.jackrabbit.core.query.FulltextQueryTest#testMultipleOrExpressions                    <!-- different ranking -->
+            org.apache.jackrabbit.core.query.FulltextQueryTest#testMultiByte                                <!-- wildcards with multi byte support -->
         </known.issues>
     </properties>
 
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultSolrConfiguration.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultSolrConfiguration.java
index 3858926..43f41e3 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultSolrConfiguration.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultSolrConfiguration.java
@@ -82,9 +82,4 @@
         return SolrServerConfigurationDefaults.CATCHALL_FIELD;
     }
 
-    @Override
-    public int getRows() {
-        return SolrServerConfigurationDefaults.ROWS;
-    }
-
 }
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrConfiguration.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrConfiguration.java
index a4dd0c9..37931a0 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrConfiguration.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrConfiguration.java
@@ -68,11 +68,4 @@
      * @return a <code>String</code> representing the Solr field to be used as "catch all" field
      */
     public String getCatchAllField();
-
-    /**
-     * Provide the number of documents (rows) to be fetched for each Solr query
-     *
-     * @return an <code>int</code> for the setting of Solr rows parameter
-     */
-    public int getRows();
 }
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrNodeStateConfiguration.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrNodeStateConfiguration.java
index 7a4fac8..22cc84e 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrNodeStateConfiguration.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/OakSolrNodeStateConfiguration.java
@@ -99,25 +99,8 @@
         return CommitPolicy.valueOf(getStringValueFor(Properties.COMMIT_POLICY, CommitPolicy.SOFT.toString()));
     }
 
-    @Override
-    public int getRows() {
-        return getIntValueFor(Properties.ROWS, SolrServerConfigurationDefaults.ROWS);
-    }
-
-    private int getIntValueFor(String propertyName, int defaultValue) {
-        long value = defaultValue;
-        NodeState configurationNodeState = getConfigurationNodeState();
-        if (configurationNodeState.exists()) {
-            PropertyState property = configurationNodeState.getProperty(propertyName);
-            if (property != null) {
-                value = property.getValue(Type.LONG);
-            }
-        }
-        return (int) value;
-    }
-
     protected String getStringValueFor(String propertyName, String defaultValue) {
-        String value = defaultValue;
+        String value = null;
         NodeState configurationNodeState = getConfigurationNodeState();
         if (configurationNodeState.exists()) {
             PropertyState property = configurationNodeState.getProperty(propertyName);
@@ -125,6 +108,9 @@
                 value = property.getValue(Type.STRING);
             }
         }
+        if (value == null || value.length() == 0) {
+            value = defaultValue;
+        }
         return value;
     }
 
@@ -157,6 +143,6 @@
         public static final String DESCENDANTS_FIELD = "descendantsField";
         public static final String CATCHALL_FIELD = "catchAllField";
         public static final String COMMIT_POLICY = "commitPolicy";
-        public static final String ROWS = "rows";
     }
 }
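
Note the behavioural effect of the reverted `getStringValueFor` above: a
property that exists but holds an empty string now falls back to the default,
whereas the removed variant returned the empty string as-is. A plain-string
illustration of the 1.0.0 logic (hypothetical values, no Oak types):

    static String stringValueFor(String stored, String defaultValue) {
        String value = stored; // what the configuration node state returned
        if (value == null || value.length() == 0) {
            value = defaultValue; // empty now counts as "not configured"
        }
        return value;
    }

    // stringValueFor(null, "catch_all")       -> "catch_all"
    // stringValueFor("", "catch_all")         -> "catch_all" (previously "")
    // stringValueFor("my_field", "catch_all") -> "my_field"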
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/SolrServerConfigurationDefaults.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/SolrServerConfigurationDefaults.java
index 29d262a..0961d6d 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/SolrServerConfigurationDefaults.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/SolrServerConfigurationDefaults.java
@@ -31,10 +31,8 @@
 
     public static final String PATH_FIELD_NAME = "path_exact";
     public static final String CHILD_FIELD_NAME = "path_child";
-    public static final String DESC_FIELD_NAME = "path_des";
+    public static final String DESC_FIELD_NAME = "path_desc";
     public static final String ANC_FIELD_NAME = "path_anc";
 
     public static final String CATCHALL_FIELD = "catch_all";
-
-    public static final int ROWS = 100000;
 }
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/package-info.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/package-info.java
index 7007a7c..746d825 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/package-info.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/package-info.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-@Version("1.1")
+@Version("1.0")
 @Export(optional = "provide:=true")
 package org.apache.jackrabbit.oak.plugins.index.solr.configuration;
 
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/osgi/OakSolrConfigurationProviderService.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/osgi/OakSolrConfigurationProviderService.java
index 77a9000..13f60b3 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/osgi/OakSolrConfigurationProviderService.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/osgi/OakSolrConfigurationProviderService.java
@@ -25,7 +25,6 @@
 import org.apache.jackrabbit.oak.plugins.index.solr.configuration.DefaultSolrConfiguration;
 import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfiguration;
 import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfigurationProvider;
-import org.apache.jackrabbit.oak.plugins.index.solr.configuration.SolrServerConfigurationDefaults;
 import org.apache.jackrabbit.oak.spi.query.Filter;
 import org.osgi.service.component.ComponentContext;
 
@@ -36,12 +35,11 @@
 @Service(OakSolrConfigurationProvider.class)
 public class OakSolrConfigurationProviderService implements OakSolrConfigurationProvider {
 
-    private static final String DEFAULT_DESC_FIELD = SolrServerConfigurationDefaults.DESC_FIELD_NAME;
-    private static final String DEFAULT_CHILD_FIELD = SolrServerConfigurationDefaults.CHILD_FIELD_NAME;
-    private static final String DEFAULT_PARENT_FIELD = SolrServerConfigurationDefaults.ANC_FIELD_NAME;
-    private static final String DEFAULT_PATH_FIELD = SolrServerConfigurationDefaults.PATH_FIELD_NAME;
-    private static final String DEFAULT_CATCHALL_FIELD = SolrServerConfigurationDefaults.CATCHALL_FIELD;
-    private static final int DEFAULT_ROWS = SolrServerConfigurationDefaults.ROWS;
+    private static final String DEFAULT_DESC_FIELD = "path_des";
+    private static final String DEFAULT_CHILD_FIELD = "path_child";
+    private static final String DEFAULT_PARENT_FIELD = "path_anc";
+    private static final String DEFAULT_PATH_FIELD = "path_exact";
+    private static final String DEFAULT_CATCHALL_FIELD = "catch_all";
 
     @Property(value = DEFAULT_DESC_FIELD, label = "field for descendants search")
     private static final String PATH_DESCENDANTS_FIELD = "path.desc.field";
@@ -72,29 +70,22 @@
     )
     private static final String COMMIT_POLICY = "commit.policy";
 
-
-    @Property(intValue = DEFAULT_ROWS, label = "rows")
-    private static final String ROWS = "rows";
-
-
     private String pathChildrenFieldName;
     private String pathParentFieldName;
     private String pathDescendantsFieldName;
     private String pathExactFieldName;
     private String catchAllField;
     private CommitPolicy commitPolicy;
-    private int rows;
 
     private OakSolrConfiguration oakSolrConfiguration;
 
     @Activate
     protected void activate(ComponentContext componentContext) throws Exception {
         pathChildrenFieldName = String.valueOf(componentContext.getProperties().get(PATH_CHILDREN_FIELD));
         pathParentFieldName = String.valueOf(componentContext.getProperties().get(PATH_PARENT_FIELD));
         pathExactFieldName = String.valueOf(componentContext.getProperties().get(PATH_EXACT_FIELD));
         pathDescendantsFieldName = String.valueOf(componentContext.getProperties().get(PATH_DESCENDANTS_FIELD));
         catchAllField = String.valueOf(componentContext.getProperties().get(CATCH_ALL_FIELD));
-        rows = Integer.parseInt(String.valueOf(componentContext.getProperties().get(ROWS)));
         commitPolicy = CommitPolicy.valueOf(String.valueOf(componentContext.getProperties().get(COMMIT_POLICY)));
     }
 
@@ -147,11 +138,6 @@
                 public String getCatchAllField() {
                     return catchAllField;
                 }
-
-                @Override
-                public int getRows() {
-                    return rows;
-                }
             };
         }
         return oakSolrConfiguration;
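
In the reverted service above, activate() reads each field name straight from
the component properties; because every @Property declares a default value,
the lookups normally return non-null. A hedged sketch of the same
read-with-default idiom (the helper name is hypothetical):

    // Minimal sketch: read an OSGi component property, falling back
    // to a default when the key is absent.
    private static String propertyOrDefault(ComponentContext context,
                                            String key, String defaultValue) {
        Object value = context.getProperties().get(key);
        return value != null ? String.valueOf(value) : defaultValue;
    }
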
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/query/SolrQueryIndex.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/query/SolrQueryIndex.java
index ae9c0a8..08f67ce 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/query/SolrQueryIndex.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/query/SolrQueryIndex.java
@@ -38,7 +38,6 @@
 import org.apache.jackrabbit.oak.spi.state.NodeState;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServer;
-import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
@@ -337,7 +336,8 @@
             solrQuery.setParam("df", catchAllField);
         }
 
-        solrQuery.setParam("rows", String.valueOf(configuration.getRows()));
+        // TODO : can we handle this better? e.g. with deep paging support?
+        solrQuery.setParam("rows", "100000");
     }
 
     private static String createRangeQuery(String first, String last, boolean firstIncluding, boolean lastIncluding) {
@@ -381,7 +381,7 @@
             if (log.isDebugEnabled()) {
                 log.debug("getting response {}", queryResponse);
             }
-            cursor = new SolrCursor(queryResponse, query);
+            cursor = new SolrCursor(queryResponse);
         } catch (Exception e) {
             throw new RuntimeException(e);
         }
@@ -391,34 +391,29 @@
 
     private class SolrCursor implements Cursor {
 
-        private SolrDocumentList results;
+        private final SolrDocumentList results;
 
-        private SolrQuery query;
+        private int i;
 
-        private int counter;
-        private int offset;
-
-        public SolrCursor(QueryResponse queryResponse, SolrQuery query) {
+        public SolrCursor(QueryResponse queryResponse) {
             this.results = queryResponse.getResults();
-            this.counter = 0;
-            this.offset = 0;
-            this.query = query;
+            i = 0;
         }
 
         @Override
         public boolean hasNext() {
-            return results != null && offset + counter < results.getNumFound();
+            return results != null && i < results.size();
         }
 
         @Override
         public void remove() {
-            results.remove(counter);
+            results.remove(i);
         }
 
         public IndexRow next() {
-            if (counter < results.size() || updateResults()) {
-                final SolrDocument doc = results.get(counter);
-                counter++;
+            if (i < results.size()) {
+                final SolrDocument doc = results.get(i);
+                i++;
                 return new IndexRow() {
                     @Override
                     public String getPath() {
@@ -445,26 +440,6 @@
                 return null;
             }
         }
-
-        private boolean updateResults() {
-            int newOffset = offset + results.size();
-            query.setParam("start", String.valueOf(newOffset));
-            try {
-                QueryResponse queryResponse = solrServer.query(query);
-                SolrDocumentList localResults = queryResponse.getResults();
-                boolean hasMoreResults = localResults.size() > 0;
-                if (hasMoreResults) {
-                    counter = 0;
-                    offset = newOffset;
-                    results = localResults;
-                } else {
-                    query.setParam("start", String.valueOf(offset));
-                }
-                return hasMoreResults;
-            } catch (SolrServerException e) {
-                throw new RuntimeException("error retrieving paged results", e);
-            }
-        }
     }
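
With updateResults() gone, SolrCursor iterates only the single page fetched
with rows=100000, so anything beyond that cap is silently dropped; the TODO
above points at deep paging as the eventual fix. For reference, a hedged
sketch of the classic start/rows paging idiom in SolrJ that the removed code
approximated (error handling omitted):

    // Page through results with start/rows; assumes the enclosing
    // method declares throws SolrServerException.
    int start = 0;
    final int pageSize = 1000;
    SolrDocumentList page;
    do {
        solrQuery.setStart(start);
        solrQuery.setRows(pageSize);
        page = solrServer.query(solrQuery).getResults();
        // process the page here...
        start += page.size();
    } while (!page.isEmpty());
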
 
 
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/server/RemoteSolrServerProvider.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/server/RemoteSolrServerProvider.java
index d357183..c92b484 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/server/RemoteSolrServerProvider.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/server/RemoteSolrServerProvider.java
@@ -18,7 +18,6 @@
 
 import java.io.File;
 import java.io.IOException;
-
 import org.apache.jackrabbit.oak.plugins.index.solr.configuration.RemoteSolrServerConfiguration;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -86,68 +85,45 @@
         // try SolrCloud client
         CloudSolrServer cloudSolrServer = new CloudSolrServer(remoteSolrServerConfiguration.getSolrZkHost());
         cloudSolrServer.setZkConnectTimeout(100);
-        if (connectToZK(cloudSolrServer)) {
-            cloudSolrServer.setDefaultCollection("collection1"); // workaround for first request when the needed collection may not exist
+        cloudSolrServer.connect();
+        cloudSolrServer.setDefaultCollection("collection1"); // workaround for first request when the needed collection may not exist
 
-            // create specified collection if it doesn't exists
-            try {
-                createCollectionIfNeeded(cloudSolrServer);
-            } catch (Throwable t) {
-                if (log.isWarnEnabled()) {
-                    log.warn("could not create the collection on {}", remoteSolrServerConfiguration.getSolrZkHost(), t);
-                }
+        // create the specified collection if it doesn't exist
+        try {
+            createCollectionIfNeeded(cloudSolrServer);
+        } catch (Throwable t) {
+            if (log.isWarnEnabled()) {
+                log.warn("could not create the collection on {}", remoteSolrServerConfiguration.getSolrZkHost(), t);
             }
-
-            cloudSolrServer.setDefaultCollection(remoteSolrServerConfiguration.getSolrCollection());
-
-            // SolrCloud may need some time to sync on collection creation (to spread it over the shards / replicas)
-            int i = 0;
-            while (i < 3) {
-                try {
-                    SolrPingResponse ping = cloudSolrServer.ping();
-                    if (ping != null && 0 == ping.getStatus()) {
-                        return cloudSolrServer;
-                    } else {
-                        throw new IOException("the found SolrCloud server is not alive");
-                    }
-                } catch (Exception e) {
-                    // wait a bit
-                    try {
-                        if (log.isDebugEnabled()) {
-                            log.debug("server is not alive yet, wait a bit", e);
-                        }
-                        Thread.sleep(3000);
-                    } catch (InterruptedException e1) {
-                        // do nothing
-                    }
-                }
-                i++;
-            }
-            throw new IOException("the found SolrCloud server is not alive");
-        }
-        else {
-            throw new IOException("could not connect to Zookeeper hosted at " + remoteSolrServerConfiguration.getSolrZkHost());
         }
 
-    }
+        cloudSolrServer.setDefaultCollection(remoteSolrServerConfiguration.getSolrCollection());
 
-    private boolean connectToZK(CloudSolrServer cloudSolrServer) {
-        boolean connected = false;
-        for (int i = 0; i < 3; i++) {
+        // SolrCloud may need some time to sync on collection creation (to spread it over the shards / replicas)
+        int i = 0;
+        while (i < 3) {
             try {
-                cloudSolrServer.connect();
-                connected = true;
-                break;
+                SolrPingResponse ping = cloudSolrServer.ping();
+                if (ping != null && 0 == ping.getStatus()) {
+                    return cloudSolrServer;
+                } else {
+                    throw new IOException("the found SolrCloud server is not alive");
+                }
             } catch (Exception e) {
-                log.warn("could not connect to ZK", e);
+                // wait a bit
                 try {
+                    if (log.isDebugEnabled()) {
+                        log.debug("server is not alive yet, wait a bit", e);
+                    }
                     Thread.sleep(3000);
                 } catch (InterruptedException e1) {
                     // do nothing
                 }
             }
+            i++;
         }
-        return connected;
+        throw new IOException("the found SolrCloud server is not alive");
+
     }
 
     private void createCollectionIfNeeded(CloudSolrServer cloudSolrServer) throws SolrServerException, IOException {
@@ -166,8 +142,10 @@
                 ZkController.uploadConfigDir(zkClient, dir, solrCollection);
                 UpdateRequest req = new UpdateRequest("/admin/collections");
                 req.setParam("action", "CREATE");
-                req.setParam("numShards", String.valueOf(remoteSolrServerConfiguration.getSolrShardsNo()));
-                req.setParam("replicationFactor", String.valueOf(remoteSolrServerConfiguration.getSolrReplicationFactor()));
+                if (remoteSolrServerConfiguration != null) {
+                    req.setParam("numShards", String.valueOf(remoteSolrServerConfiguration.getSolrShardsNo()));
+                    req.setParam("replicationFactor", String.valueOf(remoteSolrServerConfiguration.getSolrReplicationFactor()));
+                }
                 req.setParam("collection.configName", solrCollection);
                 req.setParam("name", solrCollection);
                 cloudSolrServer.request(req);
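
The connectToZK() helper is gone: connect() is now called once, and only the
ping is retried (three attempts, three seconds apart) before giving up. A
minimal sketch of that retry shape, with illustrative names:

    // Up to three pings, waiting between attempts; failure surfaces
    // as an IOException, as in the code above.
    for (int attempt = 0; attempt < 3; attempt++) {
        try {
            SolrPingResponse ping = server.ping();
            if (ping != null && ping.getStatus() == 0) {
                return server;
            }
            throw new IOException("ping failed");
        } catch (Exception e) {
            try {
                Thread.sleep(3000); // give the collection time to sync
            } catch (InterruptedException ignored) {
                // keep retrying
            }
        }
    }
    throw new IOException("the SolrCloud server did not come up in time");
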
diff --git a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/util/SolrIndexInitializer.java b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/util/SolrIndexInitializer.java
index 64aa037c..8307d31 100644
--- a/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/util/SolrIndexInitializer.java
+++ b/oak-solr-core/src/main/java/org/apache/jackrabbit/oak/plugins/index/solr/util/SolrIndexInitializer.java
@@ -41,7 +41,7 @@
      */
     public SolrIndexInitializer() {
         this.name = SOLR_IDX;
-        this.async = ASYNC;
+        this.async = "async";
         this.reindex = true;
     }
 
@@ -64,7 +64,7 @@
      */
     public SolrIndexInitializer(boolean async) {
         this.name = SOLR_IDX;
-        this.async = async ? ASYNC : null;
+        this.async = async ? "async" : null;
         this.reindex = true;
     }
 
@@ -76,9 +76,9 @@
      * @param name    the name of the node holding the Solr index definition
      * @param reindex <code>true</code> if the reindexing should be enabled
      */
-    public SolrIndexInitializer(boolean async, String name, boolean reindex) {
+    public SolrIndexInitializer(String async, String name, boolean reindex) {
+        this.async = async;
         this.name = name;
-        this.async = async ? ASYNC : null;
         this.reindex = reindex;
     }
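
The three-argument constructor above now takes the async value as a String
rather than a boolean, so callers pass the asynchronous lane name directly, or
null for synchronous indexing. A hedged usage sketch (the index name is
illustrative):

    // Hypothetical callers of the new signature.
    SolrIndexInitializer asyncInit = new SolrIndexInitializer("async", "solr", true);
    SolrIndexInitializer syncInit = new SolrIndexInitializer(null, "solr", true);
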
 
diff --git a/oak-solr-core/src/main/resources/solr/oak/conf/schema.xml b/oak-solr-core/src/main/resources/solr/oak/conf/schema.xml
index dbcc9bf..be09070 100644
--- a/oak-solr-core/src/main/resources/solr/oak/conf/schema.xml
+++ b/oak-solr-core/src/main/resources/solr/oak/conf/schema.xml
@@ -21,8 +21,6 @@
     <fieldType name="descendent_path" class="solr.TextField">
       <analyzer type="index">
         <tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
-        <filter class="solr.PatternCaptureGroupFilterFactory" pattern="((/).*)" preserve_original="false"/>
-        <filter class="solr.RemoveDuplicatesTokenFilterFactory" />
       </analyzer>
       <analyzer type="query">
         <tokenizer class="solr.KeywordTokenizerFactory" />
@@ -102,15 +100,43 @@
     <field name="path_child" type="children_path" indexed="true" stored="false"/>
     <field name="path_anc" type="parent_path" indexed="true" stored="false"/>
     <field name="path_des" type="descendent_path" indexed="true" stored="false"/>
+    <field name="ignored" type="ignored" multiValued="true"/>
     <field name="catch_all" type="text_general" indexed="true" stored="false" multiValued="true"/>
-    <field name=":path" type="string" indexed="true" stored="false"/>
     <field name="_version_" type="long" indexed="true" stored="true"/>
+    <field name=":path" type="string" indexed="true" stored="false"/>
+
+    <dynamicField name="*_i"  type="int"    indexed="true"  stored="true"/>
+    <dynamicField name="*_is" type="int"    indexed="true"  stored="true"  multiValued="true"/>
+    <dynamicField name="*_s"  type="string"  indexed="true"  stored="true" />
+    <dynamicField name="*_ss" type="string"  indexed="true"  stored="true" multiValued="true"/>
+    <dynamicField name="*_l"  type="long"   indexed="true"  stored="true"/>
+    <dynamicField name="*_ls" type="long"   indexed="true"  stored="true"  multiValued="true"/>
+    <dynamicField name="*_t"  type="text_general"    indexed="true"  stored="true"/>
+    <dynamicField name="*_txt" type="text_general"   indexed="true"  stored="true" multiValued="true"/>
+    <dynamicField name="*_b"  type="boolean" indexed="true" stored="true"/>
+    <dynamicField name="*_bs" type="boolean" indexed="true" stored="true"  multiValued="true"/>
+    <dynamicField name="*_f"  type="float"  indexed="true"  stored="true"/>
+    <dynamicField name="*_fs" type="float"  indexed="true"  stored="true"  multiValued="true"/>
+    <dynamicField name="*_d"  type="double" indexed="true"  stored="true"/>
+    <dynamicField name="*_ds" type="double" indexed="true"  stored="true"  multiValued="true"/>
+
+    <dynamicField name="*_dt"  type="date"    indexed="true"  stored="true"/>
+    <dynamicField name="*_dts" type="date"    indexed="true"  stored="true" multiValued="true"/>
+
+    <dynamicField name="*_ti" type="tint"    indexed="true"  stored="true"/>
+    <dynamicField name="*_tl" type="tlong"   indexed="true"  stored="true"/>
+    <dynamicField name="*_tf" type="tfloat"  indexed="true"  stored="true"/>
+    <dynamicField name="*_td" type="tdouble" indexed="true"  stored="true"/>
+    <dynamicField name="*_tdt" type="tdate"  indexed="true"  stored="true"/>
+
+    <dynamicField name="*_pi"  type="pint"    indexed="true"  stored="true"/>
+    <dynamicField name="*_c"   type="currency" indexed="true"  stored="true"/>
     <dynamicField name="*" type="text_general" indexed="true" stored="true" multiValued="true"/>
   </fields>
   <uniqueKey>path_exact</uniqueKey>
   <copyField source="path_exact" dest="path_anc"/>
   <copyField source="path_exact" dest="path_des"/>
   <copyField source="path_exact" dest="path_child"/>
   <copyField source="path_exact" dest=":path"/>
   <copyField source="*" dest="catch_all"/>
 </schema>
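
The added dynamicField rules follow the stock Solr suffix convention (*_i int,
*_s string, *_dt date, and so on), with the trailing "*" rule still routing
anything unmatched into text_general. A hedged SolrJ sketch of how suffixed
field names pick up these types at indexing time (document values are made
up, error handling omitted):

    // Field names ending in a declared suffix are typed by the
    // matching dynamicField rule.
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("path_exact", "/content/site/page");
    doc.addField("views_i", 42);        // *_i  -> int
    doc.addField("title_s", "My Page"); // *_s  -> string
    doc.addField("tags_ss", "oak");     // *_ss -> multiValued string
    solrServer.add(doc);
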
diff --git a/oak-solr-core/src/main/resources/solr/oak/conf/solrconfig.xml b/oak-solr-core/src/main/resources/solr/oak/conf/solrconfig.xml
index 1e2803a..348dd09 100644
--- a/oak-solr-core/src/main/resources/solr/oak/conf/solrconfig.xml
+++ b/oak-solr-core/src/main/resources/solr/oak/conf/solrconfig.xml
@@ -99,6 +99,17 @@
     <directoryFactory name="DirectoryFactory"
                       class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}">
 
+
+        <!-- These will be used if you are using the solr.HdfsDirectoryFactory,
+             otherwise they will be ignored. If you don't plan on using hdfs,
+             you can safely remove this section. -->
+        <!-- The root directory that collection data should be written to. -->
+        <str name="solr.hdfs.home">${solr.hdfs.home:}</str>
+        <!-- The hadoop configuration files to use for the hdfs client. -->
+        <str name="solr.hdfs.confdir">${solr.hdfs.confdir:}</str>
+        <!-- Enable/Disable the hdfs cache. -->
+        <str name="solr.hdfs.blockcache.enabled">${solr.hdfs.blockcache.enabled:true}</str>
+
     </directoryFactory>
 
     <!-- The CodecFactory for defining the format of the inverted index.
@@ -192,10 +203,10 @@
              will be allowed before they are merged into one.
              Default is 10 for both merge policies.
           -->
-
         <!--
         <mergeFactor>10</mergeFactor>
-        -->
+          -->
+
 
         <!-- Expert: Merge Scheduler
              The Merge Scheduler in Lucene controls how merges are
@@ -294,7 +305,7 @@
              IndexWriter to write its info stream to solr's log. By default,
              this is enabled here, and controlled through log4j.properties.
           -->
-        <infoStream>false</infoStream>
+        <infoStream>true</infoStream>
     </indexConfig>
 
 
@@ -488,19 +499,19 @@
                    and old cache.
           -->
         <filterCache class="solr.FastLRUCache"
-                     size="4096"
-                     initialSize="1024"
-                     autowarmCount="512"/>
+                     size="512"
+                     initialSize="512"
+                     autowarmCount="0"/>
 
         <!-- Query Result Cache
 
              Caches results of searches - ordered lists of document ids
              (DocList) based on a query, a sort, and the range of documents requested.
           -->
-        <queryResultCache class="solr.FastLRUCache"
-                          size="40960"
-                          initialSize="4096"
-                          autowarmCount="2048"/>
+        <queryResultCache class="solr.LRUCache"
+                          size="512"
+                          initialSize="512"
+                          autowarmCount="0"/>
 
         <!-- Document Cache
 
@@ -509,8 +520,8 @@
              this cache will not be autowarmed.
           -->
         <documentCache class="solr.LRUCache"
-                       size="40960"
-                       initialSize="1024"
+                       size="512"
+                       initialSize="512"
                        autowarmCount="0"/>
 
         <!-- custom cache currently used by block join -->
@@ -527,10 +538,12 @@
              by document id.  The fieldValueCache is created by default
              even if not configured here.
           -->
-       <fieldValueCache class="solr.FastLRUCache"
-                        size="4096"
-                        autowarmCount="1024"
-                        showItems="32" />
+        <!--
+           <fieldValueCache class="solr.FastLRUCache"
+                            size="512"
+                            autowarmCount="128"
+                            showItems="32" />
+          -->
 
         <!-- Custom Cache
 
@@ -614,10 +627,12 @@
         <!-- QuerySenderListener takes an array of NamedList and executes a
              local query request for each NamedList in sequence.
           -->
-        <!--<listener event="newSearcher" class="solr.QuerySenderListener">
+        <listener event="newSearcher" class="solr.QuerySenderListener">
             <arr name="queries">
+                <!--
                    <lst><str name="q">solr</str><str name="sort">price asc</str></lst>
                    <lst><str name="q">rocks</str><str name="sort">weight asc</str></lst>
+                  -->
             </arr>
         </listener>
         <listener event="firstSearcher" class="solr.QuerySenderListener">
@@ -626,7 +641,7 @@
                     <str name="q">static firstSearcher warming in solrconfig.xml</str>
                 </lst>
             </arr>
-        </listener>-->
+        </listener>
 
         <!-- Use Cold Searcher
 
diff --git a/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryJcrTest.java b/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryJcrTest.java
index a7a0254..79a6b4b 100644
--- a/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryJcrTest.java
+++ b/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/jcr/query/QueryJcrTest.java
@@ -19,34 +19,15 @@
 import junit.framework.Test;
 import junit.framework.TestCase;
 import junit.framework.TestSuite;
-import org.apache.jackrabbit.core.query.ChildAxisQueryTest;
-import org.apache.jackrabbit.core.query.DerefTest;
-import org.apache.jackrabbit.core.query.ExcerptTest;
-import org.apache.jackrabbit.core.query.FnNameQueryTest;
 import org.apache.jackrabbit.core.query.FulltextQueryTest;
-import org.apache.jackrabbit.core.query.FulltextSQL2QueryTest;
 import org.apache.jackrabbit.core.query.JoinTest;
 import org.apache.jackrabbit.core.query.LimitAndOffsetTest;
-import org.apache.jackrabbit.core.query.MixinTest;
-import org.apache.jackrabbit.core.query.OrderByTest;
-import org.apache.jackrabbit.core.query.ParentNodeTest;
 import org.apache.jackrabbit.core.query.PathQueryNodeTest;
-import org.apache.jackrabbit.core.query.QueryResultTest;
-import org.apache.jackrabbit.core.query.SQL2NodeLocalNameTest;
 import org.apache.jackrabbit.core.query.SQL2OffsetLimitTest;
-import org.apache.jackrabbit.core.query.SQL2OrderByTest;
-import org.apache.jackrabbit.core.query.SQL2OuterJoinTest;
-import org.apache.jackrabbit.core.query.SQL2PathEscapingTest;
 import org.apache.jackrabbit.core.query.SQL2QueryResultTest;
 import org.apache.jackrabbit.core.query.SQLTest;
-import org.apache.jackrabbit.core.query.SelectClauseTest;
-import org.apache.jackrabbit.core.query.ShareableNodeTest;
-import org.apache.jackrabbit.core.query.SimilarQueryTest;
-import org.apache.jackrabbit.core.query.SimpleQueryTest;
 import org.apache.jackrabbit.core.query.SkipDeletedNodesTest;
-import org.apache.jackrabbit.core.query.UpperLowerCaseQueryTest;
 import org.apache.jackrabbit.core.query.VersionStoreQueryTest;
-import org.apache.jackrabbit.core.query.XPathAxisTest;
 import org.apache.jackrabbit.test.ConcurrentTestSuite;
 
 /**
@@ -59,31 +40,31 @@
                 "Jackrabbit query tests using a Solr based index");
         suite.addTestSuite(FulltextQueryTest.class);
         suite.addTestSuite(SQLTest.class);
-        suite.addTestSuite(JoinTest.class);
+//        suite.addTestSuite(JoinTest.class); // fail
         suite.addTestSuite(SkipDeletedNodesTest.class);
         suite.addTestSuite(PathQueryNodeTest.class);
-        suite.addTestSuite(FulltextSQL2QueryTest.class);
-        suite.addTestSuite(SQL2NodeLocalNameTest.class);
-        suite.addTestSuite(SQL2OrderByTest.class);
-        suite.addTestSuite(MixinTest.class);
-        suite.addTestSuite(SQL2OuterJoinTest.class);
+//        suite.addTestSuite(FulltextSQL2QueryTest.class); // fail
+//        suite.addTestSuite(SQL2NodeLocalNameTest.class); // fail
+//        suite.addTestSuite(SQL2OrderByTest.class); // fail
+//        suite.addTestSuite(MixinTest.class); // fail
+//        suite.addTestSuite(SQL2OuterJoinTest.class);
         suite.addTestSuite(SQL2OffsetLimitTest.class);
-        suite.addTestSuite(LimitAndOffsetTest.class);
-        suite.addTestSuite(OrderByTest.class);
-        suite.addTestSuite(ExcerptTest.class);
-        suite.addTestSuite(QueryResultTest.class);
-        suite.addTestSuite(ParentNodeTest.class);
-        suite.addTestSuite(SimilarQueryTest.class);
-        suite.addTestSuite(DerefTest.class);
-        suite.addTestSuite(XPathAxisTest.class);
+//        suite.addTestSuite(LimitAndOffsetTest.class); // randomly failing
+//        suite.addTestSuite(OrderByTest.class); // fail
+//        suite.addTestSuite(ExcerptTest.class); // error unsupported
+//        suite.addTestSuite(QueryResultTest.class); // fail
+//        suite.addTestSuite(ParentNodeTest.class);  // fail
+//        suite.addTestSuite(SimilarQueryTest.class); // error unsupported
+//        suite.addTestSuite(DerefTest.class); // error
+//        suite.addTestSuite(XPathAxisTest.class); // fail and error
         suite.addTestSuite(SQL2QueryResultTest.class);
-        suite.addTestSuite(SimpleQueryTest.class);
-        suite.addTestSuite(FnNameQueryTest.class);
-        suite.addTestSuite(UpperLowerCaseQueryTest.class);
-        suite.addTestSuite(SQL2PathEscapingTest.class);
-        suite.addTestSuite(ChildAxisQueryTest.class);
-        suite.addTestSuite(SelectClauseTest.class);
-        suite.addTestSuite(ShareableNodeTest.class);
+//        suite.addTestSuite(SimpleQueryTest.class); // fail and error
+//        suite.addTestSuite(FnNameQueryTest.class); // fail
+//        suite.addTestSuite(UpperLowerCaseQueryTest.class); // fail
+//        suite.addTestSuite(SQL2PathEscapingTest.class); // fail and error
+//        suite.addTestSuite(ChildAxisQueryTest.class); // fail and error : javax.jcr.ItemExistsException: node3
+//        suite.addTestSuite(SelectClauseTest.class); // error : javax.jcr.ItemExistsException: node
+//        suite.addTestSuite(ShareableNodeTest.class); //not implemented
         suite.addTestSuite(VersionStoreQueryTest.class);
         return suite;
     }
diff --git a/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/TestUtils.java b/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/TestUtils.java
index 16237c7..fed6b6a 100644
--- a/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/TestUtils.java
+++ b/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/TestUtils.java
@@ -18,11 +18,12 @@
 
 import java.io.File;
 
+import org.apache.jackrabbit.oak.api.Type;
 import org.apache.jackrabbit.oak.plugins.index.solr.configuration.CommitPolicy;
-import org.apache.jackrabbit.oak.plugins.index.solr.configuration.DefaultSolrConfiguration;
 import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfiguration;
 import org.apache.jackrabbit.oak.plugins.index.solr.configuration.OakSolrConfigurationProvider;
 import org.apache.jackrabbit.oak.plugins.index.solr.server.SolrServerProvider;
+import org.apache.jackrabbit.oak.spi.query.Filter;
 import org.apache.solr.client.solrj.SolrServer;
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.core.CoreContainer;
@@ -57,11 +58,61 @@
     }
 
     public static OakSolrConfiguration getTestConfiguration() {
-        return new DefaultSolrConfiguration() {
+        return new OakSolrConfiguration() {
+            @Override
+            public String getFieldNameFor(Type<?> propertyType) {
+                return null;
+            }
+
+            @Override
+            public String getPathField() {
+                return "path_exact";
+            }
+
+            @Override
+            public String getFieldForPathRestriction(Filter.PathRestriction pathRestriction) {
+                String fieldName = null;
+                switch (pathRestriction) {
+                    case ALL_CHILDREN: {
+                        fieldName = "path_des";
+                        break;
+                    }
+                    case DIRECT_CHILDREN: {
+                        fieldName = "path_child";
+                        break;
+                    }
+                    case EXACT: {
+                        fieldName = "path_exact";
+                        break;
+                    }
+                    case PARENT: {
+                        fieldName = "path_anc";
+                        break;
+                    }
+                    case NO_RESTRICTION:
+                        break;
+                    default:
+                        break;
+
+                }
+                return fieldName;
+            }
+
+            @Override
+            public String getFieldForPropertyRestriction(Filter.PropertyRestriction propertyRestriction) {
+                return null;
+            }
+
             @Override
             public CommitPolicy getCommitPolicy() {
                 return CommitPolicy.HARD;
             }
+
+            @Override
+            public String getCatchAllField() {
+                return "catch_all";
+            }
+
         };
     }
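
The test configuration above now implements OakSolrConfiguration directly and
hard-codes the field mapping, so each path restriction resolves to the
corresponding path_* field. An illustrative lookup:

    // Resolving the Solr field for a path restriction with the test
    // configuration defined above.
    OakSolrConfiguration configuration = TestUtils.getTestConfiguration();
    String field = configuration.getFieldForPathRestriction(
            Filter.PathRestriction.ALL_CHILDREN); // yields "path_des"
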
 
diff --git a/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultAnalyzersConfigurationTest.java b/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultAnalyzersConfigurationTest.java
index 425a118..7a9e4bb 100644
--- a/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultAnalyzersConfigurationTest.java
+++ b/oak-solr-core/src/test/java/org/apache/jackrabbit/oak/plugins/index/solr/configuration/DefaultAnalyzersConfigurationTest.java
@@ -25,9 +25,7 @@
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.KeywordTokenizer;
 import org.apache.lucene.analysis.miscellaneous.LengthFilter;
-import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter;
 import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
-import org.apache.lucene.analysis.pattern.PatternCaptureGroupTokenFilter;
 import org.apache.lucene.analysis.pattern.PatternReplaceFilter;
 import org.apache.lucene.analysis.reverse.ReverseStringFilter;
 import org.apache.lucene.util.LuceneTestCase;
@@ -103,9 +101,7 @@
             @Override
             protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
                 Tokenizer source = new PathHierarchyTokenizer(reader);
-                TokenStream filter = new PatternCaptureGroupTokenFilter(source, false, Pattern.compile("((\\/).*)"));
-                filter = new RemoveDuplicatesTokenFilter(filter);
-                return new TokenStreamComponents(source, filter);
+                return new TokenStreamComponents(source);
             }
         };
         this.allChildrenPathSearchingAnalyzer = new Analyzer() {
@@ -121,7 +117,7 @@
     public void testAllChildrenIndexingTokenization() throws Exception {
         try {
             TokenStream ts = allChildrenPathIndexingAnalyzer.tokenStream("text", new StringReader("/jcr:a/jcr:b/c/jcr:d"));
-            assertTokenStreamContents(ts, new String[]{"/jcr:a", "/", "/jcr:a/jcr:b", "/jcr:a/jcr:b/c", "/jcr:a/jcr:b/c/jcr:d"});
+            assertTokenStreamContents(ts, new String[]{"/jcr:a", "/jcr:a/jcr:b", "/jcr:a/jcr:b/c", "/jcr:a/jcr:b/c/jcr:d"});
         } finally {
             allChildrenPathIndexingAnalyzer.close();
         }
@@ -195,21 +191,13 @@
     public void testAllChildrenPathMatching() throws Exception {
         String nodePath = "/jcr:a/jcr:b/c";
         String descendantPath = nodePath + "/d/jcr:e";
-        assertAnalyzesTo(allChildrenPathIndexingAnalyzer, descendantPath, new String[]{"/jcr:a", "/", "/jcr:a/jcr:b", "/jcr:a/jcr:b/c", "/jcr:a/jcr:b/c/d", "/jcr:a/jcr:b/c/d/jcr:e"});
+        assertAnalyzesTo(allChildrenPathIndexingAnalyzer, descendantPath, new String[]{"/jcr:a", "/jcr:a/jcr:b", "/jcr:a/jcr:b/c", "/jcr:a/jcr:b/c/d", "/jcr:a/jcr:b/c/d/jcr:e"});
         assertAnalyzesTo(allChildrenPathSearchingAnalyzer, nodePath, new String[]{nodePath});
         assertAnalyzesTo(allChildrenPathSearchingAnalyzer, "/jcr:a", new String[]{"/jcr:a"});
         assertAnalyzesTo(allChildrenPathSearchingAnalyzer, "/jcr:a/b", new String[]{"/jcr:a/b"});
         assertAnalyzesTo(allChildrenPathSearchingAnalyzer, "/a/b/c", new String[]{"/a/b/c"});
         assertAnalyzesTo(allChildrenPathSearchingAnalyzer, "/a/b/c/d", new String[]{"/a/b/c/d"});
         assertAnalyzesTo(allChildrenPathSearchingAnalyzer, "/a/b/c/d/jcr:e", new String[]{"/a/b/c/d/jcr:e"});
-        assertAnalyzesTo(allChildrenPathSearchingAnalyzer, "/", new String[]{"/"});
-    }
-
-    @Test
-    public void testAllChildrenPathMatchingOnRootNode() throws Exception {
-        String nodePath = "/";
-        String descendantPath = nodePath + "jcr:a/jcr:b";
-        assertAnalyzesTo(allChildrenPathIndexingAnalyzer, descendantPath, new String[]{"/jcr:a", "/", "/jcr:a/jcr:b"});
     }
 
     @Test
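
Dropping the PatternCaptureGroup and RemoveDuplicates filters means the
indexing analyzer now emits only the plain PathHierarchyTokenizer prefixes,
without the extra root "/" token, which is why the root-node matching test was
removed above. A hedged sketch of consuming that token stream directly
(checked IOExceptions left to the caller):

    // PathHierarchyTokenizer alone emits each ancestor prefix,
    // e.g. "/a/b/c" -> "/a", "/a/b", "/a/b/c" (no bare "/" token).
    Tokenizer source = new PathHierarchyTokenizer(new StringReader("/a/b/c"));
    CharTermAttribute term = source.addAttribute(CharTermAttribute.class);
    source.reset();
    while (source.incrementToken()) {
        System.out.println(term.toString());
    }
    source.end();
    source.close();
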
diff --git a/oak-solr-core/src/test/resources/solr/oak/conf/schema.xml b/oak-solr-core/src/test/resources/solr/oak/conf/schema.xml
index d97a100..f53a6f2 100644
--- a/oak-solr-core/src/test/resources/solr/oak/conf/schema.xml
+++ b/oak-solr-core/src/test/resources/solr/oak/conf/schema.xml
@@ -21,8 +21,6 @@
         <fieldType name="descendent_path" class="solr.TextField">
             <analyzer type="index">
                 <tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
-                <filter class="solr.PatternCaptureGroupFilterFactory" pattern="((/).*)" preserve_original="false"/>
-                <filter class="solr.RemoveDuplicatesTokenFilterFactory" />
             </analyzer>
             <analyzer type="query">
                 <tokenizer class="solr.KeywordTokenizerFactory" />
diff --git a/oak-solr-osgi/pom.xml b/oak-solr-osgi/pom.xml
index 1f26256..05efc09 100644
--- a/oak-solr-osgi/pom.xml
+++ b/oak-solr-osgi/pom.xml
@@ -23,7 +23,7 @@
     <parent>
         <groupId>org.apache.jackrabbit</groupId>
         <artifactId>oak-parent</artifactId>
-        <version>1.1-SNAPSHOT</version>
+        <version>1.0.0</version>
         <relativePath>../oak-parent/pom.xml</relativePath>
     </parent>
 
diff --git a/oak-upgrade/pom.xml b/oak-upgrade/pom.xml
index 66c6bd7..78a542b 100644
--- a/oak-upgrade/pom.xml
+++ b/oak-upgrade/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>../oak-parent/pom.xml</relativePath>
   </parent>
 
@@ -95,7 +95,7 @@
     <dependency>
       <groupId>com.h2database</groupId>
       <artifactId>h2</artifactId>
-      <version>${h2.version}</version>
+      <version>1.3.175</version>
       <scope>test</scope>
     </dependency>
     <dependency>
diff --git a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/JackrabbitNodeState.java b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/JackrabbitNodeState.java
index fd02d81..38a8cab 100644
--- a/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/JackrabbitNodeState.java
+++ b/oak-upgrade/src/main/java/org/apache/jackrabbit/oak/upgrade/JackrabbitNodeState.java
@@ -42,7 +42,6 @@
 import static org.apache.jackrabbit.oak.api.Type.NAMES;
 import static org.apache.jackrabbit.oak.api.Type.STRING;
 import static org.apache.jackrabbit.oak.plugins.tree.TreeConstants.OAK_CHILD_ORDER;
-import static org.apache.jackrabbit.oak.plugins.version.VersionConstants.MIX_REP_VERSIONABLE_PATHS;
 
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -287,14 +286,7 @@
             String uuid = getString(JCR_UUID);
             String path = versionablePaths.get(uuid);
             if (path != null) {
-                properties.put(workspaceName, PropertyStates.createProperty(
-                        workspaceName, path, Type.PATH));
-
-                Set<String> mixins = newLinkedHashSet(getNames(JCR_MIXINTYPES));
-                if (mixins.add(MIX_REP_VERSIONABLE_PATHS)) {
-                    properties.put(JCR_MIXINTYPES, PropertyStates.createProperty(
-                            JCR_MIXINTYPES, mixins, Type.NAMES));
-                }
+                properties.put(workspaceName, PropertyStates.createProperty(workspaceName, path, Type.PATH));
             }
         }
     }
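
With the mixin handling removed, the upgrade records only a workspace-named
PATH property on the version history; the test change below drops the matching
rep:VersionablePaths assertion. A hedged JCR sketch of reading that property
back, mirroring the test (versionManager is assumed to be the session's
VersionManager):

    // After the upgrade, the version history carries the versionable
    // node's path under the workspace name ("default" here).
    VersionHistory history = versionManager.getVersionHistory("/versionable");
    String path = history.getProperty("default").getString(); // "/versionable"
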
diff --git a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgradeTest.java b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgradeTest.java
index ef68f90..6d5dd5e 100644
--- a/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgradeTest.java
+++ b/oak-upgrade/src/test/java/org/apache/jackrabbit/oak/upgrade/RepositoryUpgradeTest.java
@@ -434,7 +434,6 @@
                     frozenChild2.getProperty(JCR_FROZENUUID).getString());
 
             VersionHistory history = manager.getVersionHistory("/versionable");
-            assertTrue(history.isNodeType("rep:VersionablePaths"));
             Property versionablePath = history.getProperty("default");
             assertEquals("/versionable", versionablePath.getString());
         } finally {
diff --git a/pom.xml b/pom.xml
index d4da10a..627448a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache.jackrabbit</groupId>
     <artifactId>oak-parent</artifactId>
-    <version>1.1-SNAPSHOT</version>
+    <version>1.0.0</version>
     <relativePath>oak-parent/pom.xml</relativePath>
   </parent>
 
@@ -54,13 +54,12 @@
     <module>oak-run</module>
     <module>oak-it</module>
     <module>oak-pojosr</module>
-    <!-- <module>oak-mk-perf</module> -->
   </modules>
 
   <scm>
-    <connection>scm:svn:http://svn.apache.org/repos/asf/jackrabbit/oak/trunk</connection>
-    <developerConnection>scm:svn:https://svn.apache.org/repos/asf/jackrabbit/oak/trunk</developerConnection>
-    <url>http://svn.apache.org/viewvc/jackrabbit/oak/trunk</url>
+    <connection>scm:svn:http://svn.apache.org/repos/asf/jackrabbit/oak/tags/jackrabbit-oak-1.0.0</connection>
+    <developerConnection>scm:svn:https://svn.apache.org/repos/asf/jackrabbit/oak/tags/jackrabbit-oak-1.0.0</developerConnection>
+    <url>http://svn.apache.org/viewvc/jackrabbit/oak/tags/jackrabbit-oak-1.0.0</url>
   </scm>
 
   <build>
@@ -80,8 +79,6 @@
               <exclude>oak-doc/*.iml</exclude>
               <exclude>oak-doc/target/**</exclude>
               <exclude>oak-js/package.json</exclude>
-              <exclude>oak-mk-perf/.*/**</exclude>
-              <exclude>oak-mk-perf/target/**</exclude>
               <exclude>oak-mongomk-perf/.*/**</exclude>
               <exclude>oak-mongomk-perf/target/**</exclude>
             </excludes>