LUCENE-6005: merge trunk

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene6005@1658277 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml b/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml
index 433f679..6268247 100644
--- a/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml
+++ b/dev-tools/idea/solr/contrib/dataimporthandler/dataimporthandler.iml
@@ -25,5 +25,6 @@
     <orderEntry type="module" module-name="solrj" />
     <orderEntry type="module" module-name="analysis-common" />
     <orderEntry type="module" module-name="lucene-core" />
+    <orderEntry type="module" scope="TEST" module-name="join" />
   </component>
 </module>
diff --git a/dev-tools/idea/solr/contrib/velocity/velocity.iml b/dev-tools/idea/solr/contrib/velocity/velocity.iml
index 6f5bf4d..6b91e12 100644
--- a/dev-tools/idea/solr/contrib/velocity/velocity.iml
+++ b/dev-tools/idea/solr/contrib/velocity/velocity.iml
@@ -6,8 +6,10 @@
     <exclude-output />
     <content url="file://$MODULE_DIR$">
       <sourceFolder url="file://$MODULE_DIR$/src/test" isTestSource="true" />
+      <sourceFolder url="file://$MODULE_DIR$/src/test/velocity" type="java-test-resource" />
       <sourceFolder url="file://$MODULE_DIR$/src/test-files" type="java-test-resource" />
       <sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
+      <sourceFolder url="file://$MODULE_DIR$/src/resources" type="java-resource" />
     </content>
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
diff --git a/dev-tools/maven/lucene/replicator/pom.xml.template b/dev-tools/maven/lucene/replicator/pom.xml.template
index 1f287b7..60d0085 100644
--- a/dev-tools/maven/lucene/replicator/pom.xml.template
+++ b/dev-tools/maven/lucene/replicator/pom.xml.template
@@ -68,11 +68,6 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <systemPropertyVariables>
-            <tests.jettyConnector>${tests.jettyConnector}</tests.jettyConnector>
-          </systemPropertyVariables>
-        </configuration>
       </plugin>
     </plugins>
   </build>
diff --git a/dev-tools/maven/solr/contrib/velocity/pom.xml.template b/dev-tools/maven/solr/contrib/velocity/pom.xml.template
index 6dfc110..952572a 100644
--- a/dev-tools/maven/solr/contrib/velocity/pom.xml.template
+++ b/dev-tools/maven/solr/contrib/velocity/pom.xml.template
@@ -62,12 +62,23 @@
   </dependencies>
   <build>
     <sourceDirectory>${module-path}/src/java</sourceDirectory>
+    <resources>
+      <resource>
+        <directory>${module-path}/src/resources</directory>
+      </resource>
+    </resources>
     <testSourceDirectory>${module-path}/src/test</testSourceDirectory>
     <testResources>
       <testResource>
         <directory>${module-path}/src/test-files</directory>
       </testResource>
       <testResource>
+        <directory>${module-path}/src/test</directory>
+        <includes>
+          <include>velocity/*.properties</include>
+        </includes>
+      </testResource>
+      <testResource>
         <directory>${top-level}/dev-tools/maven/solr</directory>
         <includes>
           <include>maven.testlogging.properties</include>
diff --git a/dev-tools/maven/solr/pom.xml.template b/dev-tools/maven/solr/pom.xml.template
index c2fd1a1..518748d 100644
--- a/dev-tools/maven/solr/pom.xml.template
+++ b/dev-tools/maven/solr/pom.xml.template
@@ -103,7 +103,6 @@
           <artifactId>maven-surefire-plugin</artifactId>
           <configuration>
             <systemPropertyVariables>
-              <tests.jettyConnector>${tests.jettyConnector}</tests.jettyConnector>
               <tests.disableHdfs>${tests.disableHdfs}</tests.disableHdfs>
             </systemPropertyVariables>
           </configuration>
diff --git a/dev-tools/scripts/buildAndPushRelease.py b/dev-tools/scripts/buildAndPushRelease.py
index 2ffa386..6e73f8d 100644
--- a/dev-tools/scripts/buildAndPushRelease.py
+++ b/dev-tools/scripts/buildAndPushRelease.py
@@ -280,9 +280,9 @@
     rev = open('rev.txt', encoding='UTF-8').read()
 
   if c.push_remote:
-    url = push(version, root, rev, rcNum, username)
+    url = push(c.version, c.root, rev, c.rc_num, c.push_remote)
   elif c.push_local:
-    url = pushLocal(version, root, rev, c.rc_num, c.push_local)
+    url = pushLocal(c.version, c.root, rev, c.rc_num, c.push_local)
   else:
     url = None
 
diff --git a/dev-tools/scripts/checkJavaDocs.py b/dev-tools/scripts/checkJavaDocs.py
index b8a6599..4c36809 100644
--- a/dev-tools/scripts/checkJavaDocs.py
+++ b/dev-tools/scripts/checkJavaDocs.py
@@ -80,8 +80,6 @@
   Checks for invalid HTML in the full javadocs under each field/method.
   """
 
-  isAttributeSource = fullPath.endswith('AttributeSource.html')
-
   # TODO: only works with java7 generated javadocs now!
   with open(fullPath, encoding='UTF-8') as f:
     desc = None
@@ -90,15 +88,9 @@
     errors = []
     for line in f.readlines():
 
-      if isAttributeSource:
-        # Work around Javadocs bug that fails to escape the <T> type parameter in {@link #getAttribute} and {@link #addAttribute}
-        line = line.replace('<code>getAttribute(java.lang.Class<T>)</code>', '<code>getAttribute(java.lang.Class)</code>')
-        line = line.replace('<code>addAttribute(java.lang.Class<T>)</code>', '<code>addAttribute(java.lang.Class)</code>')
-      
       m = reH3.search(line)
       if m is not None:
         if desc is not None:
-          # Have to fake <ul> context because we pulled a fragment out "across" two <ul>s:
           desc = ''.join(desc)
           if True or cat == 'Constructor Detail':
             idx = desc.find('</div>')
@@ -108,6 +100,7 @@
               continue
             desc = desc[:idx+6]
           else:
+            # Have to fake <ul> context because we pulled a fragment out "across" two <ul>s:
             desc = '<ul>%s</ul>' % ''.join(desc)
           #print('  VERIFY %s: %s: %s' % (cat, item, desc))
           try:
@@ -123,7 +116,13 @@
       if m is not None:
         if desc is not None:
           # Have to fake <ul> context because we pulled a fragment out "across" two <ul>s:
-          desc = '<ul>%s</ul>' % ''.join(desc)
+          if cat == 'Element Detail':
+            desc = ''.join(desc)
+            idx = desc.find('</dl>')
+            if idx != -1:
+              desc = desc[:idx+5]
+          else:
+            desc = '<ul>%s</ul>' % ''.join(desc)
           #print('  VERIFY %s: %s: %s' % (cat, item, desc))
           try:
             verifyHTML(desc)
diff --git a/dev-tools/scripts/checkJavadocLinks.py b/dev-tools/scripts/checkJavadocLinks.py
index f57e7eb..b7b9a56 100644
--- a/dev-tools/scripts/checkJavadocLinks.py
+++ b/dev-tools/scripts/checkJavadocLinks.py
@@ -197,6 +197,9 @@
         elif link == 'http://lucene.apache.org/solr/':
           # OK
           pass
+        elif link == 'http://lucene.apache.org/solr/resources.html':
+          # OK
+          pass
         elif link.find('lucene.apache.org/java/docs/discussion.html') != -1:
           # OK
           pass
@@ -206,10 +209,7 @@
         elif link.find('lucene.apache.org/solr/mirrors-solr-latest-redir.html') != -1:
           # OK
           pass
-        elif link.find('lucene.apache.org/solr/discussion.html') != -1:
-          # OK
-          pass
-        elif link.find('lucene.apache.org/solr/features.html') != -1:
+        elif link.find('lucene.apache.org/solr/quickstart.html') != -1:
           # OK
           pass
         elif (link.find('svn.apache.org') != -1
diff --git a/dev-tools/scripts/smokeTestRelease.py b/dev-tools/scripts/smokeTestRelease.py
index d8c9c30..202e10d 100644
--- a/dev-tools/scripts/smokeTestRelease.py
+++ b/dev-tools/scripts/smokeTestRelease.py
@@ -635,8 +635,6 @@
     textFiles.extend(('JRE_VERSION_MIGRATION', 'CHANGES', 'MIGRATE', 'SYSTEM_REQUIREMENTS'))
     if isSrc:
       textFiles.append('BUILD')
-  elif not isSrc:
-    textFiles.append('SYSTEM_REQUIREMENTS')
 
   for fileName in textFiles:
     fileName += '.txt'
@@ -686,8 +684,6 @@
   if project == 'lucene':
     if len(l) > 0:
       raise RuntimeError('%s: unexpected files/dirs in artifact %s: %s' % (project, artifact, l))
-  elif isSrc and not os.path.exists('%s/solr/SYSTEM_REQUIREMENTS.txt' % unpackPath):
-    raise RuntimeError('%s: solr/SYSTEM_REQUIREMENTS.txt does not exist in artifact %s' % (project, artifact))
 
   if isSrc:
     print('    make sure no JARs/WARs in src dist...')
@@ -856,7 +852,7 @@
 
     print('      startup done')
     # Create the techproducts config (used to be collection1)
-    subprocess.call(['bin/solr','create_core','-n','techproducts','-c','sample_techproducts_configs'])
+    subprocess.call(['bin/solr','create_core','-c','techproducts','-d','sample_techproducts_configs'])
     os.chdir('example')
     print('      test utf8...')
     run('sh ./exampledocs/test_utf8.sh http://localhost:8983/solr/techproducts', 'utf8.log')
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index f070359..e722c39 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -29,6 +29,33 @@
 * LUCENE-6067: Accountable.getChildResources has a default
   implementation returning the empty list.  (Robert Muir)
 
+======================= Lucene 5.1.0 =======================
+
+Optimizations
+
+* LUCENE-6183, LUCENE-5647: Avoid recompressing stored fields
+  and term vectors when merging segments without deletions. 
+  Lucene50Codec's BEST_COMPRESSION mode uses a higher deflate 
+  level for more compact storage.  (Robert Muir)
+
+* LUCENE-6184: Make BooleanScorer only score windows that contain
+  matches. (Adrien Grand)
+
+* LUCENE-6161: Speed up resolving of deleted terms to docIDs by doing
+  a combined merge sort between deleted terms and segment terms
+  instead of a separate merge sort for each segment.  In delete-heavy
+  use cases this can be a sizable speedup. (Mike McCandless)
+
+API Changes
+
+* LUCENE-6204, LUCENE-6208: Simplify CompoundFormat: remove files()
+  and remove the files parameter from write(). (Robert Muir)
+
+Other
+
+* LUCENE-6193: Collapse identical catch branches in try-catch statements.
+  (shalin)
+
 ======================= Lucene 5.0.0 =======================
 
 New Features
@@ -130,6 +157,22 @@
 * LUCENE-5914: Add an option to Lucene50Codec to support either BEST_SPEED
   or BEST_COMPRESSION for stored fields. (Adrien Grand, Robert Muir)
 
+* LUCENE-6119: Add auto-IO-throttling to ConcurrentMergeScheduler, to
+  rate limit IO writes for each merge depending on incoming merge
+  rate.  (Mike McCandless)
+
+* LUCENE-6155: Add payload support to MemoryIndex. The default highlighter's
+  QueryScorer and WeightedSpanTermExtractor now have setUsePayloads(bool).
+  (David Smiley)
+
+* LUCENE-6166: Deletions (alone) can now trigger new merges.  (Mike McCandless)
+
+* LUCENE-6177: Add CustomAnalyzer that allows you to configure analyzers
+  like you do in Solr's index schema. This class has a builder API to configure
+  Tokenizers, TokenFilters, and CharFilters based on their SPI names
+  and parameters as documented by the corresponding factories.
+  (Uwe Schindler)
+
 Optimizations
 
 * LUCENE-5960: Use a more efficient bitset, not a Set<Integer>, to
@@ -189,6 +232,9 @@
 * LUCENE-6145: Make EarlyTerminatingSortingCollector able to early-terminate
   when the sort order is a prefix of the index-time order. (Adrien Grand)
 
+* LUCENE-6178: Score boolean queries containing MUST_NOT clauses with BooleanScorer2,
+  to use skip list data and avoid unnecessary scoring. (Adrien Grand, Robert Muir)
+
 API Changes
 
 * LUCENE-5900: Deprecated more constructors taking Version in *InfixSuggester and
@@ -217,6 +263,9 @@
 * LUCENE-5527: The Collector API has been refactored to use a dedicated Collector
   per leaf. (Shikhar Bhushan, Adrien Grand)
 
+* LUCENE-5702: The FieldComparator API has been refactored to a per-leaf API, just
+  like Collectors. (Adrien Grand)
+
 * LUCENE-4246: IndexWriter.close now always closes, even if it throws
   an exception.  The new IndexWriterConfig.setCommitOnClose (default
   true) determines whether close() should commit before closing.
@@ -332,6 +381,22 @@
 * LUCENE-6146: Replaced Directory.copy() with Directory.copyFrom().
   (Robert Muir)
 
+* LUCENE-6149: Infix suggesters' highlighting and allTermsRequired can
+  be set at the constructor for non-contextual lookup.
+  (Boon Low, Tomás Fernández Löbbe)
+
+* LUCENE-6158, LUCENE-6165: IndexWriter.addIndexes(IndexReader...) changed to
+  addIndexes(CodecReader...) (Robert Muir)
+
+* LUCENE-6179: Out-of-order scoring is not allowed anymore, so
+  Weight.scoresDocsOutOfOrder and LeafCollector.acceptsDocsOutOfOrder have been
+  removed and boolean queries now always score in order.
+
+* LUCENE-6212: IndexWriter no longer accepts per-document Analyzer to
+  add/updateDocument.  These methods were trappy as they made it
+  easy to accidentally index tokens that were not easily
+  searchable. (Mike McCandless)
+
 Bug Fixes
 
 * LUCENE-5650: Enforce read-only access to any path outside the temporary
@@ -351,7 +416,7 @@
 
 * LUCENE-5980: Don't let document length overflow. (Robert Muir)
 
-* LUCENE-5961: Fix the exists() method for FunctionValues returned by many ValueSoures to
+* LUCENE-5961: Fix the exists() method for FunctionValues returned by many ValueSources to
   behave properly when wrapping other ValueSources which do not exist for the specified document
   (hossman)
 
@@ -408,7 +473,26 @@
   
 * LUCENE-6152: Fix double close problems in OutputStreamIndexOutput.
   (Uwe Schindler)
+
+* LUCENE-6139: Highlighter: TokenGroup start & end offset getters should have
+  been returning the offsets of just the matching tokens in the group when
+  there's a distinction. (David Smiley)
   
+* LUCENE-6173: NumericTermAttribute and spatial/CellTokenStream do not clone
+  their BytesRef(Builder)s. Also equals/hashCode was missing.  (Uwe Schindler)
+
+* LUCENE-6205: Fixed intermittent concurrency issue that could cause
+  FileNotFoundException when writing doc values updates at the same
+  time that a merge kicks off.  (Mike McCandless)
+  
+* LUCENE-6192: Fix int overflow corruption case in skip data for
+  high frequency terms in extremely large indices (Robert Muir, Mike
+  McCandless)
+
+* LUCENE-6093: Don't throw NullPointerException from
+  BlendedInfixSuggester for lookups that do not end in a prefix
+  token.  (jane chang via Mike McCandless)
+
 Documentation
 
 * LUCENE-5392: Add/improve analysis package documentation to reflect
@@ -463,11 +547,20 @@
 * LUCENE-5563: Removed sep layout: which has fallen behind on features and doesn't
   perform as well as other options.  (Robert Muir)
 
-* LUCENE-5858: Moved compatibility codecs to 'lucene-backward-codecs.jar'. 
+* LUCENE-4086: Removed support for Lucene 3.x indexes. See migration guide for
+  more information.  (Robert Muir)
+
+* LUCENE-5858: Moved Lucene 4 compatibility codecs to 'lucene-backward-codecs.jar'. 
   (Adrien Grand, Robert Muir)
 
 * LUCENE-5915: Remove Pulsing postings format. (Robert Muir)
 
+======================= Lucene 4.10.4 ======================
+
+* LUCENE-6207: Fixed consumption of several terms enums on the same
+  sorted (set) doc values instance at the same time.
+  (Tom Shally, Robert Muir, Adrien Grand)
+
 ======================= Lucene 4.10.3 ======================
 
 Bug fixes
@@ -475,6 +568,7 @@
 * LUCENE-3229: Overlapping ordered SpanNearQuery spans should not match.
   (Ludovic Boutros, Paul Elschot, Greg Dearing, ehatcher)
 
+
 ======================= Lucene 4.10.2 ======================
 
 Bug fixes
@@ -607,7 +701,7 @@
 * LUCENE-5883: You can now change the MergePolicy instance on a live IndexWriter,
   without first closing and reopening the writer. This allows to e.g. run a special
   merge with UpgradeIndexMergePolicy without reopening the writer. Also, MergePolicy
-  no longer implements Closeable; if you need to release your custom MegePolicy's
+  no longer implements Closeable; if you need to release your custom MergePolicy's
   resources, you need to implement close() and call it explicitly. (Shai Erera)
 
 * LUCENE-5859: Deprecate Analyzer constructors taking Version.  Use Analyzer.setVersion()
@@ -742,7 +836,7 @@
   Mike McCandless)
 
 * LUCENE-5827: Make all Directory implementations correctly fail with
-  IllegalArgumentException if slices are out of bounds.  (Uwe SChindler)
+  IllegalArgumentException if slices are out of bounds.  (Uwe Schindler)
 
 * LUCENE-5897, LUCENE-5400: JFlex-based tokenizers StandardTokenizer and
   UAX29URLEmailTokenizer tokenize extremely slowly over long sequences of
@@ -814,7 +908,7 @@
 API Changes
 
 * LUCENE-5756: IndexWriter now implements Accountable and IW#ramSizeInBytes()
-  has been deprecated infavor of IW#ramBytesUsed() (Simon Willnauer)
+  has been deprecated in favor of IW#ramBytesUsed() (Simon Willnauer)
 
 * LUCENE-5725: MoreLikeThis#like now accepts multiple values per field.
   The pre-existing method has been deprecated in favor of a variable arguments
@@ -1287,7 +1381,7 @@
   to obtain the lock. (Uwe Schindler, Robert Muir)
 
 * LUCENE-5626: Fix bug in SimpleFSLockFactory's obtain() that sometimes throwed
-  IOException (ERROR_ACESS_DENIED) on Windows if the lock file was created
+  IOException (ERROR_ACCESS_DENIED) on Windows if the lock file was created
   concurrently. This error is now handled the same way like in NativeFSLockFactory
   by returning false.  (Uwe Schindler, Robert Muir, Dawid Weiss)
 
@@ -1403,8 +1497,8 @@
 
 * LUCENE-5553: IndexReader#ReaderClosedListener is not always invoked when 
   IndexReader#close() is called or if refCount is 0. If an exception is 
-  thrown during interal close or on any of the close listerns some or all
-  listerners might be missed. This can cause memory leaks if the core listeners
+  thrown during internal close or on any of the close listeners some or all
+  listeners might be missed. This can cause memory leaks if the core listeners
   are used to clear caches. (Simon Willnauer)
 
 Build
@@ -1420,7 +1514,7 @@
   (Jack Conradson via Robert Muir)
   
 * LUCENE-5337: Add Payload support to FileDictionary (Suggest) and make it more
-  configurable (Areek Zilluer via Erick Erickson)
+  configurable (Areek Zillur via Erick Erickson)
 
 * LUCENE-5329: suggest: DocumentDictionary and
   DocumentExpressionDictionary are now lenient for dirty documents
@@ -1589,8 +1683,8 @@
   to byte, before calling Similarity.decodeNormValue.  (Peng Cheng via
   Mike McCandless)
 
-* LUCENE-5436: RefrenceManager#accquire can result in infinite loop if
-  managed resource is abused outside of the RefrenceManager. Decrementing
+* LUCENE-5436: ReferenceManager#acquire can result in infinite loop if
+  managed resource is abused outside of the ReferenceManager. Decrementing
   the reference without a corresponding incRef() call can cause an infinite
   loop. ReferenceManager now throws IllegalStateException if currently managed
   resources ref count is 0. (Simon Willnauer)
@@ -1598,7 +1692,7 @@
 * LUCENE-5443: Lucene45DocValuesProducer.ramBytesUsed() may throw
   ConcurrentModificationException. (Shai Erera, Simon Willnauer)
 
-* LUCENE-5444: MemoryIndex did't respect the analyzers offset gap and
+* LUCENE-5444: MemoryIndex didn't respect the analyzer's offset gap and
   offsets were corrupted if multiple fields with the same name were
   added to the memory index. (Britta Weber, Simon Willnauer)
 
@@ -1622,7 +1716,7 @@
   to IndexWriter.addDocument(). (Shai Erera, Gilad Barkai, Rob
   Muir, Mike McCandless)
 
-* LUCENE-5405: Make ShingleAnalzyerWrapper.getWrappedAnalyzer() public final (gsingers)
+* LUCENE-5405: Make ShingleAnalyzerWrapper.getWrappedAnalyzer() public final (gsingers)
 
 * LUCENE-5395: The SpatialArgsParser now only reads WKT, no more "lat, lon"
   etc. but it's easy to override the parseShape method if you wish. (David
@@ -1666,7 +1760,7 @@
   to the analysis package overview.  
   (Benson Margulies via Robert Muir - pull request #12)
 
-* LUCENE-5389: Add more guidance in the analyis documentation 
+* LUCENE-5389: Add more guidance in the analysis documentation 
   package overview.
   (Benson Margulies via Robert Muir - pull request #14)
 
@@ -1865,7 +1959,7 @@
 Documentation
 
 * LUCENE-5211: Better javadocs and error checking of 'format' option in 
-  StopFilterFactory, as well as comments in all snowball formated files
+  StopFilterFactory, as well as comments in all snowball formatted files
   about specifying format option.  (hossman)
 
 Changes in backwards compatibility policy
@@ -2023,7 +2117,7 @@
   some result documents were missing category associations. (Shai Erera)
 
 * LUCENE-5152: Fix MemoryPostingsFormat to not modify borrowed BytesRef from FSTEnum
-  seek/lookup which can cause sideeffects if done on a cached FST root arc.
+  seek/lookup which can cause side effects if done on a cached FST root arc.
   (Simon Willnauer)
 
 * LUCENE-5160: Handle the case where reading from a file or FileChannel returns -1,
@@ -2096,7 +2190,7 @@
 
 * LUCENE-5144: StandardFacetsAccumulator renamed to OldFacetsAccumulator, and all
   associated classes were moved under o.a.l.facet.old. The intention to remove it
-  one day, when the features it covers (complements, partitiona, sampling) will be
+  one day, when the features it covers (complements, partitions, sampling) will be
   migrated to the new FacetsAggregator and FacetsAccumulator API. Also,
   FacetRequest.createAggregator was replaced by OldFacetsAccumulator.createAggregator.
   (Shai Erera)
@@ -2173,7 +2267,7 @@
   synchronization and concurrent interaction with IndexWriter. DWPT is now
   only setup once and has no reset logic. All segment publishing and state
   transition from DWPT into IndexWriter is now done via an Event-Queue
-  processed from within the IndexWriter in order to prevent suituations
+  processed from within the IndexWriter in order to prevent situations
   where DWPT or DW calling int IW causing deadlocks. (Simon Willnauer)
 
 * LUCENE-5182: Terminate phrase searches early if max phrase window is 
@@ -2294,7 +2388,7 @@
   jvms and it's faster not to cache.  (Robert Muir)
 
 * LUCENE-5038: MergePolicy now has a default implementation for useCompoundFile based
-  on segment size and noCFSRatio. The default implemantion was pulled up from
+  on segment size and noCFSRatio. The default implementation was pulled up from
   TieredMergePolicy. (Simon Willnauer)
 
 * LUCENE-5063: FieldCache.get(Bytes|Shorts), SortField.Type.(BYTE|SHORT) and
@@ -2494,7 +2588,7 @@
   
 * LUCENE-5097: Analyzer now has an additional tokenStream(String fieldName,
   String text) method, so wrapping by StringReader for common use is no
-  longer needed. This method uses an internal reuseable reader, which was
+  longer needed. This method uses an internal reusable reader, which was
   previously only used by the Field class.  (Uwe Schindler, Robert Muir)
   
 * LUCENE-4542: HunspellStemFilter's maximum recursion level is now configurable.
@@ -2503,7 +2597,7 @@
 Build
 
 * LUCENE-4987: Upgrade randomized testing to version 2.0.10: 
-  Test framework may fail internally due to overly aggresive J9 optimizations. 
+  Test framework may fail internally due to overly aggressive J9 optimizations. 
   (Dawid Weiss, Shai Erera)
 
 * LUCENE-5043: The eclipse target now uses the containing directory for the
@@ -2550,7 +2644,7 @@
   large heap/explicitly disabled.  (Mike McCandless, Uwe Schindler, Robert Muir)
 
 * LUCENE-4953: Fixed ParallelCompositeReader to inform ReaderClosedListeners of
-  its synthetic subreaders. FieldCaches keyed on the atomic childs will be purged
+  its synthetic subreaders. FieldCaches keyed on the atomic children will be purged
   earlier and FC insanity prevented.  In addition, ParallelCompositeReader's
   toString() was changed to better reflect the reader structure.
   (Mike McCandless, Uwe Schindler)
@@ -2915,7 +3009,7 @@
   OpenMode.CREATE_OR_APPEND is used. This might also affect application that set
   the open mode manually using DirectoryReader#indexExists. (Simon Willnauer)
 
-* LUCENE-4878: Override getRegexpQuery in MultiFieldQueryParser to prefent
+* LUCENE-4878: Override getRegexpQuery in MultiFieldQueryParser to prevent
   NullPointerException when regular expression syntax is used with
   MultiFieldQueryParser. (Simon Willnauer, Adam Rauch)
 
@@ -2984,7 +3078,7 @@
   sorted by value, while ties are broken by category ordinal. (Shai Erera)
 
 * LUCENE-4772: Facet associations moved to new FacetsAggregator API. You
-  should override FacetsAccumualtor and return the relevant aggregator,
+  should override FacetsAccumulator and return the relevant aggregator,
   for aggregating the association values. (Shai Erera)
   
 * LUCENE-4748: A FacetRequest on a non-existent field now returns an
@@ -3675,7 +3769,7 @@
 * LUCENE-4391, LUCENE-4440: All methods of Lucene40Codec but
   getPostingsFormatForField are now final. To reuse functionality 
   of Lucene40, you should extend FilterCodec and delegate to Lucene40
-  instead of extending Lucene40Codec.  (Adrien Grand, Shai Erea,
+  instead of extending Lucene40Codec.  (Adrien Grand, Shai Erera,
   Robert Muir, Uwe Schindler)
 
 * LUCENE-4299: Added Terms.hasPositions() and Terms.hasOffsets().
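
To make the LUCENE-6158/LUCENE-6165 entry above concrete, here is a minimal migration sketch for callers of the old addIndexes(IndexReader...). It assumes an already-open IndexWriter and DirectoryReader, and uses SlowCodecReaderWrapper to adapt leaves that are not already CodecReaders; treat it as an illustration, not the canonical migration path.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.CodecReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SlowCodecReaderWrapper;

public class AddIndexesMigration {
  // Copies all segments of an open DirectoryReader into the given writer
  // using the new CodecReader-based signature.
  static void copyInto(IndexWriter writer, DirectoryReader reader) throws IOException {
    List<CodecReader> codecReaders = new ArrayList<>();
    for (LeafReaderContext ctx : reader.leaves()) {
      // Wrap each leaf; this is the slow path for readers that are not
      // already CodecReaders (e.g. filtered or parallel readers).
      codecReaders.add(SlowCodecReaderWrapper.wrap(ctx.reader()));
    }
    writer.addIndexes(codecReaders.toArray(new CodecReader[codecReaders.size()]));
  }
}
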
diff --git a/lucene/analysis/common/README.txt b/lucene/analysis/common/README.txt
index f4338a4..189af76 100644
--- a/lucene/analysis/common/README.txt
+++ b/lucene/analysis/common/README.txt
@@ -1,7 +1,7 @@
-Lucene Contrib Analyzers README file
+Lucene Analyzers README file
 
 This project provides pre-compiled version of the Snowball stemmers
-based on revision 500 of the Tartarus Snowball repository,
+based on revision 502 of the Tartarus Snowball repository,
 together with classes integrating them with the Lucene search engine.
 
 A few changes has been made to the static Snowball code and compiled stemmers:
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java
index cc461f4..25d4a74 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/DictionaryCompoundWordTokenFilterFactory.java
@@ -48,7 +48,6 @@
   /** Creates a new DictionaryCompoundWordTokenFilterFactory */
   public DictionaryCompoundWordTokenFilterFactory(Map<String, String> args) {
     super(args);
-    assureMatchVersion();
     dictFile = require(args, "dictionary");
     minWordSize = getInt(args, "minWordSize", CompoundWordTokenFilterBase.DEFAULT_MIN_WORD_SIZE);
     minSubwordSize = getInt(args, "minSubwordSize", CompoundWordTokenFilterBase.DEFAULT_MIN_SUBWORD_SIZE);
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java
index d4b04f4..8479182 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/compound/HyphenationCompoundWordTokenFilterFactory.java
@@ -74,7 +74,6 @@
   /** Creates a new HyphenationCompoundWordTokenFilterFactory */
   public HyphenationCompoundWordTokenFilterFactory(Map<String, String> args) {
     super(args);
-    assureMatchVersion();
     dictFile = get(args, "dictionary");
     encoding = get(args, "encoding");
     hypFile = require(args, "hyphenator");
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
new file mode 100644
index 0000000..b836f02
--- /dev/null
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/CustomAnalyzer.java
@@ -0,0 +1,323 @@
+package org.apache.lucene.analysis.custom;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.io.Reader;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.util.AbstractAnalysisFactory;
+import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.ClasspathResourceLoader;
+import org.apache.lucene.analysis.util.FilesystemResourceLoader;
+import org.apache.lucene.analysis.util.ResourceLoader;
+import org.apache.lucene.analysis.util.ResourceLoaderAware;
+import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.analysis.util.TokenizerFactory;
+import org.apache.lucene.util.SetOnce;
+import org.apache.lucene.util.Version;
+
+/**
+ * A general-purpose Analyzer that can be created with a builder-style API.
+ * Under the hood it uses the factory classes {@link TokenizerFactory},
+ * {@link TokenFilterFactory}, and {@link CharFilterFactory}.
+ * <p>You can create an instance of this Analyzer using the builder:
+ * <pre class="prettyprint">
+ * Analyzer ana = CustomAnalyzer.builder(Paths.get(&quot;/path/to/config/dir&quot;))
+ *   .withTokenizer(&quot;standard&quot;)
+ *   .addTokenFilter(&quot;standard&quot;)
+ *   .addTokenFilter(&quot;lowercase&quot;)
+ *   .addTokenFilter(&quot;stop&quot;, &quot;ignoreCase&quot;, &quot;false&quot;, &quot;words&quot;, &quot;stopwords.txt&quot;, &quot;format&quot;, &quot;wordset&quot;)
+ *   .build();
+ * </pre>
+ * The parameters passed to components are also used by Apache Solr and are documented
+ * on their corresponding factory classes. Refer to documentation of subclasses
+ * of {@link TokenizerFactory}, {@link TokenFilterFactory}, and {@link CharFilterFactory}.
+ * <p>The list of names to be used for components can be looked up through:
+ * {@link TokenizerFactory#availableTokenizers()}, {@link TokenFilterFactory#availableTokenFilters()},
+ * and {@link CharFilterFactory#availableCharFilters()}.
+ */
+public final class CustomAnalyzer extends Analyzer {
+  
+  /** Returns a builder for custom analyzers that loads all resources from classpath.
+   * All path names given must be absolute with package prefixes. */
+  public static Builder builder() {
+    return builder(new ClasspathResourceLoader());
+  }
+  
+  /** Returns a builder for custom analyzers that loads all resources from the given
+   * file system base directory. Place, e.g., stop word files there.
+   * Files that are not in the given directory are loaded from classpath. */
+  public static Builder builder(Path configDir) {
+    return builder(new FilesystemResourceLoader(configDir));
+  }
+  
+  /** Returns a builder for custom analyzers that loads all resources using the given {@link ResourceLoader}. */
+  public static Builder builder(ResourceLoader loader) {
+    return new Builder(loader);
+  }
+  
+  private final CharFilterFactory[] charFilters;
+  private final TokenizerFactory tokenizer;
+  private final TokenFilterFactory[] tokenFilters;
+  private final Integer posIncGap, offsetGap;
+
+  CustomAnalyzer(Version defaultMatchVersion, CharFilterFactory[] charFilters, TokenizerFactory tokenizer, TokenFilterFactory[] tokenFilters, Integer posIncGap, Integer offsetGap) {
+    this.charFilters = charFilters;
+    this.tokenizer = tokenizer;
+    this.tokenFilters = tokenFilters;
+    this.posIncGap = posIncGap;
+    this.offsetGap = offsetGap;
+    if (defaultMatchVersion != null) {
+      setVersion(defaultMatchVersion);
+    }
+  }
+  
+  @Override
+  protected Reader initReader(String fieldName, Reader reader) {
+    for (final CharFilterFactory charFilter : charFilters) {
+      reader = charFilter.create(reader);
+    }
+    return reader;
+  }
+
+  @Override
+  protected TokenStreamComponents createComponents(String fieldName) {
+    final Tokenizer tk = tokenizer.create();
+    TokenStream ts = tk;
+    for (final TokenFilterFactory filter : tokenFilters) {
+      ts = filter.create(ts);
+    }
+    return new TokenStreamComponents(tk, ts);
+  }
+  
+  @Override
+  public int getPositionIncrementGap(String fieldName) {
+    // use default from Analyzer base class if null
+    return (posIncGap == null) ? super.getPositionIncrementGap(fieldName) : posIncGap.intValue();
+  }
+  
+  @Override
+  public int getOffsetGap(String fieldName) {
+    // use default from Analyzer base class if null
+    return (offsetGap == null) ? super.getOffsetGap(fieldName) : offsetGap.intValue();
+  }
+  
+  /** Returns the list of char filters that are used in this analyzer. */
+  public List<CharFilterFactory> getCharFilterFactories() {
+    return Collections.unmodifiableList(Arrays.asList(charFilters));
+  }
+  
+  /** Returns the tokenizer that is used in this analyzer. */
+  public TokenizerFactory getTokenizerFactory() {
+    return tokenizer;
+  }
+  
+  /** Returns the list of token filters that are used in this analyzer. */
+  public List<TokenFilterFactory> getTokenFilterFactories() {
+    return Collections.unmodifiableList(Arrays.asList(tokenFilters));
+  }
+
+  @Override
+  public String toString() {
+    final StringBuilder sb = new StringBuilder(getClass().getSimpleName()).append('(');
+    for (final CharFilterFactory filter : charFilters) {
+      sb.append(filter).append(',');
+    }
+    sb.append(tokenizer);
+    for (final TokenFilterFactory filter : tokenFilters) {
+      sb.append(',').append(filter);
+    }
+    return sb.append(')').toString();
+  }
+
+  /** Builder for {@link CustomAnalyzer}.
+   * @see CustomAnalyzer#builder()
+   * @see CustomAnalyzer#builder(Path)
+   * @see CustomAnalyzer#builder(ResourceLoader)
+   */
+  public static final class Builder {
+    private final ResourceLoader loader;
+    private final SetOnce<Version> defaultMatchVersion = new SetOnce<>();
+    private final List<CharFilterFactory> charFilters = new ArrayList<>();
+    private final SetOnce<TokenizerFactory> tokenizer = new SetOnce<>();
+    private final List<TokenFilterFactory> tokenFilters = new ArrayList<>();
+    private final SetOnce<Integer> posIncGap = new SetOnce<>();
+    private final SetOnce<Integer> offsetGap = new SetOnce<>();
+    
+    private boolean componentsAdded = false;
+    
+    Builder(ResourceLoader loader) {
+      this.loader = loader;
+    }
+    
+    /** This match version is passed as default to all tokenizers or filters. It is used unless you
+     * pass the parameter {@code luceneMatchVersion} explicitly. It defaults to undefined, so the
+     * underlying factory will (in most cases) use {@link Version#LATEST}. */
+    public Builder withDefaultMatchVersion(Version version) {
+      Objects.requireNonNull(version, "version may not be null");
+      if (componentsAdded) {
+        throw new IllegalStateException("You may only set the default match version before adding tokenizers, "+
+            "token filters, or char filters.");
+      }
+      this.defaultMatchVersion.set(version);
+      return this;
+    }
+    
+    /** Sets the position increment gap of the analyzer.
+     * The default is defined in the analyzer base class.
+     * @see Analyzer#getPositionIncrementGap(String)
+     */
+    public Builder withPositionIncrementGap(int posIncGap) {
+      if (posIncGap < 0) {
+        throw new IllegalArgumentException("posIncGap must be >= 0");
+      }
+      this.posIncGap.set(posIncGap);
+      return this;
+    }
+    
+    /** Sets the offset gap of the analyzer. The default is defined
+     * in the analyzer base class.
+     * @see Analyzer#getOffsetGap(String)
+     */
+    public Builder withOffsetGap(int offsetGap) {
+      if (offsetGap < 0) {
+        throw new IllegalArgumentException("offsetGap must be >= 0");
+      }
+      this.offsetGap.set(offsetGap);
+      return this;
+    }
+    
+    /** Uses the given tokenizer.
+     * @param name is used to look up the factory with {@link TokenizerFactory#forName(String, Map)}.
+     *  The list of possible names can be looked up with {@link TokenizerFactory#availableTokenizers()}.
+     * @param params a list of factory string params as key/value pairs.
+     *  The number of parameters must be an even number, as they are pairs.
+     */
+    public Builder withTokenizer(String name, String... params) throws IOException {
+      return withTokenizer(name, paramsToMap(params));
+    }
+    
+    /** Uses the given tokenizer.
+     * @param name is used to look up the factory with {@link TokenizerFactory#forName(String, Map)}.
+     *  The list of possible names can be looked up with {@link TokenizerFactory#availableTokenizers()}.
+     * @param params the map of parameters to be passed to the factory. The map must be modifiable.
+     */
+    public Builder withTokenizer(String name, Map<String,String> params) throws IOException {
+      Objects.requireNonNull(name, "Tokenizer name may not be null");
+      tokenizer.set(applyResourceLoader(TokenizerFactory.forName(name, applyDefaultParams(params))));
+      componentsAdded = true;
+      return this;
+    }
+    
+    /** Adds the given token filter.
+     * @param name is used to look up the factory with {@link TokenFilterFactory#forName(String, Map)}.
+     *  The list of possible names can be looked up with {@link TokenFilterFactory#availableTokenFilters()}.
+     * @param params a list of factory string params as key/value pairs.
+     *  The number of parameters must be an even number, as they are pairs.
+     */
+    public Builder addTokenFilter(String name, String... params) throws IOException {
+      return addTokenFilter(name, paramsToMap(params));
+    }
+    
+    /** Adds the given token filter.
+     * @param name is used to look up the factory with {@link TokenFilterFactory#forName(String, Map)}.
+     *  The list of possible names can be looked up with {@link TokenFilterFactory#availableTokenFilters()}.
+     * @param params the map of parameters to be passed to the factory. The map must be modifiable.
+     */
+    public Builder addTokenFilter(String name, Map<String,String> params) throws IOException {
+      Objects.requireNonNull(name, "TokenFilter name may not be null");
+      tokenFilters.add(applyResourceLoader(TokenFilterFactory.forName(name, applyDefaultParams(params))));
+      componentsAdded = true;
+      return this;
+    }
+    
+    /** Adds the given char filter.
+     * @param name is used to look up the factory with {@link CharFilterFactory#forName(String, Map)}.
+     *  The list of possible names can be looked up with {@link CharFilterFactory#availableCharFilters()}.
+     * @param params a list of factory string params as key/value pairs.
+     *  The number of parameters must be an even number, as they are pairs.
+     */
+    public Builder addCharFilter(String name, String... params) throws IOException {
+      return addCharFilter(name, paramsToMap(params));
+    }
+    
+    /** Adds the given char filter.
+     * @param name is used to look up the factory with {@link CharFilterFactory#forName(String, Map)}.
+     *  The list of possible names can be looked up with {@link CharFilterFactory#availableCharFilters()}.
+     * @param params the map of parameters to be passed to the factory. The map must be modifiable.
+     */
+    public Builder addCharFilter(String name, Map<String,String> params) throws IOException {
+      Objects.requireNonNull(name, "CharFilter name may not be null");
+      charFilters.add(applyResourceLoader(CharFilterFactory.forName(name, applyDefaultParams(params))));
+      componentsAdded = true;
+      return this;
+    }
+    
+    /** Builds the analyzer. */
+    public CustomAnalyzer build() {
+      if (tokenizer.get() == null) {
+        throw new IllegalStateException("You have to set at least a tokenizer.");
+      }
+      return new CustomAnalyzer(
+        defaultMatchVersion.get(),
+        charFilters.toArray(new CharFilterFactory[charFilters.size()]),
+        tokenizer.get(), 
+        tokenFilters.toArray(new TokenFilterFactory[tokenFilters.size()]),
+        posIncGap.get(),
+        offsetGap.get()
+      );
+    }
+    
+    private Map<String,String> applyDefaultParams(Map<String,String> map) {
+      if (defaultMatchVersion.get() != null && !map.containsKey(AbstractAnalysisFactory.LUCENE_MATCH_VERSION_PARAM)) {
+        map.put(AbstractAnalysisFactory.LUCENE_MATCH_VERSION_PARAM, defaultMatchVersion.get().toString());
+      }
+      return map;
+    }
+    
+    private Map<String, String> paramsToMap(String... params) {
+      if (params.length % 2 != 0) {
+        throw new IllegalArgumentException("Key-value pairs expected, so the number of params must be even.");
+      }
+      final Map<String, String> map = new HashMap<>();
+      for (int i = 0; i < params.length; i += 2) {
+        Objects.requireNonNull(params[i], "Key of param may not be null.");
+        map.put(params[i], params[i + 1]);
+      }
+      return map;
+    }
+    
+    private <T> T applyResourceLoader(T factory) throws IOException {
+      if (factory instanceof ResourceLoaderAware) {
+        ((ResourceLoaderAware) factory).inform(loader);
+      }
+      return factory;
+    }
+  }
+}
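
For reference, a minimal usage sketch of the CustomAnalyzer added above. The config directory path and the stop word file name are illustrative placeholders; the builder calls and the reset/incrementToken/end consumption cycle follow the javadocs in this file.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.custom.CustomAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class CustomAnalyzerUsage {
  public static void main(String[] args) throws IOException {
    // Build an analyzer from factory SPI names; resources such as the stop
    // word file are resolved against the given config directory (placeholder).
    Analyzer analyzer = CustomAnalyzer.builder(Paths.get("/path/to/config/dir"))
        .withTokenizer("standard")
        .addTokenFilter("lowercase")
        .addTokenFilter("stop", "ignoreCase", "false", "words", "stopwords.txt", "format", "wordset")
        .build();

    // Consume the resulting token stream in the usual way.
    try (TokenStream ts = analyzer.tokenStream("body", "Some TEXT to analyze")) {
      CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        System.out.println(termAtt.toString());
      }
      ts.end();
    }
    analyzer.close();
  }
}
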
diff --git a/solr/core/src/java/org/apache/solr/common/package.html b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/package.html
similarity index 81%
rename from solr/core/src/java/org/apache/solr/common/package.html
rename to lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/package.html
index 6c25154..80c2dac 100644
--- a/solr/core/src/java/org/apache/solr/common/package.html
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/custom/package.html
@@ -15,15 +15,8 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 -->
-<html>
-<head>
-   <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-</head>
+<html><head></head>
 <body>
-<p>
-Commonly reused classes and interfaces (deprecated package, do not add new classes)
-
-
-</p>
+A general-purpose Analyzer that can be created with a builder-style API.
 </body>
 </html>
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/CapitalizationFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/CapitalizationFilterFactory.java
index 8159d8a..cc34391 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/CapitalizationFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/CapitalizationFilterFactory.java
@@ -78,7 +78,6 @@
   /** Creates a new CapitalizationFilterFactory */
   public CapitalizationFilterFactory(Map<String, String> args) {
     super(args);
-    assureMatchVersion();
     boolean ignoreCase = getBoolean(args, KEEP_IGNORE_CASE, false);
     Set<String> k = getSet(args, KEEP);
     if (k != null) {
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/StemmerOverrideFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/StemmerOverrideFilterFactory.java
index b56fe76..450af5c 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/StemmerOverrideFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/StemmerOverrideFilterFactory.java
@@ -55,7 +55,6 @@
   @Override
   public void inform(ResourceLoader loader) throws IOException {
     if (dictionaryFiles != null) {
-      assureMatchVersion();
       List<String> files = splitFileNames(dictionaryFiles);
       if (files.size() > 0) {
         StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(ignoreCase);
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilterFactory.java
index 9ab83c8..a7bd468 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterFilterFactory.java
@@ -63,7 +63,6 @@
   /** Creates a new WordDelimiterFilterFactory */
   public WordDelimiterFilterFactory(Map<String, String> args) {
     super(args);
-    assureMatchVersion();
     int flags = 0;
     if (getInt(args, "generateWordParts", 1) != 0) {
       flags |= GENERATE_WORD_PARTS;
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilterFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilterFactory.java
index c5afd87..c8b01c5 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilterFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymFilterFactory.java
@@ -102,7 +102,6 @@
     }
 
     if (tokenizerFactory != null) {
-      assureMatchVersion();
       tokArgs.put("luceneMatchVersion", getLuceneMatchVersion().toString());
       for (Iterator<String> itr = args.keySet().iterator(); itr.hasNext();) {
         String key = itr.next();
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/AbstractAnalysisFactory.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/AbstractAnalysisFactory.java
index 9c75651..bc48531 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/AbstractAnalysisFactory.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/util/AbstractAnalysisFactory.java
@@ -70,7 +70,7 @@
     originalArgs = Collections.unmodifiableMap(new HashMap<>(args));
     String version = get(args, LUCENE_MATCH_VERSION_PARAM);
     if (version == null) {
-      luceneMatchVersion = null;
+      luceneMatchVersion = Version.LATEST;
     } else {
       try {
         luceneMatchVersion = Version.parseLeniently(version);
@@ -85,16 +85,6 @@
     return originalArgs;
   }
 
-   /** this method can be called in the {@link org.apache.lucene.analysis.util.TokenizerFactory#create()}
-   * or {@link org.apache.lucene.analysis.util.TokenFilterFactory#create(org.apache.lucene.analysis.TokenStream)} methods,
-   * to inform user, that for this factory a {@link #luceneMatchVersion} is required */
-  protected final void assureMatchVersion() {
-    if (luceneMatchVersion == null) {
-      throw new IllegalArgumentException("Configuration Error: Factory '" + this.getClass().getName() +
-        "' needs a 'luceneMatchVersion' parameter");
-    }
-  }
-
   public final Version getLuceneMatchVersion() {
     return this.luceneMatchVersion;
   }
@@ -241,7 +231,6 @@
    */
   protected final CharArraySet getWordSet(ResourceLoader loader,
       String wordFiles, boolean ignoreCase) throws IOException {
-    assureMatchVersion();
     List<String> files = splitFileNames(wordFiles);
     CharArraySet words = null;
     if (files.size() > 0) {
@@ -267,7 +256,6 @@
    * except the input is in snowball format. */
   protected final CharArraySet getSnowballWordSet(ResourceLoader loader,
       String wordFiles, boolean ignoreCase) throws IOException {
-    assureMatchVersion();
     List<String> files = splitFileNames(wordFiles);
     CharArraySet words = null;
     if (files.size() > 0) {
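
A short sketch of the behavioral consequence of the AbstractAnalysisFactory change above: factories no longer require a luceneMatchVersion argument and now default to Version.LATEST when it is absent. WordDelimiterFilterFactory (one of the factories touched in this commit) is just an illustrative choice here.

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory;
import org.apache.lucene.util.Version;

public class MatchVersionDefault {
  public static void main(String[] args) {
    // No "luceneMatchVersion" entry: before this change the constructor called
    // assureMatchVersion() and threw IllegalArgumentException; now it falls
    // back to Version.LATEST.
    WordDelimiterFilterFactory implicit = new WordDelimiterFilterFactory(new HashMap<String, String>());
    System.out.println(implicit.getLuceneMatchVersion() == Version.LATEST); // true

    // An explicit version is still parsed leniently and honored as before.
    Map<String, String> explicitArgs = new HashMap<>();
    explicitArgs.put("luceneMatchVersion", "5.0.0");
    WordDelimiterFilterFactory explicit = new WordDelimiterFilterFactory(explicitArgs);
    System.out.println(explicit.getLuceneMatchVersion()); // 5.0.0
  }
}
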
diff --git a/lucene/analysis/common/src/java/overview.html b/lucene/analysis/common/src/java/overview.html
index a251be2..10ccea2 100644
--- a/lucene/analysis/common/src/java/overview.html
+++ b/lucene/analysis/common/src/java/overview.html
@@ -27,7 +27,8 @@
     This module contains concrete components ({@link org.apache.lucene.analysis.CharFilter}s,
     {@link org.apache.lucene.analysis.Tokenizer}s, and ({@link org.apache.lucene.analysis.TokenFilter}s) for 
     analyzing different types of content. It also provides a number of {@link org.apache.lucene.analysis.Analyzer}s
-    for different languages that you can use to get started quickly. 
+    for different languages that you can use to get started quickly. To define fully custom Analyzers
+    (like in the index schema of Apache Solr), this module provides {@link org.apache.lucene.analysis.custom.CustomAnalyzer}.
     </p>
   </body>
 </html>
\ No newline at end of file
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestFactories.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestFactories.java
index d8a14f3..ccdfc49 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestFactories.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestFactories.java
@@ -135,9 +135,7 @@
     AbstractAnalysisFactory factory = null;
     try {
       factory = ctor.newInstance(args);
-    } catch (InstantiationException e) {
-      throw new RuntimeException(e);
-    } catch (IllegalAccessException e) {
+    } catch (InstantiationException | IllegalAccessException e) {
       throw new RuntimeException(e);
     } catch (InvocationTargetException e) {
       if (e.getCause() instanceof IllegalArgumentException) {
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
index 0fddc53..ced8b72 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestRandomChains.java
@@ -712,10 +712,8 @@
         } else {
           Rethrow.rethrow(cause);
         }
-      } catch (IllegalAccessException iae) {
+      } catch (IllegalAccessException | InstantiationException iae) {
         Rethrow.rethrow(iae);
-      } catch (InstantiationException ie) {
-        Rethrow.rethrow(ie);
       }
       return null; // no success
     }
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java
new file mode 100644
index 0000000..005c990
--- /dev/null
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java
@@ -0,0 +1,289 @@
+package org.apache.lucene.analysis.custom;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory;
+import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
+import org.apache.lucene.analysis.core.StopFilterFactory;
+import org.apache.lucene.analysis.core.WhitespaceTokenizerFactory;
+import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilterFactory;
+import org.apache.lucene.analysis.standard.ClassicTokenizerFactory;
+import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.util.SetOnce.AlreadySetException;
+import org.apache.lucene.util.Version;
+
+public class TestCustomAnalyzer extends BaseTokenStreamTestCase {
+  
+  // Test some examples (TODO: we only check behavior, we may need something like TestRandomChains...)
+
+  public void testWhitespaceWithFolding() throws Exception {
+    CustomAnalyzer a = CustomAnalyzer.builder()
+        .withTokenizer("whitespace")
+        .addTokenFilter("asciifolding", "preserveOriginal", "true")
+        .addTokenFilter("lowercase")
+        .build();
+    
+    assertSame(WhitespaceTokenizerFactory.class, a.getTokenizerFactory().getClass());
+    assertEquals(Collections.emptyList(), a.getCharFilterFactories());
+    List<TokenFilterFactory> tokenFilters = a.getTokenFilterFactories();
+    assertEquals(2, tokenFilters.size());
+    assertSame(ASCIIFoldingFilterFactory.class, tokenFilters.get(0).getClass());
+    assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
+    assertEquals(0, a.getPositionIncrementGap("dummy"));
+    assertEquals(1, a.getOffsetGap("dummy"));
+    assertSame(Version.LATEST, a.getVersion());
+
+    assertAnalyzesTo(a, "foo bar FOO BAR", 
+        new String[] { "foo", "bar", "foo", "bar" },
+        new int[]    { 1,     1,     1,     1});
+    assertAnalyzesTo(a, "föó bär FÖÖ BAR", 
+        new String[] { "foo", "föó", "bar", "bär", "foo", "föö", "bar" },
+        new int[]    { 1,     0,     1,     0,     1,     0,     1});
+  }
+
+  public void testHtmlStripClassicFolding() throws Exception {
+    CustomAnalyzer a = CustomAnalyzer.builder()
+        .withDefaultMatchVersion(Version.LUCENE_5_0_0)
+        .addCharFilter("htmlstrip")
+        .withTokenizer("classic")
+        .addTokenFilter("asciifolding", "preserveOriginal", "true")
+        .addTokenFilter("lowercase")
+        .withPositionIncrementGap(100)
+        .withOffsetGap(1000)
+        .build();
+    
+    assertSame(ClassicTokenizerFactory.class, a.getTokenizerFactory().getClass());
+    List<CharFilterFactory> charFilters = a.getCharFilterFactories();
+    assertEquals(1, charFilters.size());
+    assertEquals(HTMLStripCharFilterFactory.class, charFilters.get(0).getClass());
+    List<TokenFilterFactory> tokenFilters = a.getTokenFilterFactories();
+    assertEquals(2, tokenFilters.size());
+    assertSame(ASCIIFoldingFilterFactory.class, tokenFilters.get(0).getClass());
+    assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
+    assertEquals(100, a.getPositionIncrementGap("dummy"));
+    assertEquals(1000, a.getOffsetGap("dummy"));
+    assertSame(Version.LUCENE_5_0_0, a.getVersion());
+
+    assertAnalyzesTo(a, "<p>foo bar</p> FOO BAR", 
+        new String[] { "foo", "bar", "foo", "bar" },
+        new int[]    { 1,     1,     1,     1});
+    assertAnalyzesTo(a, "<p><b>föó</b> bär     FÖÖ BAR</p>", 
+        new String[] { "foo", "föó", "bar", "bär", "foo", "föö", "bar" },
+        new int[]    { 1,     0,     1,     0,     1,     0,     1});
+  }
+  
+  public void testStopWordsFromClasspath() throws Exception {
+    CustomAnalyzer a = CustomAnalyzer.builder()
+        .withTokenizer("whitespace")
+        .addTokenFilter("stop",
+            "ignoreCase", "true",
+            "words", "org/apache/lucene/analysis/custom/teststop.txt",
+            "format", "wordset")
+        .build();
+    
+    assertSame(WhitespaceTokenizerFactory.class, a.getTokenizerFactory().getClass());
+    assertEquals(Collections.emptyList(), a.getCharFilterFactories());
+    List<TokenFilterFactory> tokenFilters = a.getTokenFilterFactories();
+    assertEquals(1, tokenFilters.size());
+    assertSame(StopFilterFactory.class, tokenFilters.get(0).getClass());
+    assertEquals(0, a.getPositionIncrementGap("dummy"));
+    assertEquals(1, a.getOffsetGap("dummy"));
+    assertSame(Version.LATEST, a.getVersion());
+
+    assertAnalyzesTo(a, "foo Foo Bar", new String[0]);
+  }
+  
+  public void testStopWordsFromClasspathWithMap() throws Exception {
+    Map<String,String> stopConfig1 = new HashMap<>();
+    stopConfig1.put("ignoreCase", "true");
+    stopConfig1.put("words", "org/apache/lucene/analysis/custom/teststop.txt");
+    stopConfig1.put("format", "wordset");
+    
+    Map<String,String> stopConfig2 = Collections.unmodifiableMap(new HashMap<>(stopConfig1));
+
+    CustomAnalyzer a = CustomAnalyzer.builder()
+        .withTokenizer("whitespace")
+        .addTokenFilter("stop", stopConfig1)
+        .build();
+    assertTrue(stopConfig1.isEmpty());
+    assertAnalyzesTo(a, "foo Foo Bar", new String[0]);
+    
+    // try with unmodifiableMap, should fail
+    try {
+      CustomAnalyzer.builder()
+          .withTokenizer("whitespace")
+          .addTokenFilter("stop", stopConfig2)
+          .build();
+      fail();
+    } catch (IllegalArgumentException | UnsupportedOperationException e) {
+      // pass
+    }
+  }
+  
+  public void testStopWordsFromFile() throws Exception {
+    CustomAnalyzer a = CustomAnalyzer.builder(this.getDataPath(""))
+        .withTokenizer("whitespace")
+        .addTokenFilter("stop",
+            "ignoreCase", "true",
+            "words", "teststop.txt",
+            "format", "wordset")
+        .build();
+    assertAnalyzesTo(a, "foo Foo Bar", new String[0]);
+  }
+  
+  public void testStopWordsFromFileAbsolute() throws Exception {
+    CustomAnalyzer a = CustomAnalyzer.builder(Paths.get("."))
+        .withTokenizer("whitespace")
+        .addTokenFilter("stop",
+            "ignoreCase", "true",
+            "words", this.getDataPath("teststop.txt").toString(),
+            "format", "wordset")
+        .build();
+    assertAnalyzesTo(a, "foo Foo Bar", new String[0]);
+  }
+  
+  // Now test misconfigurations:
+
+  public void testIncorrectOrder() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+          .addCharFilter("htmlstrip")
+          .withDefaultMatchVersion(Version.LATEST)
+          .withTokenizer("whitespace")
+          .build();
+      fail();
+    } catch (IllegalStateException e) {
+      // pass
+    }
+  }
+
+  public void testMissingSPI() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+          .withTokenizer("foobar_nonexistent")
+          .build();
+      fail();
+    } catch (IllegalArgumentException e) {
+      assertTrue(e.getMessage().contains("SPI"));
+      assertTrue(e.getMessage().contains("does not exist"));
+    }
+  }
+
+  public void testSetTokenizerTwice() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+          .withTokenizer("whitespace")
+          .withTokenizer("standard")
+          .build();
+      fail();
+    } catch (AlreadySetException e) {
+      // pass
+    }
+  }
+
+  public void testSetMatchVersionTwice() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+          .withDefaultMatchVersion(Version.LATEST)
+          .withDefaultMatchVersion(Version.LATEST)
+          .withTokenizer("standard")
+          .build();
+      fail();
+    } catch (AlreadySetException e) {
+      // pass
+    }
+  }
+
+  public void testSetPosIncTwice() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+          .withPositionIncrementGap(2)
+          .withPositionIncrementGap(3)
+          .withTokenizer("standard")
+          .build();
+      fail();
+    } catch (AlreadySetException e) {
+      // pass
+    }
+  }
+
+  public void testSetOfsGapTwice() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+          .withOffsetGap(2)
+          .withOffsetGap(3)
+          .withTokenizer("standard")
+          .build();
+      fail();
+    } catch (AlreadySetException e) {
+      // pass
+    }
+  }
+
+  public void testNoTokenizer() throws Exception {
+    try {
+      CustomAnalyzer.builder().build();
+      fail();
+    } catch (IllegalStateException e) {
+      assertEquals("You have to set at least a tokenizer.", e.getMessage());
+    }
+  }
+
+  public void testNullTokenizer() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+        .withTokenizer(null)
+        .build();
+      fail();
+    } catch (NullPointerException e) {
+      // pass
+    }
+  }
+
+  public void testNullParamKey() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+        .withTokenizer("whitespace", null, "foo")
+        .build();
+      fail();
+    } catch (NullPointerException e) {
+      // pass
+    }
+  }
+
+  public void testNullMatchVersion() throws Exception {
+    try {
+      CustomAnalyzer.builder()
+        .withDefaultMatchVersion(null)
+        .withTokenizer("whitespace")
+        .build();
+      fail();
+    } catch (NullPointerException e) {
+      // pass
+    }
+  }
+
+}
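For reference, a minimal standalone sketch (not part of the patch) of the builder API exercised by the tests above, using the same SPI names; the field name and sample text are illustrative:

```java
import java.io.IOException;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.custom.CustomAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class CustomAnalyzerExample {
  public static void main(String[] args) throws IOException {
    // Build an analyzer from factory SPI names instead of subclassing Analyzer.
    CustomAnalyzer analyzer = CustomAnalyzer.builder()
        .withTokenizer("whitespace")
        .addTokenFilter("asciifolding", "preserveOriginal", "true")
        .addTokenFilter("lowercase")
        .build();
    try (TokenStream ts = analyzer.tokenStream("body", "föó bär")) {
      CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        // preserveOriginal=true emits the folded form first, then the original
        // at the same position: foo, föó, bar, bär
        System.out.println(term.toString());
      }
      ts.end();
    }
  }
}
```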
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/teststop.txt b/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/teststop.txt
new file mode 100644
index 0000000..3fe6d02
--- /dev/null
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/teststop.txt
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+foo
+bar
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/util/TestFilesystemResourceLoader.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/util/TestFilesystemResourceLoader.java
index 877949e..36c7eac 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/util/TestFilesystemResourceLoader.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/util/TestFilesystemResourceLoader.java
@@ -17,11 +17,8 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
@@ -29,8 +26,6 @@
 
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
-import org.apache.lucene.util.TestUtil;
 
 public class TestFilesystemResourceLoader extends LuceneTestCase {
   
diff --git a/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMAAnnotationsTokenizer.java b/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMAAnnotationsTokenizer.java
index 0b8fc39..c0d7389 100644
--- a/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMAAnnotationsTokenizer.java
+++ b/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMAAnnotationsTokenizer.java
@@ -58,9 +58,7 @@
   protected void initializeIterator() throws IOException {
     try {
       analyzeInput();
-    } catch (AnalysisEngineProcessException e) {
-      throw new IOException(e);
-    } catch (ResourceInitializationException e) {
+    } catch (AnalysisEngineProcessException | ResourceInitializationException e) {
       throw new IOException(e);
     }
     finalOffset = correctOffset(cas.getDocumentText().length());
diff --git a/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java b/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java
index 65fd2ef..fbf1d8e 100644
--- a/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java
+++ b/lucene/analysis/uima/src/java/org/apache/lucene/analysis/uima/UIMATypeAwareAnnotationsTokenizer.java
@@ -70,9 +70,7 @@
   protected void initializeIterator() throws IOException {
     try {
       analyzeInput();
-    } catch (AnalysisEngineProcessException e) {
-      throw new IOException(e);
-    } catch (ResourceInitializationException e) {
+    } catch (AnalysisEngineProcessException | ResourceInitializationException e) {
       throw new IOException(e);
     }
     featurePath = cas.createFeaturePath();
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 5568ac4..d86d733 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -297,6 +297,8 @@
       "4.10.1-nocfs",
       "4.10.2-cfs",
       "4.10.2-nocfs",
+      "4.10.3-cfs",
+      "4.10.3-nocfs",
   };
   
   final static String[] oldSingleSegmentNames = {
@@ -548,11 +550,11 @@
 
   public void testAddOldIndexesReader() throws IOException {
     for (String name : oldNames) {
-      IndexReader reader = DirectoryReader.open(oldIndexDirs.get(name));
+      DirectoryReader reader = DirectoryReader.open(oldIndexDirs.get(name));
       
       Directory targetDir = newDirectory();
       IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
-      w.addIndexes(reader);
+      TestUtil.addIndexesSlowly(w, reader);
       w.close();
       reader.close();
             
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.4.10.3-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.4.10.3-cfs.zip
new file mode 100644
index 0000000..cecbcc2
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.4.10.3-cfs.zip
Binary files differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.4.10.3-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.4.10.3-nocfs.zip
new file mode 100644
index 0000000..201dc53
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/unsupported.4.10.3-nocfs.zip
Binary files differ
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java
index e8a9b38..768a253 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/EnwikiContentSource.java
@@ -202,10 +202,8 @@
             }
           }
         }
-      } catch (SAXException sae) {
+      } catch (SAXException | IOException sae) {
         throw new RuntimeException(sae);
-      } catch (IOException ioe) {
-        throw new RuntimeException(ioe);
       } finally {
         synchronized(this) {
           threadDone = true;
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/NoMoreDataException.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/NoMoreDataException.java
index 4b8d48d..11e48ea 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/NoMoreDataException.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/feeds/NoMoreDataException.java
@@ -19,7 +19,7 @@
 
 /**
  * Exception indicating there is no more data.
- * Thrown by Docs Makers if doc.maker.forever is false and docs sources of that maker where exhausted.
+ * Thrown by Docs Makers if content.source.forever is false and the doc sources of that maker were exhausted.
 * This is useful for iterating all documents of a source, in case we don't know in advance how many docs there are.
  */
 public class NoMoreDataException extends Exception {
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/package.html b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/package.html
index 147d2e2..7c09671 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/package.html
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/package.html
@@ -557,7 +557,7 @@
   <li><b>Docs and queries creation:</b></li>
     <ul><li>analyzer
     </li><li>doc.maker
-    </li><li>doc.maker.forever
+    </li><li>content.source.forever
     </li><li>html.parser
     </li><li>doc.stored
     </li><li>doc.tokenized
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTask.java
index 714a668..ddc72f0 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/AddIndexesTask.java
@@ -18,18 +18,23 @@
  */
 
 import java.nio.file.Paths;
+import java.util.List;
 
 import org.apache.lucene.benchmark.byTask.PerfRunData;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.SlowCodecReaderWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 
 /**
  * Adds an input index to an existing index, using
  * {@link IndexWriter#addIndexes(Directory...)} or
- * {@link IndexWriter#addIndexes(IndexReader...)}. The location of the input
+ * {@link IndexWriter#addIndexes(CodecReader...)}. The location of the input
  * index is specified by the parameter {@link #ADDINDEXES_INPUT_DIR} and is
  * assumed to be a directory on the file system.
  * <p>
@@ -63,11 +68,13 @@
     if (useAddIndexesDir) {
       writer.addIndexes(inputDir);
     } else {
-      IndexReader r = DirectoryReader.open(inputDir);
-      try {
-        writer.addIndexes(r);
-      } finally {
-        r.close();
+      try (IndexReader r = DirectoryReader.open(inputDir)) {
+        CodecReader leaves[] = new CodecReader[r.leaves().size()];
+        int i = 0;
+        for (LeafReaderContext leaf : r.leaves()) {
+          leaves[i++] = SlowCodecReaderWrapper.wrap(leaf.reader());
+        }
+        writer.addIndexes(leaves);
       }
     }
     return 1;
@@ -79,7 +86,7 @@
    * @param params
    *          {@code useAddIndexesDir=true} for using
    *          {@link IndexWriter#addIndexes(Directory...)} or {@code false} for
-   *          using {@link IndexWriter#addIndexes(IndexReader...)}. Defaults to
+   *          using {@link IndexWriter#addIndexes(CodecReader...)}. Defaults to
    *          {@code true}.
    */
   @Override
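The leaf-wrapping pattern above is the general replacement for the removed addIndexes(IndexReader...) overload. A self-contained sketch (not part of the patch), assuming only a writer and an input directory:

```java
import java.io.IOException;

import org.apache.lucene.index.CodecReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SlowCodecReaderWrapper;
import org.apache.lucene.store.Directory;

class AddIndexesExample {
  // Wraps each leaf of a DirectoryReader so it can be fed to the new
  // IndexWriter#addIndexes(CodecReader...) signature.
  static void addFrom(IndexWriter writer, Directory inputDir) throws IOException {
    try (DirectoryReader reader = DirectoryReader.open(inputDir)) {
      CodecReader[] leaves = new CodecReader[reader.leaves().size()];
      int i = 0;
      for (LeafReaderContext ctx : reader.leaves()) {
        leaves[i++] = SlowCodecReaderWrapper.wrap(ctx.reader());
      }
      writer.addIndexes(leaves);
    }
  }
}
```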
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java
index 1541800..4f6344e 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java
@@ -120,8 +120,7 @@
             // pulling the Weight ourselves:
             TopFieldCollector collector = TopFieldCollector.create(sort, numHits,
                                                                    true, withScore(),
-                                                                   withMaxScore(),
-                                                                   false);
+                                                                   withMaxScore());
             searcher.search(q, null, collector);
             hits = collector.topDocs();
           } else {
@@ -187,7 +186,7 @@
   }
 
   protected Collector createCollector() throws Exception {
-    return TopScoreDocCollector.create(numHits(), true);
+    return TopScoreDocCollector.create(numHits());
   }
 
 
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchWithCollectorTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchWithCollectorTask.java
index e357c5b..3278163 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchWithCollectorTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/SearchWithCollectorTask.java
@@ -52,10 +52,8 @@
   @Override
   protected Collector createCollector() throws Exception {
     Collector collector = null;
-    if (clnName.equalsIgnoreCase("topScoreDocOrdered") == true) {
-      collector = TopScoreDocCollector.create(numHits(), true);
-    } else if (clnName.equalsIgnoreCase("topScoreDocUnOrdered") == true) {
-      collector = TopScoreDocCollector.create(numHits(), false);
+    if (clnName.equalsIgnoreCase("topScoreDoc") == true) {
+      collector = TopScoreDocCollector.create(numHits());
     } else if (clnName.length() > 0){
       collector = Class.forName(clnName).asSubclass(Collector.class).newInstance();
 
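With the ordered/unordered distinction gone, creating and draining a top-docs collector reduces to the following sketch (not part of the patch; the searcher and query are assumed to exist):

```java
import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;

class CollectorExample {
  // The old boolean "docsScoredInOrder" argument is gone, so create()
  // takes only the number of hits to keep.
  static TopDocs topTen(IndexSearcher searcher, Query query) throws IOException {
    TopScoreDocCollector collector = TopScoreDocCollector.create(10);
    searcher.search(query, collector);
    return collector.topDocs();
  }
}
```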
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/BlockTreeOrdsPostingsFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/BlockTreeOrdsPostingsFormat.java
index 61e8f84..b149a14 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/BlockTreeOrdsPostingsFormat.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/BlockTreeOrdsPostingsFormat.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.PostingsReaderBase;
 import org.apache.lucene.codecs.PostingsWriterBase;
+import org.apache.lucene.codecs.blocktree.BlockTreeTermsWriter;
 import org.apache.lucene.codecs.lucene50.Lucene50PostingsReader;
 import org.apache.lucene.codecs.lucene50.Lucene50PostingsWriter;
 import org.apache.lucene.index.SegmentReadState;
@@ -56,9 +57,8 @@
   public BlockTreeOrdsPostingsFormat(int minTermBlockSize, int maxTermBlockSize) {
     super("BlockTreeOrds");
     this.minTermBlockSize = minTermBlockSize;
-    assert minTermBlockSize > 1;
     this.maxTermBlockSize = maxTermBlockSize;
-    assert minTermBlockSize <= maxTermBlockSize;
+    BlockTreeTermsWriter.validateSettings(minTermBlockSize, maxTermBlockSize);
   }
 
   @Override
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java
index b576d26..de4d6d9 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/blocktreeords/OrdsBlockTreeTermsWriter.java
@@ -418,7 +418,7 @@
         //  System.out.println("      add sub=" + indexEnt.input + " " + indexEnt.input + " output=" + indexEnt.output);
         //}
         Output output = indexEnt.output;
-        long blockTermCount = output.endOrd - output.startOrd + 1;
+        //long blockTermCount = output.endOrd - output.startOrd + 1;
         Output newOutput = FST_OUTPUTS.newOutput(output.bytes, termOrdOffset+output.startOrd, output.endOrd-termOrdOffset);
         //System.out.println("  append sub=" + indexEnt.input + " output=" + indexEnt.output + " termOrdOffset=" + termOrdOffset + " blockTermCount=" + blockTermCount  + " newOutput=" + newOutput  + " endOrd=" + (termOrdOffset+Long.MAX_VALUE-output.endOrd));
         builder.add(Util.toIntsRef(indexEnt.input, scratchIntsRef), newOutput);
@@ -603,9 +603,6 @@
       // compact format in this case:
       boolean isLeafBlock = hasSubBlocks == false;
 
-      // Number of terms in this block
-      int termCount;
-
       // Number of terms in this block and all sub-blocks (recursively)
       long totalTermCount;
 
@@ -652,12 +649,10 @@
           bytesWriter.reset();
           absolute = false;
         }
-        termCount = end-start;
         totalTermCount = end-start;
       } else {
         // Mixed terms and sub-blocks:
         subIndices = new ArrayList<>();
-        termCount = 0;
         totalTermCount = 0;
         for (int i=start;i<end;i++) {
           PendingEntry ent = pending.get(i);
@@ -705,7 +700,6 @@
             bytesWriter.reset();
             absolute = false;
 
-            termCount++;
             totalTermCount++;
           } else {
             PendingBlock block = (PendingBlock) ent;
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java
index df40a37..048cca8 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/memory/MemoryDocValuesConsumer.java
@@ -305,6 +305,7 @@
     int maxLength = Integer.MIN_VALUE;
     final long startFP = data.getFilePointer();
     boolean missing = false;
+    int upto = 0;
     for(BytesRef v : values) {
       final int length;
       if (v == null) {
@@ -314,8 +315,9 @@
         length = v.length;
       }
       if (length > MemoryDocValuesFormat.MAX_BINARY_FIELD_LENGTH) {
-        throw new IllegalArgumentException("DocValuesField \"" + field.name + "\" is too large, must be <= " + MemoryDocValuesFormat.MAX_BINARY_FIELD_LENGTH);
+        throw new IllegalArgumentException("DocValuesField \"" + field.name + "\" is too large, must be <= " + MemoryDocValuesFormat.MAX_BINARY_FIELD_LENGTH + " but got length=" + length + " v=" + v + "; upto=" + upto + " values=" + values);
       }
+      upto++;
       minLength = Math.min(minLength, length);
       maxLength = Math.max(maxLength, length);
       if (v != null) {
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCompoundFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCompoundFormat.java
index cef75aa..bcc42b9 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCompoundFormat.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextCompoundFormat.java
@@ -29,7 +29,6 @@
 
 import org.apache.lucene.codecs.CompoundFormat;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.MergeState.CheckAbort;
 import org.apache.lucene.index.IndexFileNames;
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.store.Directory;
@@ -157,11 +156,11 @@
   }
 
   @Override
-  public void write(Directory dir, SegmentInfo si, Collection<String> files, CheckAbort checkAbort, IOContext context) throws IOException {
+  public void write(Directory dir, SegmentInfo si, IOContext context) throws IOException {
     String dataFile = IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION);
     
-    int numFiles = files.size();
-    String names[] = files.toArray(new String[numFiles]);
+    int numFiles = si.files().size();
+    String names[] = si.files().toArray(new String[numFiles]);
     Arrays.sort(names);
     long startOffsets[] = new long[numFiles];
     long endOffsets[] = new long[numFiles];
@@ -181,8 +180,6 @@
           out.copyBytes(in, in.length());
         }
         endOffsets[i] = out.getFilePointer();
-        
-        checkAbort.work(endOffsets[i] - startOffsets[i]);
       }
       
       long tocPos = out.getFilePointer();
@@ -212,11 +209,6 @@
       SimpleTextUtil.writeNewline(out);
     }
   }
-
-  @Override
-  public String[] files(SegmentInfo si) {
-    return new String[] { IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION) };
-  }
   
   // helper method to strip away 'prefix' from 'scratch' and return as String
   private String stripPrefix(BytesRefBuilder scratch, BytesRef prefix) throws IOException {
diff --git a/lucene/common-build.xml b/lucene/common-build.xml
index 493ac55..1a402b9 100644
--- a/lucene/common-build.xml
+++ b/lucene/common-build.xml
@@ -1042,7 +1042,6 @@
                 <propertyref prefix="tests.badapples" />
                 <propertyref prefix="tests.bwcdir" />
                 <propertyref prefix="tests.timeoutSuite" />
-                <propertyref prefix="tests.jettyConnector" />
                 <propertyref prefix="tests.disableHdfs" />
                 <propertyref prefix="tests.filter" />
                 <propertyref prefix="tests.leavetmpdir" />
diff --git a/lucene/core/src/java/org/apache/lucene/analysis/package.html b/lucene/core/src/java/org/apache/lucene/analysis/package.html
index 8dc5362..3bff32b 100644
--- a/lucene/core/src/java/org/apache/lucene/analysis/package.html
+++ b/lucene/core/src/java/org/apache/lucene/analysis/package.html
@@ -164,7 +164,11 @@
   supplies a large family of <code>Analyzer</code> classes that deliver useful
   analysis chains. The most common of these is the <a href="{@docRoot}/../analyzers-common/org/apache/lucene/analysis/standard/StandardAnalyzer.html">StandardAnalyzer</a>.
   Many applications will have a long and industrious life with nothing more
-  than the <code>StandardAnalyzer</code>.
+  than the <code>StandardAnalyzer</code>. The <a href="{@docRoot}/../analyzers-common/overview-summary.html">analyzers-common</a>
+  library provides many pre-existing analyzers for various languages.
+  The analyzers-common library also lets you configure a custom Analyzer without subclassing, using the
+  <a href="{@docRoot}/../analyzers-common/org/apache/lucene/analysis/custom/CustomAnalyzer.html">CustomAnalyzer</a>
+  class.
 </p>
 <p>
   Aside from the <code>StandardAnalyzer</code>,
@@ -258,8 +262,7 @@
   create, or a combination of existing and newly created components.  Before
   pursuing this approach, you may find it worthwhile to explore the
   <a href="{@docRoot}/../analyzers-common/overview-summary.html">analyzers-common</a> library and/or ask on the 
-  <a href="http://lucene.apache.org/core/discussion.html"
-      >java-user@lucene.apache.org mailing list</a> first to see if what you
+  <a href="http://lucene.apache.org/core/discussion.html">java-user@lucene.apache.org mailing list</a> first to see if what you
   need already exists. If you are still committed to creating your own
   Analyzer, have a look at the source code of any one of the many samples
   located in this package.
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java b/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java
index 2cd0365..0181619 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/CodecUtil.java
@@ -413,7 +413,7 @@
     
     final int magic = in.readInt();
     if (magic != FOOTER_MAGIC) {
-      throw new CorruptIndexException("codec footer mismatch: actual footer=" + magic + " vs expected footer=" + FOOTER_MAGIC, in);
+      throw new CorruptIndexException("codec footer mismatch (file truncated?): actual footer=" + magic + " vs expected footer=" + FOOTER_MAGIC, in);
     }
     
     final int algorithmID = in.readInt();
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java
index 2955630..042a9b1 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java
@@ -18,9 +18,7 @@
  */
 
 import java.io.IOException;
-import java.util.Collection;
 
-import org.apache.lucene.index.MergeState.CheckAbort;
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -45,15 +43,7 @@
   public abstract Directory getCompoundReader(Directory dir, SegmentInfo si, IOContext context) throws IOException;
   
   /**
-   * Packs the provided files into a compound format.
+   * Packs the provided segment's files into a compound format.
    */
-  // TODO: get checkAbort out of here, and everywhere, and have iw do it at a higher level
-  public abstract void write(Directory dir, SegmentInfo si, Collection<String> files, CheckAbort checkAbort, IOContext context) throws IOException;
-
-  /**
-   * Returns the compound file names used by this segment.
-   */
-  // TODO: get this out of here, and use trackingdirwrapper. but this is really scary in IW right now...
-  // NOTE: generally si.useCompoundFile is not even yet 'set' when this is called.
-  public abstract String[] files(SegmentInfo si);
+  public abstract void write(Directory dir, SegmentInfo si, IOContext context) throws IOException;
 }
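For illustration (not part of the patch), a toy implementation of the slimmed-down contract that simply concatenates the segment's files; the ".cmp" extension is invented here, and headers, checksums, and the matching getCompoundReader are omitted:

```java
import java.io.IOException;

import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

class ToyCompoundWriter {
  // The file list now comes from the SegmentInfo itself; no separate
  // Collection<String> or CheckAbort parameter is threaded through.
  static void write(Directory dir, SegmentInfo si, IOContext context) throws IOException {
    String dataFile = IndexFileNames.segmentFileName(si.name, "", "cmp");
    try (IndexOutput out = dir.createOutput(dataFile, context)) {
      for (String file : si.files()) {
        try (IndexInput in = dir.openInput(file, context)) {
          out.copyBytes(in, in.length());
        }
      }
    }
  }
}
```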
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java
index 93d5109..d0ea256 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java
@@ -97,7 +97,6 @@
         storedFieldsReader.visitDocument(docID, visitor);
         finishDocument();
         docCount++;
-        mergeState.checkAbort.work(300);
       }
     }
     finish(mergeState.mergeFieldInfos, docCount);
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/TermVectorsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/TermVectorsWriter.java
index cadc548..af12f09 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/TermVectorsWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/TermVectorsWriter.java
@@ -196,7 +196,6 @@
         }
         addAllDocVectors(vectors, mergeState);
         docCount++;
-        mergeState.checkAbort.work(300);
       }
     }
     finish(mergeState.mergeFieldInfos, docCount);
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java
index 41a8cf0..2872e78 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java
@@ -36,6 +36,7 @@
 import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.TYPE_BITS;
 import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.TYPE_MASK;
 import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.VERSION_CURRENT;
+import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.VERSION_CHUNK_STATS;
 import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.VERSION_START;
 
 import java.io.EOFException;
@@ -88,6 +89,8 @@
   private final int numDocs;
   private final boolean merging;
   private final BlockState state;
+  private final long numChunks; // number of compressed blocks written
+  private final long numDirtyChunks; // number of incomplete compressed blocks written
   private boolean closed;
 
   // used by clone
@@ -102,6 +105,8 @@
     this.compressionMode = reader.compressionMode;
     this.decompressor = reader.decompressor.clone();
     this.numDocs = reader.numDocs;
+    this.numChunks = reader.numChunks;
+    this.numDirtyChunks = reader.numDirtyChunks;
     this.merging = merging;
     this.state = new BlockState();
     this.closed = false;
@@ -145,9 +150,6 @@
     try {
       // Open the data file and read metadata
       fieldsStream = d.openInput(fieldsStreamFN, context);
-      if (maxPointer + CodecUtil.footerLength() != fieldsStream.length()) {
-        throw new CorruptIndexException("Invalid fieldsStream maxPointer (file truncated?): maxPointer=" + maxPointer + ", length=" + fieldsStream.length(), fieldsStream);
-      }
       final String codecNameDat = formatName + CODEC_SFX_DAT;
       final int fieldsVersion = CodecUtil.checkIndexHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
       if (version != fieldsVersion) {
@@ -161,6 +163,17 @@
       this.merging = false;
       this.state = new BlockState();
       
+      if (version >= VERSION_CHUNK_STATS) {
+        fieldsStream.seek(maxPointer);
+        numChunks = fieldsStream.readVLong();
+        numDirtyChunks = fieldsStream.readVLong();
+        if (numDirtyChunks > numChunks) {
+          throw new CorruptIndexException("invalid chunk counts: dirty=" + numDirtyChunks + ", total=" + numChunks, fieldsStream);
+        }
+      } else {
+        numChunks = numDirtyChunks = -1;
+      }
+      
       // NOTE: data file is too costly to verify checksum against all the bytes on open,
       // but for now we at least verify proper structure of the checksum footer: which looks
       // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
@@ -496,8 +509,6 @@
       final int totalLength = offsets[chunkDocs];
       final int numStoredFields = this.numStoredFields[index];
 
-      fieldsStream.seek(startPointer);
-
       final DataInput documentInput;
       if (length == 0) {
         // empty
@@ -506,6 +517,7 @@
         // already decompressed
         documentInput = new ByteArrayDataInput(bytes.bytes, bytes.offset + offset, length);
       } else if (sliced) {
+        fieldsStream.seek(startPointer);
         decompressor.decompress(fieldsStream, chunkSize, offset, Math.min(length, chunkSize - offset), bytes);
         documentInput = new DataInput() {
 
@@ -545,6 +557,7 @@
 
         };
       } else {
+        fieldsStream.seek(startPointer);
         decompressor.decompress(fieldsStream, totalLength, offset, length, bytes);
         assert bytes.length == length;
         documentInput = new ByteArrayDataInput(bytes.bytes, bytes.offset, bytes.length);
@@ -610,10 +623,34 @@
   CompressionMode getCompressionMode() {
     return compressionMode;
   }
+  
+  CompressingStoredFieldsIndexReader getIndexReader() {
+    return indexReader;
+  }
+  
+  long getMaxPointer() {
+    return maxPointer;
+  }
+  
+  IndexInput getFieldsStream() {
+    return fieldsStream;
+  }
 
   int getChunkSize() {
     return chunkSize;
   }
+  
+  long getNumChunks() {
+    return numChunks;
+  }
+  
+  long getNumDirtyChunks() {
+    return numDirtyChunks;
+  }
+
+  int getPackedIntsVersion() {
+    return packedIntsVersion;
+  }
 
   @Override
   public long ramBytesUsed() {
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsWriter.java
index dd6f45c..8e9944d 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsWriter.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.codecs.StoredFieldsReader;
 import org.apache.lucene.codecs.StoredFieldsWriter;
 import org.apache.lucene.codecs.compressing.CompressingStoredFieldsReader.SerializedDocument;
+import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexFileNames;
@@ -33,6 +34,7 @@
 import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BitUtil;
@@ -68,13 +70,15 @@
   static final String CODEC_SFX_IDX = "Index";
   static final String CODEC_SFX_DAT = "Data";
   static final int VERSION_START = 0;
-  static final int VERSION_CURRENT = VERSION_START;
+  static final int VERSION_CHUNK_STATS = 1;
+  static final int VERSION_CURRENT = VERSION_CHUNK_STATS;
 
   private final String segment;
   private CompressingStoredFieldsIndexWriter indexWriter;
   private IndexOutput fieldsStream;
 
   private final Compressor compressor;
+  private final CompressionMode compressionMode;
   private final int chunkSize;
   private final int maxDocsPerChunk;
 
@@ -83,12 +87,16 @@
   private int[] endOffsets; // end offsets in bufferedDocs
   private int docBase; // doc ID at the beginning of the chunk
   private int numBufferedDocs; // docBase + numBufferedDocs == current doc ID
+  
+  private long numChunks; // number of compressed blocks written
+  private long numDirtyChunks; // number of incomplete compressed blocks written
 
   /** Sole constructor. */
   public CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, String segmentSuffix, IOContext context,
       String formatName, CompressionMode compressionMode, int chunkSize, int maxDocsPerChunk, int blockSize) throws IOException {
     assert directory != null;
     this.segment = si.name;
+    this.compressionMode = compressionMode;
     this.compressor = compressionMode.newCompressor();
     this.chunkSize = chunkSize;
     this.maxDocsPerChunk = maxDocsPerChunk;
@@ -234,6 +242,7 @@
     docBase += numBufferedDocs;
     numBufferedDocs = 0;
     bufferedDocs.length = 0;
+    numChunks++;
   }
   
   byte scratchBytes[] = new byte[16];
@@ -459,6 +468,7 @@
   public void finish(FieldInfos fis, int numDocs) throws IOException {
     if (numBufferedDocs > 0) {
       flush();
+      numDirtyChunks++; // incomplete: we had to force this flush
     } else {
       assert bufferedDocs.length == 0;
     }
@@ -466,9 +476,24 @@
       throw new RuntimeException("Wrote " + docBase + " docs, finish called with numDocs=" + numDocs);
     }
     indexWriter.finish(numDocs, fieldsStream.getFilePointer());
+    fieldsStream.writeVLong(numChunks);
+    fieldsStream.writeVLong(numDirtyChunks);
     CodecUtil.writeFooter(fieldsStream);
     assert bufferedDocs.length == 0;
   }
+  
+  // bulk merge is scary: it's caused corruption bugs in the past.
+  // we try to be extra safe with this impl, but add an escape hatch to
+  // have a workaround for undiscovered bugs.
+  static final String BULK_MERGE_ENABLED_SYSPROP = CompressingStoredFieldsWriter.class.getName() + ".enableBulkMerge";
+  static final boolean BULK_MERGE_ENABLED;
+  static {
+    boolean v = true;
+    try {
+      v = Boolean.parseBoolean(System.getProperty(BULK_MERGE_ENABLED_SYSPROP, "true"));
+    } catch (SecurityException ignored) {}
+    BULK_MERGE_ENABLED = v;
+  }
 
   @Override
   public int merge(MergeState mergeState) throws IOException {
@@ -491,8 +516,8 @@
       final int maxDoc = mergeState.maxDocs[readerIndex];
       final Bits liveDocs = mergeState.liveDocs[readerIndex];
 
-      // if its some other format, or an older version of this format:
-      if (matchingFieldsReader == null || matchingFieldsReader.getVersion() != VERSION_CURRENT) {
+      // if it's some other format, an older version of this format, or bulk merging is disabled via the safety switch:
+      if (matchingFieldsReader == null || matchingFieldsReader.getVersion() != VERSION_CURRENT || BULK_MERGE_ENABLED == false) {
         // naive merge...
         StoredFieldsReader storedFieldsReader = mergeState.storedFieldsReaders[readerIndex];
         if (storedFieldsReader != null) {
@@ -506,12 +531,80 @@
           storedFieldsReader.visitDocument(docID, visitor);
           finishDocument();
           ++docCount;
-          mergeState.checkAbort.work(300);
         }
+      } else if (matchingFieldsReader.getCompressionMode() == compressionMode && 
+                 matchingFieldsReader.getChunkSize() == chunkSize && 
+                 matchingFieldsReader.getPackedIntsVersion() == PackedInts.VERSION_CURRENT &&
+                 liveDocs == null &&
+                 !tooDirty(matchingFieldsReader)) { 
+        // optimized merge, raw byte copy
+        // it's not worth fine-graining this if there are deletions.
+        
+        // if the format is older, it's always handled by the naive merge case above
+        assert matchingFieldsReader.getVersion() == VERSION_CURRENT;        
+        matchingFieldsReader.checkIntegrity();
+        
+        // flush any pending chunks
+        if (numBufferedDocs > 0) {
+          flush();
+          numDirtyChunks++; // incomplete: we had to force this flush
+        }
+        
+        // iterate over each chunk. we use the stored fields index to find chunk boundaries,
+        // read the docstart + doccount from the chunk header (we write a new header, since doc numbers will change),
+        // and just copy the bytes directly.
+        IndexInput rawDocs = matchingFieldsReader.getFieldsStream();
+        CompressingStoredFieldsIndexReader index = matchingFieldsReader.getIndexReader();
+        rawDocs.seek(index.getStartPointer(0));
+        int docID = 0;
+        while (docID < maxDoc) {
+          // read header
+          int base = rawDocs.readVInt();
+          if (base != docID) {
+            throw new CorruptIndexException("invalid state: base=" + base + ", docID=" + docID, rawDocs);
+          }
+          int code = rawDocs.readVInt();
+          
+          // write a new index entry and new header for this chunk.
+          int bufferedDocs = code >>> 1;
+          indexWriter.writeIndex(bufferedDocs, fieldsStream.getFilePointer());
+          fieldsStream.writeVInt(docBase); // rebase
+          fieldsStream.writeVInt(code);
+          docID += bufferedDocs;
+          docBase += bufferedDocs;
+          docCount += bufferedDocs;
+          
+          if (docID > maxDoc) {
+            throw new CorruptIndexException("invalid state: base=" + base + ", count=" + bufferedDocs + ", maxDoc=" + maxDoc, rawDocs);
+          }
+          
+          // copy bytes until the next chunk boundary (or end of chunk data).
+          // using the stored fields index for this isn't the most efficient, but fast enough
+          // and is a source of redundancy for detecting bad things.
+          final long end;
+          if (docID == maxDoc) {
+            end = matchingFieldsReader.getMaxPointer();
+          } else {
+            end = index.getStartPointer(docID);
+          }
+          fieldsStream.copyBytes(rawDocs, end - rawDocs.getFilePointer());
+        }
+               
+        if (rawDocs.getFilePointer() != matchingFieldsReader.getMaxPointer()) {
+          throw new CorruptIndexException("invalid state: pos=" + rawDocs.getFilePointer() + ", max=" + matchingFieldsReader.getMaxPointer(), rawDocs);
+        }
+        
+        // since we bulk merged all chunks, we inherit any dirty ones from this segment.
+        numChunks += matchingFieldsReader.getNumChunks();
+        numDirtyChunks += matchingFieldsReader.getNumDirtyChunks();
       } else {
         // optimized merge, we copy serialized (but decompressed) bytes directly
         // even on simple docs (1 stored field), it seems to help by about 20%
+        
+        // if the format is older, it's always handled by the naive merge case above
+        assert matchingFieldsReader.getVersion() == VERSION_CURRENT;
         matchingFieldsReader.checkIntegrity();
+
         for (int docID = 0; docID < maxDoc; docID++) {
           if (liveDocs != null && liveDocs.get(docID) == false) {
             continue;
@@ -522,11 +615,23 @@
           numStoredFieldsInDoc = doc.numStoredFields;
           finishDocument();
           ++docCount;
-          mergeState.checkAbort.work(300);
         }
       }
     }
     finish(mergeState.mergeFieldInfos, docCount);
     return docCount;
   }
+  
+  /** 
+   * Returns true if we should recompress this reader, even though we could bulk merge compressed data.
+   * <p>
+   * The last chunk written for a segment is typically incomplete, so without recompressing,
+   * in some worst-case situations (e.g. frequent reopen with tiny flushes), over time the 
+   * compression ratio can degrade. This is a safety switch.
+   */
+  boolean tooDirty(CompressingStoredFieldsReader candidate) {
+    // more than 1% dirty, or more than hard limit of 1024 dirty chunks
+    return candidate.getNumDirtyChunks() > 1024 || 
+           candidate.getNumDirtyChunks() * 100 > candidate.getNumChunks();
+  }
 }
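To make the tooDirty() threshold concrete, an illustrative check (not part of the patch) with hypothetical chunk counts, plus the bulk-merge escape hatch defined above:

```java
class DirtinessExample {
  public static void main(String[] args) {
    // Hypothetical counts: 30 dirty chunks out of 2000 total is 1.5%,
    // which exceeds the 1% threshold, so the reader would be recompressed.
    long numChunks = 2000, numDirtyChunks = 30;
    boolean tooDirty = numDirtyChunks > 1024              // hard cap on dirty chunks
                    || numDirtyChunks * 100 > numChunks;  // 30 * 100 = 3000 > 2000
    System.out.println(tooDirty); // true
    // Bulk copying can also be switched off entirely:
    // -Dorg.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.enableBulkMerge=false
  }
}
```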
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java
index e3ff664..5ff54fc 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java
@@ -26,6 +26,7 @@
 import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.POSITIONS;
 import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_EXTENSION;
 import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_INDEX_EXTENSION;
+import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_CHUNK_STATS;
 import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_CURRENT;
 import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_START;
 
@@ -82,6 +83,9 @@
   private final int numDocs;
   private boolean closed;
   private final BlockPackedReaderIterator reader;
+  private final long numChunks; // number of compressed blocks written
+  private final long numDirtyChunks; // number of incomplete compressed blocks written
+  private final long maxPointer; // end of the data section
 
   // used by clone
   private CompressingTermVectorsReader(CompressingTermVectorsReader reader) {
@@ -95,6 +99,9 @@
     this.numDocs = reader.numDocs;
     this.reader = new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, PACKED_BLOCK_SIZE, 0);
     this.version = reader.version;
+    this.numChunks = reader.numChunks;
+    this.numDirtyChunks = reader.numDirtyChunks;
+    this.maxPointer = reader.maxPointer;
     this.closed = false;
   }
 
@@ -109,6 +116,8 @@
     int version = -1;
     CompressingStoredFieldsIndexReader indexReader = null;
     
+    long maxPointer = -1;
+    
     // Load the index into memory
     final String indexName = IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_INDEX_EXTENSION);
     try (ChecksumIndexInput input = d.openChecksumInput(indexName, context)) {
@@ -118,7 +127,7 @@
         version = CodecUtil.checkIndexHeader(input, codecNameIdx, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
         assert CodecUtil.indexHeaderLength(codecNameIdx, segmentSuffix) == input.getFilePointer();
         indexReader = new CompressingStoredFieldsIndexReader(input, si);
-        input.readVLong(); // the end of the data file
+        maxPointer = input.readVLong(); // the end of the data section
       } catch (Throwable exception) {
         priorE = exception;
       } finally {
@@ -128,6 +137,7 @@
     
     this.version = version;
     this.indexReader = indexReader;
+    this.maxPointer = maxPointer;
 
     try {
       // Open the data file and read metadata
@@ -141,6 +151,18 @@
       assert CodecUtil.indexHeaderLength(codecNameDat, segmentSuffix) == vectorsStream.getFilePointer();
       
       long pos = vectorsStream.getFilePointer();
+      
+      if (version >= VERSION_CHUNK_STATS) {
+        vectorsStream.seek(maxPointer);
+        numChunks = vectorsStream.readVLong();
+        numDirtyChunks = vectorsStream.readVLong();
+        if (numDirtyChunks > numChunks) {
+          throw new CorruptIndexException("invalid chunk counts: dirty=" + numDirtyChunks + ", total=" + numChunks, vectorsStream);
+        }
+      } else {
+        numChunks = numDirtyChunks = -1;
+      }
+      
       // NOTE: data file is too costly to verify checksum against all the bytes on open,
       // but for now we at least verify proper structure of the checksum footer: which looks
       // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
@@ -177,13 +199,25 @@
     return version;
   }
 
-  CompressingStoredFieldsIndexReader getIndex() {
+  CompressingStoredFieldsIndexReader getIndexReader() {
     return indexReader;
   }
 
   IndexInput getVectorsStream() {
     return vectorsStream;
   }
+  
+  long getMaxPointer() {
+    return maxPointer;
+  }
+  
+  long getNumChunks() {
+    return numChunks;
+  }
+  
+  long getNumDirtyChunks() {
+    return numDirtyChunks;
+  }
 
   /**
    * @throws AlreadyClosedException if this TermVectorsReader is closed
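Both the stored-fields and term-vectors readers recover these statistics the same way: the index file supplies maxPointer (the end of the data section), and the two vlongs sit between that point and the codec footer. A sketch of the read (not part of the patch), assuming an already-open data stream:

```java
import java.io.IOException;

import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.store.IndexInput;

class ChunkStatsExample {
  // Reads the VERSION_CHUNK_STATS trailer: two vlongs written just before the footer.
  static long[] readChunkStats(IndexInput in, long maxPointer) throws IOException {
    in.seek(maxPointer);
    long numChunks = in.readVLong();
    long numDirtyChunks = in.readVLong();
    if (numDirtyChunks > numChunks) {
      throw new CorruptIndexException("invalid chunk counts: dirty=" + numDirtyChunks
          + ", total=" + numChunks, in);
    }
    return new long[] { numChunks, numDirtyChunks };
  }
}
```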
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java
index a9cfdc7..f432dd2 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java
@@ -28,6 +28,7 @@
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.TermVectorsReader;
 import org.apache.lucene.codecs.TermVectorsWriter;
+import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
@@ -66,7 +67,8 @@
   static final String CODEC_SFX_DAT = "Data";
 
   static final int VERSION_START = 0;
-  static final int VERSION_CURRENT = VERSION_START;
+  static final int VERSION_CHUNK_STATS = 1;
+  static final int VERSION_CURRENT = VERSION_CHUNK_STATS;
 
   static final int PACKED_BLOCK_SIZE = 64;
 
@@ -75,15 +77,16 @@
   static final int  PAYLOADS = 0x04;
   static final int FLAGS_BITS = PackedInts.bitsRequired(POSITIONS | OFFSETS | PAYLOADS);
 
-  private final Directory directory;
   private final String segment;
-  private final String segmentSuffix;
   private CompressingStoredFieldsIndexWriter indexWriter;
   private IndexOutput vectorsStream;
 
   private final CompressionMode compressionMode;
   private final Compressor compressor;
   private final int chunkSize;
+  
+  private long numChunks; // number of compressed blocks written
+  private long numDirtyChunks; // number of incomplete compressed blocks written
 
   /** a pending doc */
   private class DocData {
@@ -206,9 +209,7 @@
   public CompressingTermVectorsWriter(Directory directory, SegmentInfo si, String segmentSuffix, IOContext context,
       String formatName, CompressionMode compressionMode, int chunkSize, int blockSize) throws IOException {
     assert directory != null;
-    this.directory = directory;
     this.segment = si.name;
-    this.segmentSuffix = segmentSuffix;
     this.compressionMode = compressionMode;
     this.compressor = compressionMode.newCompressor();
     this.chunkSize = chunkSize;
@@ -365,6 +366,7 @@
     curDoc = null;
     curField = null;
     termSuffixes.length = 0;
+    numChunks++;
   }
 
   private int flushNumFields(int chunkDocs) throws IOException {
@@ -647,11 +649,14 @@
   public void finish(FieldInfos fis, int numDocs) throws IOException {
     if (!pendingDocs.isEmpty()) {
       flush();
+      numDirtyChunks++; // incomplete: we had to force this flush
     }
     if (numDocs != this.numDocs) {
       throw new RuntimeException("Wrote " + this.numDocs + " docs, finish called with numDocs=" + numDocs);
     }
     indexWriter.finish(numDocs, vectorsStream.getFilePointer());
+    vectorsStream.writeVLong(numChunks);
+    vectorsStream.writeVLong(numDirtyChunks);
     CodecUtil.writeFooter(vectorsStream);
   }
 
@@ -712,6 +717,19 @@
 
     curField.totalPositions += numProx;
   }
+  
+  // bulk merge is scary: it's caused corruption bugs in the past.
+  // we try to be extra safe with this impl, but add an escape hatch to
+  // have a workaround for undiscovered bugs.
+  static final String BULK_MERGE_ENABLED_SYSPROP = CompressingTermVectorsWriter.class.getName() + ".enableBulkMerge";
+  static final boolean BULK_MERGE_ENABLED;
+  static {
+    boolean v = true;
+    try {
+      v = Boolean.parseBoolean(System.getProperty(BULK_MERGE_ENABLED_SYSPROP, "true"));
+    } catch (SecurityException ignored) {}
+    BULK_MERGE_ENABLED = v;
+  }
 
   @Override
   public int merge(MergeState mergeState) throws IOException {
@@ -732,17 +750,81 @@
 
       final int maxDoc = mergeState.maxDocs[readerIndex];
       final Bits liveDocs = mergeState.liveDocs[readerIndex];
-
-      if (matchingVectorsReader == null
-          || matchingVectorsReader.getVersion() != VERSION_CURRENT
-          || matchingVectorsReader.getCompressionMode() != compressionMode
-          || matchingVectorsReader.getChunkSize() != chunkSize
-          || matchingVectorsReader.getPackedIntsVersion() != PackedInts.VERSION_CURRENT) {
+      
+      if (matchingVectorsReader != null &&
+          matchingVectorsReader.getCompressionMode() == compressionMode &&
+          matchingVectorsReader.getChunkSize() == chunkSize &&
+          matchingVectorsReader.getVersion() == VERSION_CURRENT && 
+          matchingVectorsReader.getPackedIntsVersion() == PackedInts.VERSION_CURRENT &&
+          BULK_MERGE_ENABLED &&
+          liveDocs == null &&
+          !tooDirty(matchingVectorsReader)) {
+        // optimized merge, raw byte copy
+        // it's not worth fine-graining this if there are deletions.
+        
+        matchingVectorsReader.checkIntegrity();
+        
+        // flush any pending chunks
+        if (!pendingDocs.isEmpty()) {
+          flush();
+          numDirtyChunks++; // incomplete: we had to force this flush
+        }
+        
+        // iterate over each chunk. we use the vectors index to find chunk boundaries,
+        // read the docstart + doccount from the chunk header (we write a new header, since doc numbers will change),
+        // and just copy the bytes directly.
+        IndexInput rawDocs = matchingVectorsReader.getVectorsStream();
+        CompressingStoredFieldsIndexReader index = matchingVectorsReader.getIndexReader();
+        rawDocs.seek(index.getStartPointer(0));
+        int docID = 0;
+        while (docID < maxDoc) {
+          // read header
+          int base = rawDocs.readVInt();
+          if (base != docID) {
+            throw new CorruptIndexException("invalid state: base=" + base + ", docID=" + docID, rawDocs);
+          }
+          int bufferedDocs = rawDocs.readVInt();
+          
+          // write a new index entry and new header for this chunk.
+          indexWriter.writeIndex(bufferedDocs, vectorsStream.getFilePointer());
+          vectorsStream.writeVInt(docCount); // rebase
+          vectorsStream.writeVInt(bufferedDocs);
+          docID += bufferedDocs;
+          docCount += bufferedDocs;
+          numDocs += bufferedDocs;
+          
+          if (docID > maxDoc) {
+            throw new CorruptIndexException("invalid state: base=" + base + ", count=" + bufferedDocs + ", maxDoc=" + maxDoc, rawDocs);
+          }
+          
+          // copy bytes until the next chunk boundary (or end of chunk data).
+          // using the stored fields index for this isn't the most efficient, but fast enough
+          // and is a source of redundancy for detecting bad things.
+          final long end;
+          if (docID == maxDoc) {
+            end = matchingVectorsReader.getMaxPointer();
+          } else {
+            end = index.getStartPointer(docID);
+          }
+          vectorsStream.copyBytes(rawDocs, end - rawDocs.getFilePointer());
+        }
+               
+        if (rawDocs.getFilePointer() != matchingVectorsReader.getMaxPointer()) {
+          throw new CorruptIndexException("invalid state: pos=" + rawDocs.getFilePointer() + ", max=" + matchingVectorsReader.getMaxPointer(), rawDocs);
+        }
+        
+        // since we bulk merged all chunks, we inherit any dirty ones from this segment.
+        numChunks += matchingVectorsReader.getNumChunks();
+        numDirtyChunks += matchingVectorsReader.getNumDirtyChunks();
+      } else {        
         // naive merge...
         if (vectorsReader != null) {
           vectorsReader.checkIntegrity();
         }
-        for (int i = nextLiveDoc(0, liveDocs, maxDoc); i < maxDoc; i = nextLiveDoc(i + 1, liveDocs, maxDoc)) {
+        for (int i = 0; i < maxDoc; i++) {
+          if (liveDocs != null && liveDocs.get(i) == false) {
+            continue;
+          }
           Fields vectors;
           if (vectorsReader == null) {
             vectors = null;
@@ -751,91 +833,23 @@
           }
           addAllDocVectors(vectors, mergeState);
           ++docCount;
-          mergeState.checkAbort.work(300);
         }
-      } else {
-        final CompressingStoredFieldsIndexReader index = matchingVectorsReader.getIndex();
-        final IndexInput vectorsStreamOrig = matchingVectorsReader.getVectorsStream();
-        vectorsStreamOrig.seek(0);
-        final ChecksumIndexInput vectorsStream = new BufferedChecksumIndexInput(vectorsStreamOrig.clone());
-        
-        for (int i = nextLiveDoc(0, liveDocs, maxDoc); i < maxDoc; ) {
-          // We make sure to move the checksum input in any case, otherwise the final
-          // integrity check might need to read the whole file a second time
-          final long startPointer = index.getStartPointer(i);
-          if (startPointer > vectorsStream.getFilePointer()) {
-            vectorsStream.seek(startPointer);
-          }
-          if (pendingDocs.isEmpty()
-              && (i == 0 || index.getStartPointer(i - 1) < startPointer)) { // start of a chunk
-            final int docBase = vectorsStream.readVInt();
-            final int chunkDocs = vectorsStream.readVInt();
-            assert docBase + chunkDocs <= maxDoc;
-            if (docBase + chunkDocs < maxDoc
-                && nextDeletedDoc(docBase, liveDocs, docBase + chunkDocs) == docBase + chunkDocs) {
-              final long chunkEnd = index.getStartPointer(docBase + chunkDocs);
-              final long chunkLength = chunkEnd - vectorsStream.getFilePointer();
-              indexWriter.writeIndex(chunkDocs, this.vectorsStream.getFilePointer());
-              this.vectorsStream.writeVInt(docCount);
-              this.vectorsStream.writeVInt(chunkDocs);
-              this.vectorsStream.copyBytes(vectorsStream, chunkLength);
-              docCount += chunkDocs;
-              this.numDocs += chunkDocs;
-              mergeState.checkAbort.work(300 * chunkDocs);
-              i = nextLiveDoc(docBase + chunkDocs, liveDocs, maxDoc);
-            } else {
-              for (; i < docBase + chunkDocs; i = nextLiveDoc(i + 1, liveDocs, maxDoc)) {
-                Fields vectors;
-                if (vectorsReader == null) {
-                  vectors = null;
-                } else {
-                  vectors = vectorsReader.get(i);
-                }
-                addAllDocVectors(vectors, mergeState);
-                ++docCount;
-                mergeState.checkAbort.work(300);
-              }
-            }
-          } else {
-            Fields vectors;
-            if (vectorsReader == null) {
-              vectors = null;
-            } else {
-              vectors = vectorsReader.get(i);
-            }
-            addAllDocVectors(vectors, mergeState);
-            ++docCount;
-            mergeState.checkAbort.work(300);
-            i = nextLiveDoc(i + 1, liveDocs, maxDoc);
-          }
-        }
-        
-        vectorsStream.seek(vectorsStream.length() - CodecUtil.footerLength());
-        CodecUtil.checkFooter(vectorsStream);
       }
     }
     finish(mergeState.mergeFieldInfos, docCount);
     return docCount;
   }
 
-  private static int nextLiveDoc(int doc, Bits liveDocs, int maxDoc) {
-    if (liveDocs == null) {
-      return doc;
-    }
-    while (doc < maxDoc && !liveDocs.get(doc)) {
-      ++doc;
-    }
-    return doc;
+  /** 
+   * Returns true if we should recompress this reader, even though we could bulk merge compressed data.
+   * <p>
+   * The last chunk written for a segment is typically incomplete, so without recompressing,
+   * in some worst-case situations (e.g. frequent reopens with tiny flushes), over time the 
+   * compression ratio can degrade. This is a safety switch.
+   */
+  boolean tooDirty(CompressingTermVectorsReader candidate) {
+    // over the hard limit of 1024 dirty chunks, or more than 1% dirty
+    return candidate.getNumDirtyChunks() > 1024 || 
+           candidate.getNumDirtyChunks() * 100 > candidate.getNumChunks();
   }
-
-  private static int nextDeletedDoc(int doc, Bits liveDocs, int maxDoc) {
-    if (liveDocs == null) {
-      return maxDoc;
-    }
-    while (doc < maxDoc && liveDocs.get(doc)) {
-      ++doc;
-    }
-    return doc;
-  }
-
 }
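
A note on the heuristic pair above (BULK_MERGE_ENABLED plus tooDirty): bulk merge copies
compressed chunks verbatim, so the merged segment inherits every incomplete ("dirty") chunk
it copies. The sketch below restates the two thresholds as a standalone method (the method
name is hypothetical; the logic mirrors tooDirty() in this patch):

    // Hedged sketch: recompress instead of bulk-copying when too many chunks are dirty.
    static boolean shouldRecompress(long numDirtyChunks, long numChunks) {
      // Hard cap: never inherit more than 1024 dirty chunks, whatever the ratio.
      if (numDirtyChunks > 1024) {
        return true;
      }
      // Ratio cap: recompress once more than 1% of chunks are dirty,
      // since (dirty * 100 > total) <=> (dirty / total > 1%).
      return numDirtyChunks * 100 > numChunks;
    }

For example, a segment with 50,000 chunks tolerates up to 500 dirty ones before
recompression, while a segment with 200,000 chunks hits the hard cap of 1024 first.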
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java
index f98a93a..ba041e9 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressionMode.java
@@ -70,8 +70,10 @@
 
     @Override
     public Compressor newCompressor() {
+      // notes:
       // 3 is the highest level that doesn't have lazy match evaluation
-      return new DeflateCompressor(3);
+      // 6 is the default; anything higher is usually just a waste of CPU
+      return new DeflateCompressor(6);
     }
 
     @Override
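
On the level bump above: java.util.zip exposes Deflate levels 0-9, and zlib's default is
level 6; levels 7-9 typically buy little extra ratio for noticeably more CPU. A minimal,
hedged sketch using the JDK Deflater directly (not the patch's DeflateCompressor, whose
buffering and reuse are elided here):

    import java.util.Arrays;
    import java.util.zip.Deflater;

    // Compress a byte[] at level 6, the same level chosen above.
    static byte[] deflate(byte[] input) {
      Deflater deflater = new Deflater(6);
      deflater.setInput(input);
      deflater.finish();
      // Assumes compressible input; real code loops and grows the buffer as needed.
      byte[] buffer = new byte[input.length + 64];
      int len = deflater.deflate(buffer);
      deflater.end();
      return Arrays.copyOf(buffer, len);
    }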
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50CompoundFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50CompoundFormat.java
index 1c4443a..921fdaa 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50CompoundFormat.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50CompoundFormat.java
@@ -23,7 +23,6 @@
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.CompoundFormat;
 import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.MergeState.CheckAbort;
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.store.Directory;
@@ -73,7 +72,7 @@
   }
 
   @Override
-  public void write(Directory dir, SegmentInfo si, Collection<String> files, CheckAbort checkAbort, IOContext context) throws IOException {
+  public void write(Directory dir, SegmentInfo si, IOContext context) throws IOException {
     String dataFile = IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION);
     String entriesFile = IndexFileNames.segmentFileName(si.name, "", ENTRIES_EXTENSION);
     
@@ -83,8 +82,8 @@
       CodecUtil.writeIndexHeader(entries, ENTRY_CODEC, VERSION_CURRENT, si.getId(), "");
       
       // write number of files
-      entries.writeVInt(files.size());
-      for (String file : files) {
+      entries.writeVInt(si.files().size());
+      for (String file : si.files()) {
         
         // write bytes for file
         long startOffset = data.getFilePointer();
@@ -99,8 +98,6 @@
         entries.writeString(IndexFileNames.stripSegmentName(file));
         entries.writeLong(startOffset);
         entries.writeLong(length);
-        
-        checkAbort.work(length);
       }
       
       CodecUtil.writeFooter(data);
@@ -108,14 +105,6 @@
     }
   }
 
-  @Override
-  public String[] files(SegmentInfo si) {
-    return new String[] {
-      IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION),
-      IndexFileNames.segmentFileName(si.name, "", ENTRIES_EXTENSION)
-    };
-  }
-
   /** Extension of compound file */
   static final String DATA_EXTENSION = "cfs";
   /** Extension of compound file entries */
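
For reference, the entries table that write() produces above is a flat directory of the
compound data file: one VInt count, then a (name, startOffset, length) record per member
file. A hedged sketch of the matching decode side (helper name hypothetical; DataInput is
org.apache.lucene.store.DataInput, and the real reader lives in the compound format's
open path):

    // Decode the entries table: VInt fileCount, then per file:
    // String name, long startOffset, long length.
    static Map<String,long[]> readEntries(DataInput entries) throws IOException {
      int count = entries.readVInt();
      Map<String,long[]> files = new HashMap<>();
      for (int i = 0; i < count; i++) {
        String name = entries.readString();
        long offset = entries.readLong();
        long length = entries.readLong();
        files.put(name, new long[] { offset, length });
      }
      return files;
    }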
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipReader.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipReader.java
index a46774a..e3a8d1b 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipReader.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipReader.java
@@ -179,10 +179,10 @@
   @Override
   protected int readSkipData(int level, IndexInput skipStream) throws IOException {
     int delta = skipStream.readVInt();
-    docPointer[level] += skipStream.readVInt();
+    docPointer[level] += skipStream.readVLong();
 
     if (posPointer != null) {
-      posPointer[level] += skipStream.readVInt();
+      posPointer[level] += skipStream.readVLong();
       posBufferUpto[level] = skipStream.readVInt();
 
       if (payloadByteUpto != null) {
@@ -190,7 +190,7 @@
       }
 
       if (payPointer != null) {
-        payPointer[level] += skipStream.readVInt();
+        payPointer[level] += skipStream.readVLong();
       }
     }
     return delta;
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipWriter.java
index a4d0459..4e65970 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50SkipWriter.java
@@ -147,12 +147,12 @@
     skipBuffer.writeVInt(delta);
     lastSkipDoc[level] = curDoc;
 
-    skipBuffer.writeVInt((int) (curDocPointer - lastSkipDocPointer[level]));
+    skipBuffer.writeVLong(curDocPointer - lastSkipDocPointer[level]);
     lastSkipDocPointer[level] = curDocPointer;
 
     if (fieldHasPositions) {
 
-      skipBuffer.writeVInt((int) (curPosPointer - lastSkipPosPointer[level]));
+      skipBuffer.writeVLong(curPosPointer - lastSkipPosPointer[level]);
       lastSkipPosPointer[level] = curPosPointer;
       skipBuffer.writeVInt(curPosBufferUpto);
 
@@ -161,7 +161,7 @@
       }
 
       if (fieldHasOffsets || fieldHasPayloads) {
-        skipBuffer.writeVInt((int) (curPayPointer - lastSkipPayPointer[level]));
+        skipBuffer.writeVLong(curPayPointer - lastSkipPayPointer[level]);
         lastSkipPayPointer[level] = curPayPointer;
       }
     }
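
The writeVInt to writeVLong changes in the skip reader/writer above are correctness fixes:
these values are file-pointer deltas, and for very large segments a delta can exceed
Integer.MAX_VALUE, which the old (int) casts silently truncated. Both methods share the
same base-128 varint scheme; a self-contained, hedged sketch of the encoding (equivalent
in spirit to DataOutput.writeVLong / DataInput.readVLong):

    // 7 payload bits per byte, high bit set on every byte except the last.
    static void writeVLong(java.io.ByteArrayOutputStream out, long v) {
      while ((v & ~0x7FL) != 0) {
        out.write((int) ((v & 0x7F) | 0x80));
        v >>>= 7;
      }
      out.write((int) v);
    }

    // Reads until a byte with the high bit clear; assumes well-formed input.
    static long readVLong(java.io.ByteArrayInputStream in) {
      long value = 0;
      int shift = 0;
      int b;
      while (((b = in.read()) & 0x80) != 0) {
        value |= (long) (b & 0x7F) << shift;
        shift += 7;
      }
      return value | ((long) b << shift);
    }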
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50StoredFieldsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50StoredFieldsFormat.java
index 8505996..2774e01 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50StoredFieldsFormat.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50StoredFieldsFormat.java
@@ -71,7 +71,7 @@
  * <a href="http://fastcompression.blogspot.fr/2011/05/lz4-explained.html">compression format</a>.</p>
  * <p>Here is a more detailed description of the field data file format:</p>
  * <ul>
- * <li>FieldData (.fdt) --&gt; &lt;Header&gt;, PackedIntsVersion, &lt;Chunk&gt;<sup>ChunkCount</sup></li>
+ * <li>FieldData (.fdt) --&gt; &lt;Header&gt;, PackedIntsVersion, &lt;Chunk&gt;<sup>ChunkCount</sup>, ChunkCount, DirtyChunkCount, Footer</li>
  * <li>Header --&gt; {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>PackedIntsVersion --&gt; {@link PackedInts#VERSION_CURRENT} as a {@link DataOutput#writeVInt VInt}</li>
 * <li>ChunkCount is not known in advance and is the number of chunks necessary to store all documents of the segment</li>
@@ -102,6 +102,9 @@
  * <li>FieldNum --&gt; an ID of the field</li>
  * <li>Value --&gt; {@link DataOutput#writeString(String) String} | BinaryValue | Int | Float | Long | Double depending on Type</li>
  * <li>BinaryValue --&gt; ValueLength &lt;Byte&gt;<sup>ValueLength</sup></li>
+ * <li>ChunkCount --&gt; the number of chunks in this file</li>
+ * <li>DirtyChunkCount --&gt; the number of prematurely flushed chunks in this file</li>
+ * <li>Footer --&gt; {@link CodecUtil#writeFooter CodecFooter}</li>
  * </ul>
  * <p>Notes</p>
  * <ul>
@@ -123,9 +126,10 @@
  * <li><a name="field_index" id="field_index"></a>
  * <p>A fields index file (extension <tt>.fdx</tt>).</p>
  * <ul>
- * <li>FieldsIndex (.fdx) --&gt; &lt;Header&gt;, &lt;ChunkIndex&gt;</li>
+ * <li>FieldsIndex (.fdx) --&gt; &lt;Header&gt;, &lt;ChunkIndex&gt;, Footer</li>
  * <li>Header --&gt; {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>ChunkIndex: See {@link CompressingStoredFieldsIndexWriter}</li>
+ * <li>Footer --&gt; {@link CodecUtil#writeFooter CodecFooter}</li>
  * </ul>
  * </li>
  * </ol>
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50TermVectorsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50TermVectorsFormat.java
index bf9465d..ca62754 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50TermVectorsFormat.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene50/Lucene50TermVectorsFormat.java
@@ -58,7 +58,7 @@
  * {@link BlockPackedWriter blocks of packed ints} for positions.</p>
  * <p>Here is a more detailed description of the field data file format:</p>
  * <ul>
- * <li>VectorData (.tvd) --&gt; &lt;Header&gt;, PackedIntsVersion, ChunkSize, &lt;Chunk&gt;<sup>ChunkCount</sup>, Footer</li>
+ * <li>VectorData (.tvd) --&gt; &lt;Header&gt;, PackedIntsVersion, ChunkSize, &lt;Chunk&gt;<sup>ChunkCount</sup>, ChunkCount, DirtyChunkCount, Footer</li>
  * <li>Header --&gt; {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>PackedIntsVersion --&gt; {@link PackedInts#VERSION_CURRENT} as a {@link DataOutput#writeVInt VInt}</li>
  * <li>ChunkSize is the number of bytes of terms to accumulate before flushing, as a {@link DataOutput#writeVInt VInt}</li>
@@ -106,6 +106,8 @@
  * <li>FieldTermsAndPayLoads --&gt; Terms (Payloads)</li>
  * <li>Terms: term bytes</li>
  * <li>Payloads: payload bytes (if the field has payloads)</li>
+ * <li>ChunkCount --&gt; the number of chunks in this file</li>
+ * <li>DirtyChunkCount --&gt; the number of prematurely flushed chunks in this file</li>
  * <li>Footer --&gt; {@link CodecUtil#writeFooter CodecFooter}</li>
  * </ul>
  * </li>
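
To make the new DirtyChunkCount field concrete: it feeds the tooDirty() heuristic added
earlier in this patch. A segment whose .tvd trailer records ChunkCount=50,000 and
DirtyChunkCount=400 remains bulk-merge eligible (400 is at most 1% of 50,000 and under the
hard cap of 1024), while one recording DirtyChunkCount=1,500 is recompressed on merge
regardless of ratio, because it exceeds the hard cap.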
diff --git a/lucene/core/src/java/org/apache/lucene/document/BigDecimalComparator.java b/lucene/core/src/java/org/apache/lucene/document/BigDecimalComparator.java
index b34e4e1..4cf940d 100644
--- a/lucene/core/src/java/org/apache/lucene/document/BigDecimalComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/document/BigDecimalComparator.java
@@ -25,13 +25,13 @@
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.SimpleFieldComparator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 
 // TODO: this doesn't use the ord; we could index BinaryDV instead for the single valued case?
-class BigDecimalComparator extends FieldComparator<BigDecimal> {
+class BigDecimalComparator extends SimpleFieldComparator<BigDecimal> {
   private final String field;
   private final BytesRef[] values;
   private final int byteWidth;
@@ -107,7 +107,7 @@
   }
 
   @Override
-  public FieldComparator<BigDecimal> setNextReader(LeafReaderContext context) throws IOException {
+  public void doSetNextReader(LeafReaderContext context) throws IOException {
     currentReaderValues = getDocValues(context);
     assert currentReaderValues != null;
     docsWithField = DocValues.getDocsWithField(context.reader(), field);
@@ -116,7 +116,6 @@
     if (docsWithField instanceof Bits.MatchAllBits) {
       docsWithField = null;
     }
-    return this;
   }
 
   protected SortedDocValues getDocValues(LeafReaderContext context) throws IOException {
diff --git a/lucene/core/src/java/org/apache/lucene/document/BigIntComparator.java b/lucene/core/src/java/org/apache/lucene/document/BigIntComparator.java
index b9b343e..03b774a 100644
--- a/lucene/core/src/java/org/apache/lucene/document/BigIntComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/document/BigIntComparator.java
@@ -25,12 +25,13 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.SimpleFieldComparator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 
 // TODO: this doesn't use the ord; we could index BinaryDV instead for the single valued case?
-class BigIntComparator extends FieldComparator<BigInteger> {
+class BigIntComparator extends SimpleFieldComparator<BigInteger> {
   private final String field;
   private final BytesRef[] values;
   private final int byteWidth;
@@ -104,7 +105,7 @@
   }
 
   @Override
-  public FieldComparator<BigInteger> setNextReader(LeafReaderContext context) throws IOException {
+  public void doSetNextReader(LeafReaderContext context) throws IOException {
     currentReaderValues = getDocValues(context);
     assert currentReaderValues != null;
     docsWithField = DocValues.getDocsWithField(context.reader(), field);
@@ -113,7 +114,6 @@
     if (docsWithField instanceof Bits.MatchAllBits) {
       docsWithField = null;
     }
-    return this;
   }
 
   protected SortedDocValues getDocValues(LeafReaderContext context) throws IOException {
diff --git a/lucene/core/src/java/org/apache/lucene/document/SortKeyComparator.java b/lucene/core/src/java/org/apache/lucene/document/SortKeyComparator.java
index b97465e..8e24783 100644
--- a/lucene/core/src/java/org/apache/lucene/document/SortKeyComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/document/SortKeyComparator.java
@@ -22,13 +22,13 @@
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.SimpleFieldComparator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 
 /** Sorts a field by a provided dereferenced sort key. */
-class SortKeyComparator extends FieldComparator<BytesRef> {
+class SortKeyComparator extends SimpleFieldComparator<BytesRef> {
 
   // TODO: we could cache the sort keys...
   private final BytesRef[] values;
@@ -85,13 +85,12 @@
   }
 
   @Override
-  public FieldComparator<BytesRef> setNextReader(LeafReaderContext context) throws IOException {
+  public void doSetNextReader(LeafReaderContext context) throws IOException {
     docTerms = DocValues.getBinary(context.reader(), field);
     docsWithField = DocValues.getDocsWithField(context.reader(), field);
     if (docsWithField instanceof Bits.MatchAllBits) {
       docsWithField = null;
     }
-    return this;
   }
     
   @Override
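
The three comparator diffs above are the same mechanical migration: a FieldComparator
whose setNextReader returned this becomes a SimpleFieldComparator with a void
doSetNextReader hook, and the base class takes over returning the per-leaf comparator.
A hedged before/after sketch of the pattern, using the docTerms/field state from
SortKeyComparator above (other initialization abbreviated):

    // Before: subclass of FieldComparator returns itself for each leaf.
    @Override
    public FieldComparator<BytesRef> setNextReader(LeafReaderContext context) throws IOException {
      docTerms = DocValues.getBinary(context.reader(), field);
      return this;
    }

    // After: subclass of SimpleFieldComparator only initializes per-leaf state.
    @Override
    public void doSetNextReader(LeafReaderContext context) throws IOException {
      docTerms = DocValues.getBinary(context.reader(), field);
    }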
diff --git a/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java b/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
index 5436959..272c989 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BufferedUpdatesStream.java
@@ -23,6 +23,7 @@
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -34,7 +35,9 @@
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InfoStream;
+import org.apache.lucene.util.PriorityQueue;
 
 /* Tracks the stream of {@link BufferedDeletes}.
  * When DocumentsWriterPerThread flushes, its buffered
@@ -64,7 +67,7 @@
   private long nextGen = 1;
 
   // used only by assert
-  private Term lastDeleteTerm;
+  private BytesRef lastDeleteTerm;
 
   private final InfoStream infoStream;
   private final AtomicLong bytesUsed = new AtomicLong();
@@ -93,7 +96,7 @@
     numTerms.addAndGet(packet.numTermDeletes);
     bytesUsed.addAndGet(packet.bytesUsed);
     if (infoStream.isEnabled("BD")) {
-      infoStream.message("BD", "push deletes " + packet + " delGen=" + packet.delGen() + " packetCount=" + updates.size() + " totBytesUsed=" + bytesUsed.get());
+      infoStream.message("BD", "push deletes " + packet + " segmentPrivate?=" + packet.isSegmentPrivate + " delGen=" + packet.delGen() + " packetCount=" + updates.size() + " totBytesUsed=" + bytesUsed.get());
     }
     assert checkDeleteStats();
     return packet.delGen();
@@ -148,177 +151,167 @@
   /** Resolves the buffered deleted Term/Query/docIDs, into
    *  actual deleted docIDs in the liveDocs MutableBits for
    *  each SegmentReader. */
-  public synchronized ApplyDeletesResult applyDeletesAndUpdates(IndexWriter.ReaderPool readerPool, List<SegmentCommitInfo> infos) throws IOException {
+  public synchronized ApplyDeletesResult applyDeletesAndUpdates(IndexWriter.ReaderPool pool, List<SegmentCommitInfo> infos) throws IOException {
     final long t0 = System.currentTimeMillis();
 
-    if (infos.size() == 0) {
-      return new ApplyDeletesResult(false, nextGen++, null);
-    }
-
-    assert checkDeleteStats();
-
-    if (!any()) {
-      if (infoStream.isEnabled("BD")) {
-        infoStream.message("BD", "applyDeletes: no deletes; skipping");
-      }
-      return new ApplyDeletesResult(false, nextGen++, null);
-    }
-
-    if (infoStream.isEnabled("BD")) {
-      infoStream.message("BD", "applyDeletes: infos=" + infos + " packetCount=" + updates.size());
-    }
-
     final long gen = nextGen++;
 
-    List<SegmentCommitInfo> infos2 = new ArrayList<>();
-    infos2.addAll(infos);
-    Collections.sort(infos2, sortSegInfoByDelGen);
+    if (infos.size() == 0) {
+      return new ApplyDeletesResult(false, gen, null);
+    }
 
-    CoalescedUpdates coalescedDeletes = null;
-    boolean anyNewDeletes = false;
+    // We only init these on demand, when we find our first deletes that need to be applied:
+    SegmentState[] segStates = null;
 
-    int infosIDX = infos2.size()-1;
-    int delIDX = updates.size()-1;
+    long totDelCount = 0;
+    long totTermVisitedCount = 0;
 
-    List<SegmentCommitInfo> allDeleted = null;
+    boolean success = false;
 
-    while (infosIDX >= 0) {
-      //System.out.println("BD: cycle delIDX=" + delIDX + " infoIDX=" + infosIDX);
+    ApplyDeletesResult result = null;
 
-      final FrozenBufferedUpdates packet = delIDX >= 0 ? updates.get(delIDX) : null;
-      final SegmentCommitInfo info = infos2.get(infosIDX);
-      final long segGen = info.getBufferedDeletesGen();
+    try {
+      if (infoStream.isEnabled("BD")) {
+        infoStream.message("BD", String.format(Locale.ROOT, "applyDeletes: open segment readers took %d msec", System.currentTimeMillis()-t0));
+      }
 
-      if (packet != null && segGen < packet.delGen()) {
-//        System.out.println("  coalesce");
-        if (coalescedDeletes == null) {
-          coalescedDeletes = new CoalescedUpdates();
-        }
-        if (!packet.isSegmentPrivate) {
-          /*
-           * Only coalesce if we are NOT on a segment private del packet: the segment private del packet
-           * must only applied to segments with the same delGen.  Yet, if a segment is already deleted
-           * from the SI since it had no more documents remaining after some del packets younger than
-           * its segPrivate packet (higher delGen) have been applied, the segPrivate packet has not been
-           * removed.
-           */
-          coalescedDeletes.update(packet);
-        }
+      assert checkDeleteStats();
 
-        delIDX--;
-      } else if (packet != null && segGen == packet.delGen()) {
-        assert packet.isSegmentPrivate : "Packet and Segments deletegen can only match on a segment private del packet gen=" + segGen;
-        //System.out.println("  eq");
-
-        // Lock order: IW -> BD -> RP
-        assert readerPool.infoIsLive(info);
-        final ReadersAndUpdates rld = readerPool.get(info, true);
-        final SegmentReader reader = rld.getReader(IOContext.READ);
-        int delCount = 0;
-        final boolean segAllDeletes;
-        try {
-          final DocValuesFieldUpdates.Container dvUpdates = new DocValuesFieldUpdates.Container();
-          if (coalescedDeletes != null) {
-            //System.out.println("    del coalesced");
-            delCount += applyTermDeletes(coalescedDeletes.termsIterable(), rld, reader);
-            delCount += applyQueryDeletes(coalescedDeletes.queriesIterable(), rld, reader);
-            applyDocValuesUpdates(coalescedDeletes.numericDVUpdates, rld, reader, dvUpdates);
-            applyDocValuesUpdates(coalescedDeletes.binaryDVUpdates, rld, reader, dvUpdates);
-          }
-          //System.out.println("    del exact");
-          // Don't delete by Term here; DocumentsWriterPerThread
-          // already did that on flush:
-          delCount += applyQueryDeletes(packet.queriesIterable(), rld, reader);
-          applyDocValuesUpdates(Arrays.asList(packet.numericDVUpdates), rld, reader, dvUpdates);
-          applyDocValuesUpdates(Arrays.asList(packet.binaryDVUpdates), rld, reader, dvUpdates);
-          if (dvUpdates.any()) {
-            rld.writeFieldUpdates(info.info.dir, dvUpdates);
-          }
-          final int fullDelCount = rld.info.getDelCount() + rld.getPendingDeleteCount();
-          assert fullDelCount <= rld.info.info.getDocCount();
-          segAllDeletes = fullDelCount == rld.info.info.getDocCount();
-        } finally {
-          rld.release(reader);
-          readerPool.release(rld);
-        }
-        anyNewDeletes |= delCount > 0;
-
-        if (segAllDeletes) {
-          if (allDeleted == null) {
-            allDeleted = new ArrayList<>();
-          }
-          allDeleted.add(info);
-        }
-
+      if (!any()) {
         if (infoStream.isEnabled("BD")) {
-          infoStream.message("BD", "seg=" + info + " segGen=" + segGen + " segDeletes=[" + packet + "]; coalesced deletes=[" + (coalescedDeletes == null ? "null" : coalescedDeletes) + "] newDelCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
+          infoStream.message("BD", "applyDeletes: no segments; skipping");
         }
+        return new ApplyDeletesResult(false, gen, null);
+      }
 
-        if (coalescedDeletes == null) {
-          coalescedDeletes = new CoalescedUpdates();
-        }
-        
-        /*
-         * Since we are on a segment private del packet we must not
-         * update the coalescedDeletes here! We can simply advance to the 
-         * next packet and seginfo.
-         */
-        delIDX--;
-        infosIDX--;
-        info.setBufferedDeletesGen(gen);
+      if (infoStream.isEnabled("BD")) {
+        infoStream.message("BD", "applyDeletes: infos=" + infos + " packetCount=" + updates.size());
+      }
 
-      } else {
-        //System.out.println("  gt");
+      infos = sortByDelGen(infos);
 
-        if (coalescedDeletes != null) {
+      CoalescedUpdates coalescedUpdates = null;
+      int infosIDX = infos.size()-1;
+      int delIDX = updates.size()-1;
+
+      // Backwards merge sort the segment delGens with the packet delGens in the buffered stream:
+      while (infosIDX >= 0) {
+        final FrozenBufferedUpdates packet = delIDX >= 0 ? updates.get(delIDX) : null;
+        final SegmentCommitInfo info = infos.get(infosIDX);
+        final long segGen = info.getBufferedDeletesGen();
+
+        if (packet != null && segGen < packet.delGen()) {
+          if (!packet.isSegmentPrivate && packet.any()) {
+            /*
+             * Only coalesce if we are NOT on a segment private del packet: the segment private del packet
+             * must only apply to segments with the same delGen.  Yet, if a segment is already deleted
+             * from the SI since it had no more documents remaining after some del packets younger than
+             * its segPrivate packet (higher delGen) have been applied, the segPrivate packet has not been
+             * removed.
+             */
+            if (coalescedUpdates == null) {
+              coalescedUpdates = new CoalescedUpdates();
+            }
+            coalescedUpdates.update(packet);
+          }
+
+          delIDX--;
+        } else if (packet != null && segGen == packet.delGen()) {
+          assert packet.isSegmentPrivate : "Packet and segment delete gens can only match on a segment-private del packet, gen=" + segGen;
+
+          if (segStates == null) {
+            segStates = openSegmentStates(pool, infos);
+          }
+
+          SegmentState segState = segStates[infosIDX];
+
           // Lock order: IW -> BD -> RP
-          assert readerPool.infoIsLive(info);
-          final ReadersAndUpdates rld = readerPool.get(info, true);
-          final SegmentReader reader = rld.getReader(IOContext.READ);
+          assert pool.infoIsLive(info);
           int delCount = 0;
-          final boolean segAllDeletes;
-          try {
-            delCount += applyTermDeletes(coalescedDeletes.termsIterable(), rld, reader);
-            delCount += applyQueryDeletes(coalescedDeletes.queriesIterable(), rld, reader);
+          final DocValuesFieldUpdates.Container dvUpdates = new DocValuesFieldUpdates.Container();
+          if (coalescedUpdates != null) {
+            delCount += applyQueryDeletes(coalescedUpdates.queriesIterable(), segState);
+            applyDocValuesUpdates(coalescedUpdates.numericDVUpdates, segState, dvUpdates);
+            applyDocValuesUpdates(coalescedUpdates.binaryDVUpdates, segState, dvUpdates);
+          }
+          delCount += applyQueryDeletes(packet.queriesIterable(), segState);
+          applyDocValuesUpdates(Arrays.asList(packet.numericDVUpdates), segState, dvUpdates);
+          applyDocValuesUpdates(Arrays.asList(packet.binaryDVUpdates), segState, dvUpdates);
+          if (dvUpdates.any()) {
+            segState.rld.writeFieldUpdates(info.info.dir, dvUpdates);
+          }
+
+          totDelCount += delCount;
+
+          /*
+           * Since we are on a segment private del packet we must not
+           * update the coalescedUpdates here! We can simply advance to the 
+           * next packet and seginfo.
+           */
+          delIDX--;
+          infosIDX--;
+
+        } else {
+          if (coalescedUpdates != null) {
+            if (segStates == null) {
+              segStates = openSegmentStates(pool, infos);
+            }
+            SegmentState segState = segStates[infosIDX];
+            // Lock order: IW -> BD -> RP
+            assert pool.infoIsLive(info);
+            int delCount = 0;
+            delCount += applyQueryDeletes(coalescedUpdates.queriesIterable(), segState);
             DocValuesFieldUpdates.Container dvUpdates = new DocValuesFieldUpdates.Container();
-            applyDocValuesUpdates(coalescedDeletes.numericDVUpdates, rld, reader, dvUpdates);
-            applyDocValuesUpdates(coalescedDeletes.binaryDVUpdates, rld, reader, dvUpdates);
+            applyDocValuesUpdates(coalescedUpdates.numericDVUpdates, segState, dvUpdates);
+            applyDocValuesUpdates(coalescedUpdates.binaryDVUpdates, segState, dvUpdates);
             if (dvUpdates.any()) {
-              rld.writeFieldUpdates(info.info.dir, dvUpdates);
+              segState.rld.writeFieldUpdates(info.info.dir, dvUpdates);
             }
-            final int fullDelCount = rld.info.getDelCount() + rld.getPendingDeleteCount();
-            assert fullDelCount <= rld.info.info.getDocCount();
-            segAllDeletes = fullDelCount == rld.info.info.getDocCount();
-          } finally {
-            rld.release(reader);
-            readerPool.release(rld);
-          }
-          anyNewDeletes |= delCount > 0;
 
-          if (segAllDeletes) {
-            if (allDeleted == null) {
-              allDeleted = new ArrayList<>();
-            }
-            allDeleted.add(info);
+            totDelCount += delCount;
           }
 
-          if (infoStream.isEnabled("BD")) {
-            infoStream.message("BD", "seg=" + info + " segGen=" + segGen + " coalesced deletes=[" + coalescedDeletes + "] newDelCount=" + delCount + (segAllDeletes ? " 100% deleted" : ""));
-          }
+          infosIDX--;
         }
-        info.setBufferedDeletesGen(gen);
+      }
 
-        infosIDX--;
+      // Now apply all term deletes:
+      if (coalescedUpdates != null && coalescedUpdates.totalTermCount != 0) {
+        if (segStates == null) {
+          segStates = openSegmentStates(pool, infos);
+        }
+        totTermVisitedCount += applyTermDeletes(coalescedUpdates, segStates);
+      }
+
+      assert checkDeleteStats();
+
+      success = true;
+
+    } finally {
+      if (segStates != null) {
+        result = closeSegmentStates(pool, segStates, success, gen);
       }
     }
 
-    assert checkDeleteStats();
-    if (infoStream.isEnabled("BD")) {
-      infoStream.message("BD", "applyDeletes took " + (System.currentTimeMillis()-t0) + " msec");
+    if (result == null) {
+      result = new ApplyDeletesResult(false, gen, null);      
     }
-    // assert infos != segmentInfos || !any() : "infos=" + infos + " segmentInfos=" + segmentInfos + " any=" + any;
 
-    return new ApplyDeletesResult(anyNewDeletes, gen, allDeleted);
+    if (infoStream.isEnabled("BD")) {
+      infoStream.message("BD",
+                         String.format(Locale.ROOT,
+                                       "applyDeletes took %d msec for %d segments, %d newly deleted docs (query deletes), %d visited terms, allDeleted=%s",
+                                       System.currentTimeMillis()-t0, infos.size(), totDelCount, totTermVisitedCount, result.allDeleted));
+    }
+
+    return result;
+  }
+
+  private List<SegmentCommitInfo> sortByDelGen(List<SegmentCommitInfo> infos) {
+    infos = new ArrayList<>(infos);
+    // Smaller delGens come first:
+    Collections.sort(infos, sortSegInfoByDelGen);
+    return infos;
   }
 
   synchronized long getNextGen() {
@@ -376,79 +369,249 @@
     }
   }
 
-  // Delete by Term
-  private synchronized long applyTermDeletes(Iterable<Term> termsIter, ReadersAndUpdates rld, SegmentReader reader) throws IOException {
-    long delCount = 0;
-    Fields fields = reader.fields();
+  static class SegmentState {
+    final long delGen;
+    final ReadersAndUpdates rld;
+    final SegmentReader reader;
+    final int startDelCount;
 
-    TermsEnum termsEnum = null;
+    TermsEnum termsEnum;
+    DocsEnum docsEnum;
+    BytesRef term;
+    boolean any;
 
-    String currentField = null;
-    DocsEnum docs = null;
+    public SegmentState(IndexWriter.ReaderPool pool, SegmentCommitInfo info) throws IOException {
+      rld = pool.get(info, true);
+      startDelCount = rld.getPendingDeleteCount();
+      reader = rld.getReader(IOContext.READ);
+      delGen = info.getBufferedDeletesGen();
+    }
 
-    assert checkDeleteTerm(null);
-
-    boolean any = false;
-
-    //System.out.println(Thread.currentThread().getName() + " del terms reader=" + reader);
-    for (Term term : termsIter) {
-      // Since we visit terms sorted, we gain performance
-      // by re-using the same TermsEnum and seeking only
-      // forwards
-      if (!term.field().equals(currentField)) {
-        assert currentField == null || currentField.compareTo(term.field()) < 0;
-        currentField = term.field();
-        Terms terms = fields.terms(currentField);
-        if (terms != null) {
-          termsEnum = terms.iterator(termsEnum);
-        } else {
-          termsEnum = null;
-        }
+    public void finish(IndexWriter.ReaderPool pool) throws IOException {
+      try {
+        rld.release(reader);
+      } finally {
+        pool.release(rld);
       }
+    }
+  }
 
-      if (termsEnum == null) {
-        continue;
+  /** Does a merge sort by current term across all segments. */
+  static class SegmentQueue extends PriorityQueue<SegmentState> {
+    public SegmentQueue(int size) {
+      super(size);
+    }
+
+    @Override
+    protected boolean lessThan(SegmentState a, SegmentState b) {
+      return a.term.compareTo(b.term) < 0;
+    }
+  }
+
+  /** Opens SegmentReader and inits SegmentState for each segment. */
+  private SegmentState[] openSegmentStates(IndexWriter.ReaderPool pool, List<SegmentCommitInfo> infos) throws IOException {
+    int numReaders = infos.size();
+    SegmentState[] segStates = new SegmentState[numReaders];
+    boolean success = false;
+    try {
+      for(int i=0;i<numReaders;i++) {
+        segStates[i] = new SegmentState(pool, infos.get(i));
       }
-      assert checkDeleteTerm(term);
-
-      // System.out.println("  term=" + term);
-
-      if (termsEnum.seekExact(term.bytes())) {
-        // we don't need term frequencies for this
-        DocsEnum docsEnum = termsEnum.docs(rld.getLiveDocs(), docs, DocsEnum.FLAG_NONE);
-        //System.out.println("BDS: got docsEnum=" + docsEnum);
-
-        if (docsEnum != null) {
-          while (true) {
-            final int docID = docsEnum.nextDoc();
-            //System.out.println(Thread.currentThread().getName() + " del term=" + term + " doc=" + docID);
-            if (docID == DocIdSetIterator.NO_MORE_DOCS) {
-              break;
-            }   
-            if (!any) {
-              rld.initWritableLiveDocs();
-              any = true;
-            }
-            // NOTE: there is no limit check on the docID
-            // when deleting by Term (unlike by Query)
-            // because on flush we apply all Term deletes to
-            // each segment.  So all Term deleting here is
-            // against prior segments:
-            if (rld.delete(docID)) {
-              delCount++;
+      success = true;
+    } finally {
+      if (success == false) {
+        for(int j=0;j<numReaders;j++) {
+          if (segStates[j] != null) {
+            try {
+              segStates[j].finish(pool);
+            } catch (Throwable th) {
+              // suppress so we keep throwing original exc
             }
           }
         }
       }
     }
 
-    return delCount;
+    return segStates;
+  }
+
+  /** Close segment states previously opened with openSegmentStates. */
+  private ApplyDeletesResult closeSegmentStates(IndexWriter.ReaderPool pool, SegmentState[] segStates, boolean success, long gen) throws IOException {
+    int numReaders = segStates.length;
+    Throwable firstExc = null;
+    List<SegmentCommitInfo> allDeleted = null;
+    long totDelCount = 0;
+    for (int j=0;j<numReaders;j++) {
+      SegmentState segState = segStates[j];
+      if (success) {
+        totDelCount += segState.rld.getPendingDeleteCount() - segState.startDelCount;
+        segState.reader.getSegmentInfo().setBufferedDeletesGen(gen);
+        int fullDelCount = segState.rld.info.getDelCount() + segState.rld.getPendingDeleteCount();
+        assert fullDelCount <= segState.rld.info.info.getDocCount();
+        if (fullDelCount == segState.rld.info.info.getDocCount()) {
+          if (allDeleted == null) {
+            allDeleted = new ArrayList<>();
+          }
+          allDeleted.add(segState.reader.getSegmentInfo());
+        }
+      }
+      try {
+        segStates[j].finish(pool);
+      } catch (Throwable th) {
+        // keep only the first failure; if we are otherwise succeeding, it is rethrown below
+        if (firstExc == null) {
+          firstExc = th;
+        }
+      }
+    }
+
+    if (success) {
+      // Does nothing if firstExc is null:
+      IOUtils.reThrow(firstExc);
+    }
+
+    if (infoStream.isEnabled("BD")) {
+      infoStream.message("BD", "applyDeletes: " + totDelCount + " new deleted documents");
+    }
+
+    return new ApplyDeletesResult(totDelCount > 0, gen, allDeleted);      
+  }
+
+  /** Merge sorts the deleted terms and all segments to resolve terms to docIDs for deletion. */
+  private synchronized long applyTermDeletes(CoalescedUpdates updates, SegmentState[] segStates) throws IOException {
+
+    long startNS = System.nanoTime();
+
+    int numReaders = segStates.length;
+
+    long delTermVisitedCount = 0;
+    long segTermVisitedCount = 0;
+
+    FieldTermIterator iter = updates.termIterator();
+
+    String field = null;
+    SegmentQueue queue = null;
+
+    while (true) {
+
+      boolean newField = iter.next();
+
+      if (newField) {
+        field = iter.field();
+        if (field == null) {
+          // No more terms:
+          break;
+        }
+
+        queue = new SegmentQueue(numReaders);
+
+        long segTermCount = 0;
+        for(int i=0;i<numReaders;i++) {
+          SegmentState state = segStates[i];
+          Terms terms = state.reader.fields().terms(field);
+          if (terms != null) {
+            segTermCount += terms.size();
+            state.termsEnum = terms.iterator(state.termsEnum);
+            state.term = state.termsEnum.next();
+            if (state.term != null) {
+              queue.add(state);
+            }
+          }
+        }
+
+        assert checkDeleteTerm(null);
+      }
+
+      // Get next term to delete
+      BytesRef term = iter.term();
+      assert checkDeleteTerm(term);
+      delTermVisitedCount++;
+
+      long delGen = iter.delGen();
+
+      while (queue.size() != 0) {
+
+        // Get next term merged across all segments
+        SegmentState state = queue.top();
+        segTermVisitedCount++;
+
+        int cmp = term.compareTo(state.term);
+
+        if (cmp < 0) {
+          break;
+        } else if (cmp == 0) {
+          // fall through
+        } else {
+          TermsEnum.SeekStatus status = state.termsEnum.seekCeil(term);
+          if (status == TermsEnum.SeekStatus.FOUND) {
+            // fallthrough
+          } else {
+            if (status == TermsEnum.SeekStatus.NOT_FOUND) {
+              state.term = state.termsEnum.term();
+              queue.updateTop();
+            } else {
+              // No more terms in this segment
+              queue.pop();
+            }
+
+            continue;
+          }
+        }
+
+        assert state.delGen != delGen;
+
+        if (state.delGen < delGen) {
+
+          // we don't need term frequencies for this
+          state.docsEnum = state.termsEnum.docs(state.rld.getLiveDocs(), state.docsEnum, DocsEnum.FLAG_NONE);
+
+          assert state.docsEnum != null;
+
+          while (true) {
+            final int docID = state.docsEnum.nextDoc();
+            if (docID == DocIdSetIterator.NO_MORE_DOCS) {
+              break;
+            }
+            if (!state.any) {
+              state.rld.initWritableLiveDocs();
+              state.any = true;
+            }
+
+            // NOTE: there is no limit check on the docID
+            // when deleting by Term (unlike by Query)
+            // because on flush we apply all Term deletes to
+            // each segment.  So all Term deleting here is
+            // against prior segments:
+            state.rld.delete(docID);
+          }
+        }
+
+        state.term = state.termsEnum.next();
+        if (state.term == null) {
+          queue.pop();
+        } else {
+          queue.updateTop();
+        }
+      }
+    }
+
+    if (infoStream.isEnabled("BD")) {
+      infoStream.message("BD",
+                         String.format(Locale.ROOT, "applyTermDeletes took %.1f msec for %d segments and %d packets; %d del terms visited; %d seg terms visited",
+                                       (System.nanoTime()-startNS)/1000000.,
+                                       numReaders,
+                                       updates.terms.size(),
+                                       delTermVisitedCount, segTermVisitedCount));
+    }
+
+    return delTermVisitedCount;
   }
 
   // DocValues updates
   private synchronized void applyDocValuesUpdates(Iterable<? extends DocValuesUpdate> updates, 
-      ReadersAndUpdates rld, SegmentReader reader, DocValuesFieldUpdates.Container dvUpdatesContainer) throws IOException {
-    Fields fields = reader.fields();
+      SegmentState segState, DocValuesFieldUpdates.Container dvUpdatesContainer) throws IOException {
+    Fields fields = segState.reader.fields();
 
     // TODO: we can process the updates per DV field, from last to first so that
     // if multiple terms affect same document for the same field, we add an update
@@ -462,9 +625,8 @@
     
     String currentField = null;
     TermsEnum termsEnum = null;
-    DocsEnum docs = null;
+    DocsEnum docsEnum = null;
     
-    //System.out.println(Thread.currentThread().getName() + " numericDVUpdate reader=" + reader);
     for (DocValuesUpdate update : updates) {
       Term term = update.term;
       int limit = update.docIDUpto;
@@ -488,28 +650,24 @@
           termsEnum = terms.iterator(termsEnum);
         } else {
           termsEnum = null;
-          continue; // no terms in that field
         }
       }
 
       if (termsEnum == null) {
+        // no terms in this field
         continue;
       }
-      // System.out.println("  term=" + term);
 
       if (termsEnum.seekExact(term.bytes())) {
         // we don't need term frequencies for this
-        DocsEnum docsEnum = termsEnum.docs(rld.getLiveDocs(), docs, DocsEnum.FLAG_NONE);
-      
-        //System.out.println("BDS: got docsEnum=" + docsEnum);
+        docsEnum = termsEnum.docs(segState.rld.getLiveDocs(), docsEnum, DocsEnum.FLAG_NONE);
 
         DocValuesFieldUpdates dvUpdates = dvUpdatesContainer.getUpdates(update.field, update.type);
         if (dvUpdates == null) {
-          dvUpdates = dvUpdatesContainer.newUpdates(update.field, update.type, reader.maxDoc());
+          dvUpdates = dvUpdatesContainer.newUpdates(update.field, update.type, segState.reader.maxDoc());
         }
         int doc;
         while ((doc = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-          //System.out.println(Thread.currentThread().getName() + " numericDVUpdate term=" + term + " doc=" + docID);
           if (doc >= limit) {
             break; // no more docs that can be updated for this term
           }
@@ -529,29 +687,27 @@
   }
 
   // Delete by query
-  private static long applyQueryDeletes(Iterable<QueryAndLimit> queriesIter, ReadersAndUpdates rld, final SegmentReader reader) throws IOException {
+  private static long applyQueryDeletes(Iterable<QueryAndLimit> queriesIter, SegmentState segState) throws IOException {
     long delCount = 0;
-    final LeafReaderContext readerContext = reader.getContext();
-    boolean any = false;
+    final LeafReaderContext readerContext = segState.reader.getContext();
     for (QueryAndLimit ent : queriesIter) {
       Query query = ent.query;
       int limit = ent.limit;
-      final DocIdSet docs = new QueryWrapperFilter(query).getDocIdSet(readerContext, reader.getLiveDocs());
+      final DocIdSet docs = new QueryWrapperFilter(query).getDocIdSet(readerContext, segState.reader.getLiveDocs());
       if (docs != null) {
         final DocIdSetIterator it = docs.iterator();
         if (it != null) {
-          while(true)  {
+          while (true)  {
             int doc = it.nextDoc();
             if (doc >= limit) {
               break;
             }
 
-            if (!any) {
-              rld.initWritableLiveDocs();
-              any = true;
+            if (!segState.any) {
+              segState.rld.initWritableLiveDocs();
+              segState.any = true;
             }
-
-            if (rld.delete(doc)) {
+            if (segState.rld.delete(doc)) {
               delCount++;
             }
           }
@@ -563,12 +719,12 @@
   }
 
   // used only by assert
-  private boolean checkDeleteTerm(Term term) {
+  private boolean checkDeleteTerm(BytesRef term) {
     if (term != null) {
-      assert lastDeleteTerm == null || term.compareTo(lastDeleteTerm) > 0: "lastTerm=" + lastDeleteTerm + " vs term=" + term;
+      assert lastDeleteTerm == null || term.compareTo(lastDeleteTerm) >= 0: "lastTerm=" + lastDeleteTerm + " vs term=" + term;
     }
     // TODO: we re-use term now in our merged iterable, but we shouldn't clone, instead copy for this assert
-    lastDeleteTerm = term == null ? null : new Term(term.field(), BytesRef.deepCopyOf(term.bytes));
+    lastDeleteTerm = term == null ? null : BytesRef.deepCopyOf(term);
     return true;
   }
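
The rewritten applyTermDeletes above is a k-way merge: delete terms arrive sorted from the
FieldTermIterator, each segment's TermsEnum is sorted, and SegmentQueue keeps segments
ordered by their current term, so every delete term resolves with forward-only advances
and seekCeil skips. A hedged, self-contained sketch of the same pattern over plain sorted
lists (the JDK PriorityQueue here, where the patch uses Lucene's):

    import java.util.*;

    // Resolve sorted, deduplicated delete terms against N sorted segments.
    static List<String> resolveDeletes(List<String> deleteTerms, List<List<String>> segments) {
      class Cursor { Iterator<String> it; String term; }
      PriorityQueue<Cursor> queue = new PriorityQueue<>((a, b) -> a.term.compareTo(b.term));
      for (List<String> seg : segments) {
        Cursor c = new Cursor();
        c.it = seg.iterator();
        if (c.it.hasNext()) { c.term = c.it.next(); queue.add(c); }
      }
      List<String> matched = new ArrayList<>();
      for (String del : deleteTerms) {
        while (!queue.isEmpty()) {
          Cursor top = queue.peek();
          int cmp = del.compareTo(top.term);
          if (cmp < 0) break;                  // this delete term precedes all segment terms
          queue.poll();
          if (cmp == 0) matched.add(top.term); // the real code deletes matching docs here
          // advance the segment cursor (the real code seekCeils instead of scanning)
          if (top.it.hasNext()) { top.term = top.it.next(); queue.add(top); }
        }
      }
      return matched;
    }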
 
diff --git a/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java b/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java
index 61ecf79..747d730 100644
--- a/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java
+++ b/lucene/core/src/java/org/apache/lucene/index/CoalescedUpdates.java
@@ -18,34 +18,36 @@
  */
 
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lucene.search.Query;
 import org.apache.lucene.index.BufferedUpdatesStream.QueryAndLimit;
 import org.apache.lucene.index.DocValuesUpdate.BinaryDocValuesUpdate;
 import org.apache.lucene.index.DocValuesUpdate.NumericDocValuesUpdate;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.MergedIterator;
 
 class CoalescedUpdates {
   final Map<Query,Integer> queries = new HashMap<>();
-  final List<Iterable<Term>> iterables = new ArrayList<>();
+  final List<PrefixCodedTerms> terms = new ArrayList<>();
   final List<NumericDocValuesUpdate> numericDVUpdates = new ArrayList<>();
   final List<BinaryDocValuesUpdate> binaryDVUpdates = new ArrayList<>();
+  int totalTermCount;
   
   @Override
   public String toString() {
     // note: we could add/collect more debugging information
-    return "CoalescedUpdates(termSets=" + iterables.size() + ",queries="
-        + queries.size() + ",numericDVUpdates=" + numericDVUpdates.size()
-        + ",binaryDVUpdates=" + binaryDVUpdates.size() + ")";
+    return "CoalescedUpdates(termSets=" + terms.size()
+      + ",totalTermCount=" + totalTermCount
+      + ",queries=" + queries.size() + ",numericDVUpdates=" + numericDVUpdates.size()
+      + ",binaryDVUpdates=" + binaryDVUpdates.size() + ")";
   }
 
   void update(FrozenBufferedUpdates in) {
-    iterables.add(in.termsIterable());
+    totalTermCount += in.termCount;
+    terms.add(in.terms);
 
     for (int queryIdx = 0; queryIdx < in.queries.length; queryIdx++) {
       final Query query = in.queries[queryIdx];
@@ -65,18 +67,12 @@
     }
   }
 
- public Iterable<Term> termsIterable() {
-   return new Iterable<Term>() {
-     @SuppressWarnings({"unchecked","rawtypes"})
-     @Override
-     public Iterator<Term> iterator() {
-       Iterator<Term> subs[] = new Iterator[iterables.size()];
-       for (int i = 0; i < iterables.size(); i++) {
-         subs[i] = iterables.get(i).iterator();
-       }
-       return new MergedIterator<>(subs);
-     }
-   };
+  public FieldTermIterator termIterator() {
+    if (terms.size() == 1) {
+      return terms.get(0).iterator();
+    } else {
+      return new MergedPrefixCodedTermsIterator(terms);
+    }
   }
 
   public Iterable<QueryAndLimit> queriesIterable() {
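
Design note on termIterator() above: most flush cycles coalesce a single frozen packet, so
the single-list branch returns the PrefixCodedTerms iterator directly instead of paying
merge overhead for one source. The dispatch pattern in general, hedged form (helper name
hypothetical, assuming java.util.* imports):

    // Skip the merge machinery when there is only one sorted source.
    static <T> Iterator<T> mergedIterator(List<? extends Iterable<T>> sources, Comparator<T> cmp) {
      if (sources.size() == 1) {
        return sources.get(0).iterator();  // fast path: no heap, no comparisons
      }
      class Cur { Iterator<T> it; T head; }
      PriorityQueue<Cur> heap = new PriorityQueue<>((a, b) -> cmp.compare(a.head, b.head));
      for (Iterable<T> s : sources) {
        Cur c = new Cur();
        c.it = s.iterator();
        if (c.it.hasNext()) { c.head = c.it.next(); heap.add(c); }
      }
      return new Iterator<T>() {
        @Override public boolean hasNext() { return !heap.isEmpty(); }
        @Override public T next() {
          Cur top = heap.poll();
          T result = top.head;
          if (top.it.hasNext()) { top.head = top.it.next(); heap.add(top); }
          return result;
        }
      };
    }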
diff --git a/lucene/core/src/java/org/apache/lucene/index/CodecReader.java b/lucene/core/src/java/org/apache/lucene/index/CodecReader.java
new file mode 100644
index 0000000..701e65f
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/CodecReader.java
@@ -0,0 +1,379 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.codecs.DocValuesProducer;
+import org.apache.lucene.codecs.FieldsProducer;
+import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.StoredFieldsReader;
+import org.apache.lucene.codecs.TermVectorsReader;
+import org.apache.lucene.util.Accountable;
+import org.apache.lucene.util.Accountables;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.CloseableThreadLocal;
+import org.apache.lucene.util.IOUtils;
+
+/**
+ * LeafReader implemented by codec APIs.
+ */
+public abstract class CodecReader extends LeafReader implements Accountable {
+  
+  /** Sole constructor. (For invocation by subclass 
+   * constructors, typically implicit.) */
+  protected CodecReader() {}
+  
+  /** 
+   * Expert: retrieve thread-private StoredFieldsReader
+   * @lucene.internal 
+   */
+  public abstract StoredFieldsReader getFieldsReader();
+  
+  /** 
+   * Expert: retrieve thread-private TermVectorsReader
+   * @lucene.internal 
+   */
+  public abstract TermVectorsReader getTermVectorsReader();
+  
+  /** 
+   * Expert: retrieve underlying NormsProducer
+   * @lucene.internal 
+   */
+  public abstract NormsProducer getNormsReader();
+  
+  /** 
+   * Expert: retrieve underlying DocValuesProducer
+   * @lucene.internal 
+   */
+  public abstract DocValuesProducer getDocValuesReader();
+  
+  /**
+   * Expert: retrieve underlying FieldsProducer
+   * @lucene.internal
+   */
+  public abstract FieldsProducer getPostingsReader();
+  
+  @Override
+  public final void document(int docID, StoredFieldVisitor visitor) throws IOException {
+    checkBounds(docID);
+    getFieldsReader().visitDocument(docID, visitor);
+  }
+  
+  @Override
+  public final Fields getTermVectors(int docID) throws IOException {
+    TermVectorsReader termVectorsReader = getTermVectorsReader();
+    if (termVectorsReader == null) {
+      return null;
+    }
+    checkBounds(docID);
+    return termVectorsReader.get(docID);
+  }
+  
+  private void checkBounds(int docID) {
+    if (docID < 0 || docID >= maxDoc()) {       
+      throw new IndexOutOfBoundsException("docID must be >= 0 and < maxDoc=" + maxDoc() + " (got docID=" + docID + ")");
+    }
+  }
+  
+  @Override
+  public final Fields fields() {
+    return getPostingsReader();
+  }
+  
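+  // Cache one doc values / docs-with-field instance per field, per thread:
+  // the producer-returned instances are stateful and not thread-safe, and
+  // re-creating them on every access would be wasteful.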
+  final CloseableThreadLocal<Map<String,Object>> docValuesLocal = new CloseableThreadLocal<Map<String,Object>>() {
+    @Override
+    protected Map<String,Object> initialValue() {
+      return new HashMap<>();
+    }
+  };
+
+  final CloseableThreadLocal<Map<String,Bits>> docsWithFieldLocal = new CloseableThreadLocal<Map<String,Bits>>() {
+    @Override
+    protected Map<String,Bits> initialValue() {
+      return new HashMap<>();
+    }
+  };
+  
+  // returns the FieldInfo that corresponds to the given field and type, or
+  // null if the field does not exist or is not indexed with the requested
+  // DocValuesType.
+  private FieldInfo getDVField(String field, DocValuesType type) {
+    FieldInfo fi = getFieldInfos().fieldInfo(field);
+    if (fi == null) {
+      // Field does not exist
+      return null;
+    }
+    if (fi.getDocValuesType() == DocValuesType.NONE) {
+      // Field was not indexed with doc values
+      return null;
+    }
+    if (fi.getDocValuesType() != type) {
+      // Field DocValues are different than requested type
+      return null;
+    }
+
+    return fi;
+  }
+  
+  @Override
+  public final NumericDocValues getNumericDocValues(String field) throws IOException {
+    ensureOpen();
+    Map<String,Object> dvFields = docValuesLocal.get();
+
+    Object previous = dvFields.get(field);
+    if (previous instanceof NumericDocValues) {
+      return (NumericDocValues) previous;
+    } else {
+      FieldInfo fi = getDVField(field, DocValuesType.NUMERIC);
+      if (fi == null) {
+        return null;
+      }
+      NumericDocValues dv = getDocValuesReader().getNumeric(fi);
+      dvFields.put(field, dv);
+      return dv;
+    }
+  }
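A quick aside on the caching just above: because the map lives in a CloseableThreadLocal, repeated lookups on one thread return the same instance, while each other thread builds its own. A minimal sketch, assuming a CodecReader named reader and a field "price" indexed with numeric doc values:

    NumericDocValues a = reader.getNumericDocValues("price");
    NumericDocValues b = reader.getNumericDocValues("price");
    assert a == b;  // same thread, so the second call hits the thread-local cache
    // another thread would get its own instance from its own per-thread map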
+
+  @Override
+  public final Bits getDocsWithField(String field) throws IOException {
+    ensureOpen();
+    Map<String,Bits> dvFields = docsWithFieldLocal.get();
+
+    Bits previous = dvFields.get(field);
+    if (previous != null) {
+      return previous;
+    } else {
+      FieldInfo fi = getFieldInfos().fieldInfo(field);
+      if (fi == null) {
+        // Field does not exist
+        return null;
+      }
+      if (fi.getDocValuesType() == DocValuesType.NONE) {
+        // Field was not indexed with doc values
+        return null;
+      }
+      Bits dv = getDocValuesReader().getDocsWithField(fi);
+      dvFields.put(field, dv);
+      return dv;
+    }
+  }
+
+  @Override
+  public final BinaryDocValues getBinaryDocValues(String field) throws IOException {
+    ensureOpen();
+    FieldInfo fi = getDVField(field, DocValuesType.BINARY);
+    if (fi == null) {
+      return null;
+    }
+
+    Map<String,Object> dvFields = docValuesLocal.get();
+
+    BinaryDocValues dvs = (BinaryDocValues) dvFields.get(field);
+    if (dvs == null) {
+      dvs = getDocValuesReader().getBinary(fi);
+      dvFields.put(field, dvs);
+    }
+
+    return dvs;
+  }
+
+  @Override
+  public final SortedDocValues getSortedDocValues(String field) throws IOException {
+    ensureOpen();
+    Map<String,Object> dvFields = docValuesLocal.get();
+    
+    Object previous = dvFields.get(field);
+    if (previous instanceof SortedDocValues) {
+      return (SortedDocValues) previous;
+    } else {
+      FieldInfo fi = getDVField(field, DocValuesType.SORTED);
+      if (fi == null) {
+        return null;
+      }
+      SortedDocValues dv = getDocValuesReader().getSorted(fi);
+      dvFields.put(field, dv);
+      return dv;
+    }
+  }
+  
+  @Override
+  public final SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException {
+    ensureOpen();
+    Map<String,Object> dvFields = docValuesLocal.get();
+
+    Object previous = dvFields.get(field);
+    if (previous instanceof SortedNumericDocValues) {
+      return (SortedNumericDocValues) previous;
+    } else {
+      FieldInfo fi = getDVField(field, DocValuesType.SORTED_NUMERIC);
+      if (fi == null) {
+        return null;
+      }
+      SortedNumericDocValues dv = getDocValuesReader().getSortedNumeric(fi);
+      dvFields.put(field, dv);
+      return dv;
+    }
+  }
+
+  @Override
+  public final SortedSetDocValues getSortedSetDocValues(String field) throws IOException {
+    ensureOpen();
+    Map<String,Object> dvFields = docValuesLocal.get();
+    
+    Object previous = dvFields.get(field);
+    if (previous instanceof SortedSetDocValues) {
+      return (SortedSetDocValues) previous;
+    } else {
+      FieldInfo fi = getDVField(field, DocValuesType.SORTED_SET);
+      if (fi == null) {
+        return null;
+      }
+      SortedSetDocValues dv = getDocValuesReader().getSortedSet(fi);
+      dvFields.put(field, dv);
+      return dv;
+    }
+  }
+  
+  final CloseableThreadLocal<Map<String,NumericDocValues>> normsLocal = new CloseableThreadLocal<Map<String,NumericDocValues>>() {
+    @Override
+    protected Map<String,NumericDocValues> initialValue() {
+      return new HashMap<>();
+    }
+  };
+  
+  @Override
+  public final NumericDocValues getNormValues(String field) throws IOException {
+    ensureOpen();
+    Map<String,NumericDocValues> normFields = normsLocal.get();
+
+    NumericDocValues norms = normFields.get(field);
+    if (norms != null) {
+      return norms;
+    } else {
+      FieldInfo fi = getFieldInfos().fieldInfo(field);
+      if (fi == null || !fi.hasNorms()) {
+        // Field does not exist or does not index norms
+        return null;
+      }
+      norms = getNormsReader().getNorms(fi);
+      normFields.put(field, norms);
+      return norms;
+    }
+  }
+
+  @Override
+  protected void doClose() throws IOException {
+    IOUtils.close(docValuesLocal, docsWithFieldLocal, normsLocal);
+  }
+  
+  @Override
+  public long ramBytesUsed() {
+    ensureOpen();
+    
+    // terms/postings
+    long ramBytesUsed = getPostingsReader().ramBytesUsed();
+    
+    // norms
+    if (getNormsReader() != null) {
+      ramBytesUsed += getNormsReader().ramBytesUsed();
+    }
+    
+    // docvalues
+    if (getDocValuesReader() != null) {
+      ramBytesUsed += getDocValuesReader().ramBytesUsed();
+    }
+    
+    // stored fields
+    if (getFieldsReader() != null) {
+      ramBytesUsed += getFieldsReader().ramBytesUsed();
+    }
+    
+    // term vectors
+    if (getTermVectorsReader() != null) {
+      ramBytesUsed += getTermVectorsReader().ramBytesUsed();
+    }
+    
+    return ramBytesUsed;
+  }
+  
+  @Override
+  public Collection<Accountable> getChildResources() {
+    ensureOpen();
+    List<Accountable> resources = new ArrayList<>();
+    
+    // terms/postings
+    resources.add(Accountables.namedAccountable("postings", getPostingsReader()));
+    
+    // norms
+    if (getNormsReader() != null) {
+      resources.add(Accountables.namedAccountable("norms", getNormsReader()));
+    }
+    
+    // docvalues
+    if (getDocValuesReader() != null) {
+      resources.add(Accountables.namedAccountable("docvalues", getDocValuesReader()));
+    }
+    
+    // stored fields
+    if (getFieldsReader() != null) {
+      resources.add(Accountables.namedAccountable("stored fields", getFieldsReader()));
+    }
+
+    // term vectors
+    if (getTermVectorsReader() != null) {
+      resources.add(Accountables.namedAccountable("term vectors", getTermVectorsReader()));
+    }
+    
+    return Collections.unmodifiableList(resources);
+  }
+
+  @Override
+  public void checkIntegrity() throws IOException {
+    ensureOpen();
+    
+    // terms/postings
+    getPostingsReader().checkIntegrity();
+    
+    // norms
+    if (getNormsReader() != null) {
+      getNormsReader().checkIntegrity();
+    }
+    
+    // docvalues
+    if (getDocValuesReader() != null) {
+      getDocValuesReader().checkIntegrity();
+    }
+
+    // stored fields
+    if (getFieldsReader() != null) {
+      getFieldsReader().checkIntegrity();
+    }
+    
+    // term vectors
+    if (getTermVectorsReader() != null) {
+      getTermVectorsReader().checkIntegrity();
+    }
+  }
+}
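Since CodecReader implements Accountable and names its children via Accountables.namedAccountable, callers can walk the resource tree for diagnostics. A hedged sketch (printRamTree is ours, not from the patch; Accountable is org.apache.lucene.util.Accountable):

    static void printRamTree(Accountable node, int depth) {
      for (int i = 0; i < depth; i++) {
        System.out.print("  ");
      }
      System.out.println(node + ": " + node.ramBytesUsed() + " bytes");
      for (Accountable child : node.getChildResources()) {
        printRamTree(child, depth + 1);  // e.g. "postings", "norms", "docvalues"
      }
    }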
diff --git a/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java b/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
index 78633f5..e7418de 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ConcurrentMergeScheduler.java
@@ -19,9 +19,11 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Comparator;
 import java.util.List;
+import java.util.Locale;
 
+import org.apache.lucene.index.MergePolicy.OneMerge;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.IOUtils;
@@ -54,18 +56,17 @@
  *  settings for spinning or solid state disks for such
  *  operating systems, use {@link #setDefaultMaxMergesAndThreads(boolean)}.
  */ 
 public class ConcurrentMergeScheduler extends MergeScheduler {
 
   /** Dynamic default for {@code maxThreadCount} and {@code maxMergeCount},
    *  used to detect whether the index is backed by an SSD or rotational disk and
    *  set {@code maxThreadCount} accordingly.  If it's an SSD,
-   *  {@code maxThreadCount} is set to {@code max(1, min(3, cpuCoreCount/2))},
+   *  {@code maxThreadCount} is set to {@code max(1, min(4, cpuCoreCount/2))},
    *  otherwise 1.  Note that detection only currently works on
    *  Linux; other platforms will assume the index is not on an SSD. */
   public static final int AUTO_DETECT_MERGES_AND_THREADS = -1;
 
-  private int mergeThreadPriority = -1;
-
   /** List of currently active {@link MergeThread}s. */
   protected final List<MergeThread> mergeThreads = new ArrayList<>();
   
@@ -81,16 +82,30 @@
   // throttling the incoming threads
   private int maxMergeCount = AUTO_DETECT_MERGES_AND_THREADS;
 
-  /** {@link Directory} that holds the index. */
-  protected Directory dir;
-
-  /** {@link IndexWriter} that owns this instance. */
-  protected IndexWriter writer;
-
   /** How many {@link MergeThread}s have kicked off (this is used
    *  to name them). */
   protected int mergeThreadCount;
 
+  /** Floor for IO write rate limit (we will never go any lower than this) */
+  private static final double MIN_MERGE_MB_PER_SEC = 5.0;
+
+  /** Ceiling for IO write rate limit (we will never go any higher than this) */
+  private static final double MAX_MERGE_MB_PER_SEC = 10240.0;
+
+  /** Initial value for IO write rate limit when doAutoIOThrottle is true */
+  private static final double START_MB_PER_SEC = 20.0;
+
+  /** Merges below this size are not counted toward maxThreadCount, i.e. they can freely run in their own thread (up until maxMergeCount). */
+  private static final double MIN_BIG_MERGE_MB = 50.0;
+
+  /** Current IO writes throttle rate */
+  protected double targetMBPerSec = START_MB_PER_SEC;
+
+  /** true if we should rate-limit writes for each merge */
+  private boolean doAutoIOThrottle = true;
+
+  private double forceMergeMBPerSec = Double.POSITIVE_INFINITY;
+
   /** Sole constructor, with all settings set to default
    *  values. */
   public ConcurrentMergeScheduler() {
@@ -142,10 +157,48 @@
   public synchronized void setDefaultMaxMergesAndThreads(boolean spins) {
     if (spins) {
       maxThreadCount = 1;
-      maxMergeCount = 2;
+      maxMergeCount = 6;
     } else {
-      maxThreadCount = Math.max(1, Math.min(3, Runtime.getRuntime().availableProcessors()/2));
-      maxMergeCount = maxThreadCount+2;
+      maxThreadCount = Math.max(1, Math.min(4, Runtime.getRuntime().availableProcessors()/2));
+      maxMergeCount = maxThreadCount+5;
+    }
+  }
+
+  /** Set the per-merge IO throttle rate for forced merges (default: {@code Double.POSITIVE_INFINITY}). */
+  public synchronized void setForceMergeMBPerSec(double v) {
+    forceMergeMBPerSec = v;
+    updateMergeThreads();
+  }
+
+  /** Get the per-merge IO throttle rate for forced merges. */
+  public synchronized double getForceMergeMBPerSec() {
+    return forceMergeMBPerSec;
+  }
+
+  /** Turn on dynamic IO throttling, to adaptively rate-limit merge
+   *  writes (bytes/sec) to the minimal rate necessary so merges do not
+   *  fall behind.  By default this is enabled. */
+  public synchronized void enableAutoIOThrottle() {
+    doAutoIOThrottle = true;
+    targetMBPerSec = START_MB_PER_SEC;
+    updateMergeThreads();
+  }
+
+  /** Turn off auto IO throttling.
+   *
+   * @see #enableAutoIOThrottle */
+  public synchronized void disableAutoIOThrottle() {
+    doAutoIOThrottle = false;
+    updateMergeThreads();
+  }
+
+  /** Returns the currently set per-merge IO writes rate limit, if {@link #enableAutoIOThrottle}
+   *  was called, else {@code Double.POSITIVE_INFINITY}. */
+  public synchronized double getIORateLimitMBPerSec() {
+    if (doAutoIOThrottle) {
+      return targetMBPerSec;
+    } else {
+      return Double.POSITIVE_INFINITY;
     }
   }
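For reference, an application wires these knobs in through IndexWriterConfig. A hedged usage sketch, assuming the stock IndexWriterConfig(Analyzer) constructor and existing analyzer and dir variables; the 500.0 cap is purely illustrative:

    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
    cms.disableAutoIOThrottle();          // background merges write at full speed
    cms.setForceMergeMBPerSec(500.0);     // but cap forced merges at 500 MB/sec
    IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
    iwc.setMergeScheduler(cms);
    IndexWriter writer = new IndexWriter(dir, iwc);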
 
@@ -161,48 +214,27 @@
     return maxMergeCount;
   }
 
-  /** Return the priority that merge threads run at.  By
-   *  default the priority is 1 plus the priority of (ie,
-   *  slightly higher priority than) the first thread that
-   *  calls merge. */
-  public synchronized int getMergeThreadPriority() {
-    initMergeThreadPriority();
-    return mergeThreadPriority;
-  }
-
-  /** Set the base priority that merge threads run at.
-   *  Note that CMS may increase priority of some merge
-   *  threads beyond this base priority.  It's best not to
-   *  set this any higher than
-   *  Thread.MAX_PRIORITY-maxThreadCount, so that CMS has
-   *  room to set relative priority among threads.  */
-  public synchronized void setMergeThreadPriority(int pri) {
-    if (pri > Thread.MAX_PRIORITY || pri < Thread.MIN_PRIORITY)
-      throw new IllegalArgumentException("priority must be in range " + Thread.MIN_PRIORITY + " .. " + Thread.MAX_PRIORITY + " inclusive");
-    mergeThreadPriority = pri;
-    updateMergeThreads();
-  }
-
-  /** Sorts {@link MergeThread}s; larger merges come first. */
-  protected static final Comparator<MergeThread> compareByMergeDocCount = new Comparator<MergeThread>() {
-    @Override
-    public int compare(MergeThread t1, MergeThread t2) {
-      final MergePolicy.OneMerge m1 = t1.getCurrentMerge();
-      final MergePolicy.OneMerge m2 = t2.getCurrentMerge();
-      
-      final int c1 = m1 == null ? Integer.MAX_VALUE : m1.totalDocCount;
-      final int c2 = m2 == null ? Integer.MAX_VALUE : m2.totalDocCount;
-
-      return c2 - c1;
+  /** Removes the calling thread from the active merge threads. */
+  synchronized void removeMergeThread() {
+    Thread currentThread = Thread.currentThread();
+    // Paranoia: don't trust Thread.equals:
+    for(int i=0;i<mergeThreads.size();i++) {
+      if (mergeThreads.get(i) == currentThread) {
+        mergeThreads.remove(i);
+        return;
+      }
     }
-  };
+
+    assert false: "merge thread " + currentThread + " was not found";
+  }
 
   /**
-   * Called whenever the running merges have changed, to pause and unpause
-   * threads. This method sorts the merge threads by their merge size in
+   * Called whenever the running merges have changed, to set merge IO limits.
+   * This method sorts the merge threads by their merge size in
    * descending order and then pauses/unpauses threads from first to last --
    * that way, smaller merges are guaranteed to run before larger ones.
    */
   protected synchronized void updateMergeThreads() {
 
     // Only look at threads that are alive & not in the
@@ -217,93 +249,121 @@
         mergeThreads.remove(threadIdx);
         continue;
       }
-      if (mergeThread.getCurrentMerge() != null) {
-        activeMerges.add(mergeThread);
-      }
+      activeMerges.add(mergeThread);
       threadIdx++;
     }
 
-    // Sort the merge threads in descending order.
-    CollectionUtil.timSort(activeMerges, compareByMergeDocCount);
-    
-    int pri = mergeThreadPriority;
+    // Sort the merge threads, largest first:
+    CollectionUtil.timSort(activeMerges);
+
     final int activeMergeCount = activeMerges.size();
-    for (threadIdx=0;threadIdx<activeMergeCount;threadIdx++) {
-      final MergeThread mergeThread = activeMerges.get(threadIdx);
-      final MergePolicy.OneMerge merge = mergeThread.getCurrentMerge();
-      if (merge == null) { 
-        continue;
+
+    int bigMergeCount = 0;
+
+    for (threadIdx=activeMergeCount-1;threadIdx>=0;threadIdx--) {
+      MergeThread mergeThread = activeMerges.get(threadIdx);
+      if (mergeThread.merge.estimatedMergeBytes > MIN_BIG_MERGE_MB*1024*1024) {
+        bigMergeCount = 1+threadIdx;
+        break;
       }
+    }
+
+    long now = System.nanoTime();
+
+    StringBuilder message;
+    if (verbose()) {
+      message = new StringBuilder();
+      message.append(String.format(Locale.ROOT, "updateMergeThreads ioThrottle=%s targetMBPerSec=%.1f MB/sec", doAutoIOThrottle, targetMBPerSec));
+    } else {
+      message = null;
+    }
+
+    for (threadIdx=0;threadIdx<activeMergeCount;threadIdx++) {
+      MergeThread mergeThread = activeMerges.get(threadIdx);
+
+      OneMerge merge = mergeThread.merge;
 
       // pause the thread if maxThreadCount is smaller than the number of merge threads.
-      final boolean doPause = threadIdx < activeMergeCount - maxThreadCount;
+      final boolean doPause = threadIdx < bigMergeCount - maxThreadCount;
 
+      double newMBPerSec;
+      if (doPause) {
+        newMBPerSec = 0.0;
+      } else if (merge.maxNumSegments != -1) {
+        newMBPerSec = forceMergeMBPerSec;
+      } else if (doAutoIOThrottle == false) {
+        newMBPerSec = Double.POSITIVE_INFINITY;
+      } else if (merge.estimatedMergeBytes < MIN_BIG_MERGE_MB*1024*1024) {
+        // Don't rate limit small merges:
+        newMBPerSec = Double.POSITIVE_INFINITY;
+      } else {
+        newMBPerSec = targetMBPerSec;
+      }
+
+      double curMBPerSec = merge.rateLimiter.getMBPerSec();
+      
       if (verbose()) {
-        if (doPause != merge.getPause()) {
-          if (doPause) {
-            message("pause thread " + mergeThread.getName());
+        long mergeStartNS = merge.mergeStartNS;
+        if (mergeStartNS == -1) {
+          // IndexWriter didn't start the merge yet:
+          mergeStartNS = now;
+        }
+        message.append('\n');
+        message.append(String.format(Locale.ROOT, "merge thread %s estSize=%.1f MB (written=%.1f MB) runTime=%.1fs (stopped=%.1fs, paused=%.1fs) rate=%s\n",
+                                     mergeThread.getName(),
+                                     bytesToMB(merge.estimatedMergeBytes),
+                                     bytesToMB(merge.rateLimiter.totalBytesWritten),
+                                     nsToSec(now - mergeStartNS),
+                                     nsToSec(merge.rateLimiter.getTotalStoppedNS()),
+                                     nsToSec(merge.rateLimiter.getTotalPausedNS()),
+                                     rateToString(merge.rateLimiter.getMBPerSec())));
+
+        if (newMBPerSec != curMBPerSec) {
+          if (newMBPerSec == 0.0) {
+            message.append("  now stop");
+          } else if (curMBPerSec == 0.0) {
+            if (newMBPerSec == Double.POSITIVE_INFINITY) {
+              message.append("  now resume");
+            } else {
+              message.append(String.format(Locale.ROOT, "  now resume to %.1f MB/sec", newMBPerSec));
+            }
           } else {
-            message("unpause thread " + mergeThread.getName());
+            message.append(String.format(Locale.ROOT, "  now change from %.1f MB/sec to %.1f MB/sec", curMBPerSec, newMBPerSec));
           }
+        } else if (curMBPerSec == 0.0) {
+          message.append("  leave stopped");
+        } else {
+          message.append(String.format(Locale.ROOT, "  leave running at %.1f MB/sec", curMBPerSec));
         }
       }
-      if (doPause != merge.getPause()) {
-        merge.setPause(doPause);
-      }
 
-      if (!doPause) {
-        if (verbose()) {
-          message("set priority of merge thread " + mergeThread.getName() + " to " + pri);
-        }
-        mergeThread.setThreadPriority(pri);
-        pri = Math.min(Thread.MAX_PRIORITY, 1+pri);
-      }
+      merge.rateLimiter.setMBPerSec(newMBPerSec);
+    }
+    if (verbose()) {
+      message(message.toString());
     }
   }
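A worked example of the pause rule above, under stated assumptions (threads sorted largest-first, maxThreadCount=4, six merges over the 50 MB "big" threshold); the loop is ours, not from the patch:

    int maxThreadCount = 4;
    int bigMergeCount = 6;  // merges with estimatedMergeBytes > MIN_BIG_MERGE_MB
    for (int threadIdx = 0; threadIdx < bigMergeCount; threadIdx++) {
      boolean doPause = threadIdx < bigMergeCount - maxThreadCount;
      // threadIdx 0 and 1 (the two largest) pause; 2..5 keep running,
      // and merges under the 50 MB threshold are never paused at all
    }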
 
-  /**
-   * Returns true if verbosing is enabled. This method is usually used in
-   * conjunction with {@link #message(String)}, like that:
-   * 
-   * <pre class="prettyprint">
-   * if (verbose()) {
-   *   message(&quot;your message&quot;);
-   * }
-   * </pre>
-   */
-  protected boolean verbose() {
-    return writer != null && writer.infoStream.isEnabled("CMS");
-  }
-  
-  /**
-   * Outputs the given message - this method assumes {@link #verbose()} was
-   * called and returned true.
-   */
-  protected void message(String message) {
-    writer.infoStream.message("CMS", message);
-  }
-
-  private synchronized void initMergeThreadPriority() {
-    if (mergeThreadPriority == -1) {
-      // Default to slightly higher priority than our
-      // calling thread
-      mergeThreadPriority = 1+Thread.currentThread().getPriority();
-      if (mergeThreadPriority > Thread.MAX_PRIORITY)
-        mergeThreadPriority = Thread.MAX_PRIORITY;
-    }
-  }
-
-  private synchronized void initMaxMergesAndThreads() throws IOException {
+  private synchronized void initDynamicDefaults(IndexWriter writer) throws IOException {
     if (maxThreadCount == AUTO_DETECT_MERGES_AND_THREADS) {
-      assert writer != null;
       boolean spins = IOUtils.spins(writer.getDirectory());
       setDefaultMaxMergesAndThreads(spins);
       if (verbose()) {
-        message("initMaxMergesAndThreads spins=" + spins + " maxThreadCount=" + maxThreadCount + " maxMergeCount=" + maxMergeCount);
+        message("initDynamicDefaults spins=" + spins + " maxThreadCount=" + maxThreadCount + " maxMergeCount=" + maxMergeCount);
       }
     }
   }
 
+  private static String rateToString(double mbPerSec) {
+    if (mbPerSec == 0.0) {
+      return "stopped";
+    } else if (mbPerSec == Double.POSITIVE_INFINITY) {
+      return "unlimited";
+    } else {
+      return String.format(Locale.ROOT, "%.1f MB/sec", mbPerSec);
+    }
+  }
+
   @Override
   public void close() {
     sync();
@@ -341,17 +401,17 @@
   }
 
   /**
-   * Returns the number of merge threads that are alive. Note that this number
-   * is &le; {@link #mergeThreads} size.
+   * Returns the number of merge threads that are alive, ignoring the calling thread
+   * if it is a merge thread.  Note that this number is &le; {@link #mergeThreads} size.
+   *
+   * @lucene.internal
    */
-  protected synchronized int mergeThreadCount() {
+  public synchronized int mergeThreadCount() {
+    Thread currentThread = Thread.currentThread();
     int count = 0;
-    for (MergeThread mt : mergeThreads) {
-      if (mt.isAlive()) {
-        MergePolicy.OneMerge merge = mt.getCurrentMerge();
-        if (merge != null && merge.isAborted() == false) {
-          count++;
-        }
+    for (MergeThread mergeThread : mergeThreads) {
+      if (currentThread != mergeThread && mergeThread.isAlive() && mergeThread.merge.rateLimiter.getAbort() == false) {
+        count++;
       }
     }
     return count;
@@ -362,12 +422,13 @@
 
     assert !Thread.holdsLock(writer);
 
-    this.writer = writer;
+    initDynamicDefaults(writer);
 
-    initMergeThreadPriority();
-    initMaxMergesAndThreads();
-
-    dir = writer.getDirectory();
+    if (trigger == MergeTrigger.CLOSING) {
+      // Disable throttling on close:
+      targetMBPerSec = MAX_MERGE_MB_PER_SEC;
+      updateMergeThreads();
+    }
 
     // First, quickly run through the newly proposed merges
     // and add any orthogonal merges (ie a merge not
@@ -385,9 +446,11 @@
     // pending merges, until it's empty:
     while (true) {
 
-      maybeStall();
+      if (maybeStall(writer) == false) {
+        break;
+      }
 
-      MergePolicy.OneMerge merge = writer.getNextMerge();
+      OneMerge merge = writer.getNextMerge();
       if (merge == null) {
         if (verbose()) {
           message("  no more merges pending; now return");
@@ -395,6 +458,8 @@
         return;
       }
 
+      updateIOThrottle(merge);
+
       boolean success = false;
       try {
         if (verbose()) {
@@ -405,15 +470,12 @@
         // merge:
         final MergeThread merger = getMergeThread(writer, merge);
         mergeThreads.add(merger);
+
         if (verbose()) {
           message("    launch new thread [" + merger.getName() + "]");
         }
 
         merger.start();
-
-        // Must call this after starting the thread else
-        // the new thread is removed from mergeThreads
-        // (since it's not alive yet):
         updateMergeThreads();
 
         success = true;
@@ -431,11 +493,16 @@
    *  many segments for merging to keep up, to wait until merges catch
    *  up. Applications that can take other less drastic measures, such
    *  as limiting how many threads are allowed to index, can do nothing
-   *  here and throttle elsewhere. */
+   *  here and throttle elsewhere.
+   *
+   *  If this method wants to stall but the calling thread is a merge
+   *  thread, it should return false to tell the caller not to kick off
+   *  any new merges. */
 
-  protected synchronized void maybeStall() {
+  protected synchronized boolean maybeStall(IndexWriter writer) {
     long startStallTime = 0;
     while (writer.hasPendingMerges() && mergeThreadCount() >= maxMergeCount) {
+
       // This means merging has fallen too far behind: we
       // have already created maxMergeCount threads, and
       // now there's at least one more merge pending.
@@ -445,147 +512,111 @@
       // updateMergeThreads).  We stall this producer
       // thread to prevent creation of new segments,
       // until merging has caught up:
+
+      if (mergeThreads.contains(Thread.currentThread())) {
+        // Never stall a merge thread since this blocks the thread from
+        // finishing and calling updateMergeThreads, and blocking it
+        // accomplishes nothing anyway (it's not really a segment producer):
+        return false;
+      }
+
       if (verbose() && startStallTime == 0) {
         message("    too many merges; stalling...");
       }
       startStallTime = System.currentTimeMillis();
-      try {
-        // Only wait 0.25 seconds, so if all merges are aborted (by IW.rollback) we notice:
-        wait(250);
-      } catch (InterruptedException ie) {
-        throw new ThreadInterruptedException(ie);
-      }
+      doStall();
     }
 
-    if (verbose()) {
-      if (startStallTime != 0) {
-        message("  stalled for " + (System.currentTimeMillis()-startStallTime) + " msec");
-      }
+    if (verbose() && startStallTime != 0) {
+      message("  stalled for " + (System.currentTimeMillis()-startStallTime) + " msec");
+    }
+
+    return true;
+  }
+
+  /** Called from {@link #maybeStall} to pause the calling thread for a bit. */
+  protected synchronized void doStall() {
+    try {
+      // Defensively wait for only .25 seconds in case we are missing a notify/notifyAll somewhere:
+      wait(250);
+    } catch (InterruptedException ie) {
+      throw new ThreadInterruptedException(ie);
     }
   }
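Because doStall is a protected hook, a test or subclass can observe (or shorten) stalls without touching the scheduler logic. A minimal sketch, assuming java.util.concurrent.atomic.AtomicInteger; the counter is ours:

    final AtomicInteger stallCount = new AtomicInteger();
    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
      @Override
      protected synchronized void doStall() {
        stallCount.incrementAndGet();  // record each stall for the test
        super.doStall();               // then wait the usual 250 msec
      }
    };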
 
   /** Does the actual merge, by calling {@link IndexWriter#merge} */
-  protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+  protected void doMerge(IndexWriter writer, OneMerge merge) throws IOException {
     writer.merge(merge);
   }
 
   /** Create and return a new MergeThread */
-  protected synchronized MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
+  protected synchronized MergeThread getMergeThread(IndexWriter writer, OneMerge merge) throws IOException {
     final MergeThread thread = new MergeThread(writer, merge);
-    thread.setThreadPriority(mergeThreadPriority);
     thread.setDaemon(true);
     thread.setName("Lucene Merge Thread #" + mergeThreadCount++);
     return thread;
   }
 
-  /** Runs a merge thread, which may run one or more merges
-   *  in sequence. */
-  protected class MergeThread extends Thread {
+  /** Runs a merge thread to execute a single merge, then exits. */
+  protected class MergeThread extends Thread implements Comparable<MergeThread> {
 
-    IndexWriter tWriter;
-    MergePolicy.OneMerge startMerge;
-    MergePolicy.OneMerge runningMerge;
-    private volatile boolean done;
+    final IndexWriter writer;
+    final OneMerge merge;
 
     /** Sole constructor. */
-    public MergeThread(IndexWriter writer, MergePolicy.OneMerge startMerge) {
-      this.tWriter = writer;
-      this.startMerge = startMerge;
+    public MergeThread(IndexWriter writer, OneMerge merge) {
+      this.writer = writer;
+      this.merge = merge;
     }
-
-    /** Record the currently running merge. */
-    public synchronized void setRunningMerge(MergePolicy.OneMerge merge) {
-      runningMerge = merge;
-    }
-
-    /** Return the currently running merge. */
-    public synchronized MergePolicy.OneMerge getRunningMerge() {
-      return runningMerge;
-    }
-
-    /** Return the current merge, or null if this {@code
-     *  MergeThread} is done. */
-    public synchronized MergePolicy.OneMerge getCurrentMerge() {
-      if (done) {
-        return null;
-      } else if (runningMerge != null) {
-        return runningMerge;
-      } else {
-        return startMerge;
-      }
-    }
-
-    /** Set the priority of this thread. */
-    public void setThreadPriority(int pri) {
-      try {
-        setPriority(pri);
-      } catch (NullPointerException npe) {
-        // Strangely, Sun's JDK 1.5 on Linux sometimes
-        // throws NPE out of here...
-      } catch (SecurityException se) {
-        // Ignore this because we will still run fine with
-        // normal thread priority
-      }
+    
+    @Override
+    public int compareTo(MergeThread other) {
+      // Larger merges sort first:
+      return Long.compare(other.merge.estimatedMergeBytes, merge.estimatedMergeBytes);
     }
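The reversed argument order in compareTo above is the usual descending-sort idiom; a tiny self-contained illustration with plain longs (ours, not from the patch), in the pre-Java-8 style the surrounding code keeps to:

    List<Long> sizes = new ArrayList<>(Arrays.asList(100L, 500L, 50L));
    Collections.sort(sizes, new Comparator<Long>() {
      @Override
      public int compare(Long a, Long b) {
        return Long.compare(b, a);  // swapped args -> largest first
      }
    });
    // sizes is now [500, 100, 50], matching CollectionUtil.timSort(activeMerges)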
 
     @Override
     public void run() {
-      
-      // First time through the while loop we do the merge
-      // that we were started with:
-      MergePolicy.OneMerge merge = this.startMerge;
-      
+
       try {
 
         if (verbose()) {
           message("  merge thread: start");
         }
 
-        while(true) {
-          setRunningMerge(merge);
-          doMerge(merge);
-
-          // Subsequent times through the loop we do any new
-          // merge that writer says is necessary:
-          merge = tWriter.getNextMerge();
-
-          // Notify here in case any threads were stalled;
-          // they will notice that the pending merge has
-          // been pulled and possibly resume:
-          synchronized(ConcurrentMergeScheduler.this) {
-            ConcurrentMergeScheduler.this.notifyAll();
-          }
-
-          if (merge != null) {
-            updateMergeThreads();
-            if (verbose()) {
-              message("  merge thread: do another merge " + tWriter.segString(merge.segments));
-            }
-          } else {
-            break;
-          }
-        }
+        doMerge(writer, merge);
 
         if (verbose()) {
           message("  merge thread: done");
         }
 
+        // Let CMS run new merges if necessary:
+        try {
+          merge(writer, MergeTrigger.MERGE_FINISHED, true);
+        } catch (AlreadyClosedException ace) {
+          // OK
+        } catch (IOException ioe) {
+          throw new RuntimeException(ioe);
+        }
+
       } catch (Throwable exc) {
 
-        // Ignore the exception if it was due to abort:
-        if (!(exc instanceof MergePolicy.MergeAbortedException)) {
-          //System.out.println(Thread.currentThread().getName() + ": CMS: exc");
-          //exc.printStackTrace(System.out);
-          if (!suppressExceptions) {
-            // suppressExceptions is normally only set during
-            // testing.
-            handleMergeException(exc);
-          }
+        if (exc instanceof MergePolicy.MergeAbortedException) {
+          // OK to ignore
+        } else if (suppressExceptions == false) {
+          // suppressExceptions is normally only set during
+          // testing.
+          handleMergeException(writer.getDirectory(), exc);
         }
+
       } finally {
-        done = true;
         synchronized(ConcurrentMergeScheduler.this) {
+          removeMergeThread();
+
           updateMergeThreads();
+
+          // In case we had stalled indexing, we can now wake up
+          // and possibly unstall:
           ConcurrentMergeScheduler.this.notifyAll();
         }
       }
@@ -594,7 +625,7 @@
 
   /** Called when an exception is hit in a background merge
    *  thread */
-  protected void handleMergeException(Throwable exc) {
+  protected void handleMergeException(Directory dir, Throwable exc) {
     try {
       // When an exception is hit during merge, IndexWriter
       // removes any partial files and then allows another
@@ -606,6 +637,7 @@
     } catch (InterruptedException ie) {
       throw new ThreadInterruptedException(ie);
     }
+
     throw new MergePolicy.MergeException(exc, dir);
   }
 
@@ -626,7 +658,122 @@
     StringBuilder sb = new StringBuilder(getClass().getSimpleName() + ": ");
     sb.append("maxThreadCount=").append(maxThreadCount).append(", ");    
     sb.append("maxMergeCount=").append(maxMergeCount).append(", ");    
-    sb.append("mergeThreadPriority=").append(mergeThreadPriority);
+    sb.append("ioThrottle=").append(doAutoIOThrottle);
     return sb.toString();
   }
+
+  private boolean isBacklog(long now, OneMerge merge) {
+    double mergeMB = bytesToMB(merge.estimatedMergeBytes);
+    for (MergeThread mergeThread : mergeThreads) {
+      long mergeStartNS = mergeThread.merge.mergeStartNS;
+      if (mergeThread.isAlive() && mergeThread.merge != merge &&
+          mergeStartNS != -1 &&
+          mergeThread.merge.estimatedMergeBytes >= MIN_BIG_MERGE_MB*1024*1024 &&
+          nsToSec(now-mergeStartNS) > 3.0) {
+        double otherMergeMB = bytesToMB(mergeThread.merge.estimatedMergeBytes);
+        double ratio = otherMergeMB / mergeMB;
+        if (ratio > 0.3 && ratio < 3.0) {
+          return true;
+        }
+      }
+    }
+
+    return false;
+  }
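To make the similarity window concrete (illustrative numbers, not from the patch): a running merge counts as backlog for a new one only when their sizes are within roughly a factor of three of each other, i.e. the same "tier" of the merge policy:

    double mergeMB = 100.0;                        // the new merge
    double otherMergeMB = 250.0;                   // a concurrent big merge
    double ratio = otherMergeMB / mergeMB;         // 2.5
    boolean similar = ratio > 0.3 && ratio < 3.0;  // true -> counts as backlog
    // a 500 MB merge (ratio 5.0) would not count: it is a different tier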
+
+  /** Tunes IO throttle when a new merge starts. */
+  private synchronized void updateIOThrottle(OneMerge newMerge) throws IOException {
+    if (doAutoIOThrottle == false) {
+      return;
+    }
+
+    double mergeMB = bytesToMB(newMerge.estimatedMergeBytes);
+    if (mergeMB < MIN_BIG_MERGE_MB) {
+      // Only watch non-trivial merges for throttling; this is safe because the
+      // merge policy must eventually do larger merges:
+      return;
+    }
+
+    long now = System.nanoTime();
+
+    // Simplistic closed-loop feedback control: if we find any other similarly
+    // sized merges running, then we are falling behind, so we bump up the
+    // IO throttle, else we lower it:
+    boolean newBacklog = isBacklog(now, newMerge);
+
+    boolean curBacklog = false;
+
+    if (newBacklog == false) {
+      if (mergeThreads.size() > maxThreadCount) {
+        // If there are already more than the maximum merge threads allowed, count that as backlog:
+        curBacklog = true;
+      } else {
+        // Now see if any still-running merges are backlog'd:
+        for (MergeThread mergeThread : mergeThreads) {
+          if (isBacklog(now, mergeThread.merge)) {
+            curBacklog = true;
+            break;
+          }
+        }
+      }
+    }
+
+    double curMBPerSec = targetMBPerSec;
+
+    if (newBacklog) {
+      // This new merge adds to the backlog: increase IO throttle by 20%
+      targetMBPerSec *= 1.20;
+      if (targetMBPerSec > MAX_MERGE_MB_PER_SEC) {
+        targetMBPerSec = MAX_MERGE_MB_PER_SEC;
+      }
+      if (verbose()) {
+        if (curMBPerSec == targetMBPerSec) {
+          message(String.format(Locale.ROOT, "io throttle: new merge backlog; leave IO rate at ceiling %.1f MB/sec", targetMBPerSec));
+        } else {
+          message(String.format(Locale.ROOT, "io throttle: new merge backlog; increase IO rate to %.1f MB/sec", targetMBPerSec));
+        }
+      }
+    } else if (curBacklog) {
+      // We still have an existing backlog; leave the rate as is:
+      if (verbose()) {
+        message(String.format(Locale.ROOT, "io throttle: current merge backlog; leave IO rate at %.1f MB/sec",
+                              targetMBPerSec));
+      }
+    } else {
+      // We are not falling behind: decrease IO throttle by 10%
+      targetMBPerSec /= 1.10;
+      if (targetMBPerSec < MIN_MERGE_MB_PER_SEC) {
+        targetMBPerSec = MIN_MERGE_MB_PER_SEC;
+      }
+      if (verbose()) {
+        if (curMBPerSec == targetMBPerSec) {
+          message(String.format(Locale.ROOT, "io throttle: no merge backlog; leave IO rate at floor %.1f MB/sec", targetMBPerSec));
+        } else {
+          message(String.format(Locale.ROOT, "io throttle: no merge backlog; decrease IO rate to %.1f MB/sec", targetMBPerSec));
+        }
+      }
+    }
+
+    double rate;
+
+    if (newMerge.maxNumSegments != -1) {
+      rate = forceMergeMBPerSec;
+    } else {
+      rate = targetMBPerSec;
+    }
+    newMerge.rateLimiter.setMBPerSec(rate);
+    targetMBPerSecChanged();
+  }
+
+  /** Subclasses can override this to tweak {@code targetMBPerSec}. */
+  protected void targetMBPerSecChanged() {
+  }
+
+  private static double nsToSec(long ns) {
+    return ns / 1000000000.0;
+  }
+
+  private static double bytesToMB(long bytes) {
+    return bytes/1024./1024.;
+  }
 }
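Taken together, updateIOThrottle is a small multiplicative increase/decrease controller clamped to [MIN_MERGE_MB_PER_SEC, MAX_MERGE_MB_PER_SEC]. A hedged restatement as a pure function (the name nextRate is ours; the constants are copied from the patch):

    static double nextRate(double cur, boolean newBacklog, boolean curBacklog) {
      if (newBacklog) {
        return Math.min(10240.0, cur * 1.20);  // backlog grew: +20%, up to the ceiling
      } else if (curBacklog) {
        return cur;                            // existing backlog: hold steady
      } else {
        return Math.max(5.0, cur / 1.10);      // keeping up: -10%, down to the floor
      }
    }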
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
index ce28ab5..e4fab8d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java
@@ -21,10 +21,9 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashSet;
+import java.util.Locale;
 import java.util.List;
 import java.util.Queue;
-import java.util.Set;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -37,7 +36,6 @@
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InfoStream;
 
 /**
@@ -562,11 +560,13 @@
     final double ramBufferSizeMB = config.getRAMBufferSizeMB();
     if (ramBufferSizeMB != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
         flushControl.getDeleteBytesUsed() > (1024*1024*ramBufferSizeMB/2)) {
-      if (infoStream.isEnabled("DW")) {
-        infoStream.message("DW", "force apply deletes bytesUsed=" + flushControl.getDeleteBytesUsed() + " vs ramBuffer=" + (1024*1024*ramBufferSizeMB));
-      }
       hasEvents = true;
       if (!this.applyAllDeletes(deleteQueue)) {
+        if (infoStream.isEnabled("DW")) {
+          infoStream.message("DW", String.format(Locale.ROOT, "force apply deletes bytesUsed=%.1f MB vs ramBuffer=%.1f MB",
+                                                 flushControl.getDeleteBytesUsed()/(1024.*1024.),
+                                                 ramBufferSizeMB));
+        }
         putEvent(ApplyDeletesEvent.INSTANCE);
       }
     }
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
index 92c3549..250aef7 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterFlushControl.java
@@ -77,7 +77,7 @@
 
   DocumentsWriterFlushControl(DocumentsWriter documentsWriter, LiveIndexWriterConfig config, BufferedUpdatesStream bufferedUpdatesStream, AtomicLong uniqueValuesRAM) {
     this.infoStream = config.getInfoStream();
-    this.stallControl = new DocumentsWriterStallControl();
+    this.stallControl = new DocumentsWriterStallControl(config);
     this.perThreadPool = documentsWriter.perThreadPool;
     this.flushPolicy = documentsWriter.flushPolicy;
     this.config = config;
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
index 94054ab..3d1f3f0 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
@@ -458,7 +458,7 @@
         final double newSegmentSize = segmentInfoPerCommit.sizeInBytes()/1024./1024.;
         infoStream.message("DWPT", "flushed: segment=" + segmentInfo.name + 
                 " ramUsed=" + nf.format(startMBUsed) + " MB" +
-                " newFlushedSize(includes docstores)=" + nf.format(newSegmentSize) + " MB" +
+                " newFlushedSize=" + nf.format(newSegmentSize) + " MB" +
                 " docs/MB=" + nf.format(flushState.segmentInfo.getDocCount() / newSegmentSize));
       }
 
@@ -497,7 +497,10 @@
     try {
       
       if (indexWriterConfig.getUseCompoundFile()) {
-        filesToDelete.addAll(IndexWriter.createCompoundFile(infoStream, directory, MergeState.CheckAbort.NONE, newSegment.info, context));
+        Set<String> originalFiles = newSegment.info.files();
+        // TODO: like addIndexes, we are relying on createCompoundFile to successfully cleanup...
+        IndexWriter.createCompoundFile(infoStream, new TrackingDirectoryWrapper(directory), newSegment.info, context);
+        filesToDelete.addAll(originalFiles);
         newSegment.info.setUseCompoundFile(true);
       }
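The switch to TrackingDirectoryWrapper above matters because the wrapper records every file created through it, so the caller can separate the new compound-file output from the original segment files. A minimal sketch of that API (getCreatedFiles is the wrapper's real accessor):

    TrackingDirectoryWrapper tracking = new TrackingDirectoryWrapper(directory);
    // ... createCompoundFile writes the .cfs/.cfe files through `tracking` ...
    Set<String> created = tracking.getCreatedFiles();  // only the newly written files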
 
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java
index a799fbf..c83813b 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterStallControl.java
@@ -20,6 +20,7 @@
 import java.util.Map;
 
 import org.apache.lucene.index.DocumentsWriterPerThreadPool.ThreadState;
+import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.ThreadInterruptedException;
 
 /**
@@ -43,6 +44,11 @@
   private int numWaiting; // only with assert
   private boolean wasStalled; // only with assert
   private final Map<Thread, Boolean> waiting = new IdentityHashMap<>(); // only with assert
+  private final InfoStream infoStream;
+
+  DocumentsWriterStallControl(LiveIndexWriterConfig iwc) {
+    infoStream = iwc.getInfoStream();
+  }
   
   /**
    * Update the stalled flag status. This method will set the stalled flag to
@@ -85,8 +91,13 @@
     return stalled;
   }
   
-  
+  long stallStartNS;
+
   private void incWaiters() {
+    stallStartNS = System.nanoTime();
+    if (infoStream.isEnabled("DW") && numWaiting == 0) {
+      infoStream.message("DW", "now stalling flushes");
+    }
     numWaiting++;
     assert waiting.put(Thread.currentThread(), Boolean.TRUE) == null;
     assert numWaiting > 0;
@@ -96,6 +107,10 @@
     numWaiting--;
     assert waiting.remove(Thread.currentThread()) != null;
     assert numWaiting >= 0;
+    if (infoStream.isEnabled("DW") && numWaiting == 0) {
+      long stallEndNS = System.nanoTime();
+      infoStream.message("DW", "done stalling flushes for " + ((stallEndNS - stallStartNS)/1000000.0) + " ms");
+    }
   }
   
   synchronized boolean hasBlocked() { // for tests
diff --git a/lucene/core/src/java/org/apache/lucene/index/FieldTermIterator.java b/lucene/core/src/java/org/apache/lucene/index/FieldTermIterator.java
new file mode 100644
index 0000000..2790c84
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/FieldTermIterator.java
@@ -0,0 +1,40 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.util.BytesRef;
+
+// TODO: maybe TermsFilter could use this?
+
+/** Iterates over terms in multiple fields, notifying the caller when a new field is started. */
+interface FieldTermIterator {
+  /** Advances to the next term, returning true if it's in a new field or there are no more terms.  Call {@link #field} to see which
+   *  field; if that returns null then the iteration ended. */
+  boolean next();
+
+  /** Returns current field, or null if the iteration ended. */
+  String field();
+
+  /** Returns current term. */
+  BytesRef term();
+
+  /** Del gen of the current term. */
+  // TODO: this is really per-iterator not per term, but when we use MergedPrefixCodedTermsIterator we need to know which iterator we are on
+  long delGen();
+}
+
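The contract above implies a slightly unusual consumption loop, since next() flags field boundaries rather than simple exhaustion. A hedged sketch (iter is any FieldTermIterator; processTerm is a hypothetical consumer):

    String field = null;
    while (true) {
      if (iter.next()) {      // crossed into a new field, or ran out of terms
        field = iter.field();
        if (field == null) {
          break;              // null field signals the end of iteration
        }
      }
      processTerm(field, iter.term(), iter.delGen());
    }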
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java
new file mode 100644
index 0000000..e234245
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterCodecReader.java
@@ -0,0 +1,108 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.Objects;
+
+import org.apache.lucene.codecs.DocValuesProducer;
+import org.apache.lucene.codecs.FieldsProducer;
+import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.StoredFieldsReader;
+import org.apache.lucene.codecs.TermVectorsReader;
+import org.apache.lucene.document.FieldTypes;
+import org.apache.lucene.util.Bits;
+
+/** 
+ * A <code>FilterCodecReader</code> contains another CodecReader, which it
+ * uses as its basic source of data, possibly transforming the data along the
+ * way or providing additional functionality.
+ */
+public class FilterCodecReader extends CodecReader {
+  /** 
+   * The underlying CodecReader instance. 
+   */
+  protected final CodecReader in;
+  
+  /**
+   * Creates a new FilterCodecReader.
+   * @param in the underlying CodecReader instance.
+   */
+  public FilterCodecReader(CodecReader in) {
+    this.in = Objects.requireNonNull(in);
+  }
+
+  @Override
+  public StoredFieldsReader getFieldsReader() {
+    return in.getFieldsReader();
+  }
+
+  @Override
+  public TermVectorsReader getTermVectorsReader() {
+    return in.getTermVectorsReader();
+  }
+
+  @Override
+  public NormsProducer getNormsReader() {
+    return in.getNormsReader();
+  }
+
+  @Override
+  public DocValuesProducer getDocValuesReader() {
+    return in.getDocValuesReader();
+  }
+
+  @Override
+  public FieldsProducer getPostingsReader() {
+    return in.getPostingsReader();
+  }
+
+  @Override
+  public Bits getLiveDocs() {
+    return in.getLiveDocs();
+  }
+
+  @Override
+  public FieldInfos getFieldInfos() {
+    return in.getFieldInfos();
+  }
+
+  @Override
+  public int numDocs() {
+    return in.numDocs();
+  }
+
+  @Override
+  public int maxDoc() {
+    return in.maxDoc();
+  }
+
+  @Override
+  public void addCoreClosedListener(CoreClosedListener listener) {
+    in.addCoreClosedListener(listener);
+  }
+
+  @Override
+  public void removeCoreClosedListener(CoreClosedListener listener) {
+    in.removeCoreClosedListener(listener);
+  }
+
+  @Override
+  public FieldTypes getFieldTypes() {
+    return in.getFieldTypes();
+  }
+}
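A hedged usage sketch for the new class: a pass-through subclass that logs integrity checks (VerboseCodecReader and the logging are ours, not part of the patch; needs java.io.IOException):

    public class VerboseCodecReader extends FilterCodecReader {
      public VerboseCodecReader(CodecReader in) {
        super(in);
      }

      @Override
      public void checkIntegrity() throws IOException {
        System.out.println("checking integrity of " + in);
        super.checkIntegrity();  // delegates to the wrapped codec producers
      }
    }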
diff --git a/lucene/core/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java b/lucene/core/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java
index 441004e..617765b 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FlushByRamOrCountsPolicy.java
@@ -72,7 +72,7 @@
          control.getDeleteBytesUsed() + control.uniqueValuesRAM.get() > (1024*1024*indexWriterConfig.getRAMBufferSizeMB()))) {
       control.setApplyAllDeletes();
      if (infoStream.isEnabled("FP")) {
-       infoStream.message("FP", "force apply deletes bytesUsed=" + control.getDeleteBytesUsed() + " vs ramBuffer=" + (1024*1024*indexWriterConfig.getRAMBufferSizeMB()));
+       infoStream.message("FP", "force apply deletes bytesUsed=" + control.getDeleteBytesUsed() + " vs ramBufferMB=" + indexWriterConfig.getRAMBufferSizeMB());
      }
    }
   }
diff --git a/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java b/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java
index 59e5525..a7801cb 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FrozenBufferedUpdates.java
@@ -26,6 +26,7 @@
 import org.apache.lucene.index.BufferedUpdatesStream.QueryAndLimit;
 import org.apache.lucene.index.DocValuesUpdate.BinaryDocValuesUpdate;
 import org.apache.lucene.index.DocValuesUpdate.NumericDocValuesUpdate;
+import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.RamUsageEstimator;
@@ -57,7 +58,7 @@
   
   final int bytesUsed;
   final int numTermDeletes;
-  private long gen = -1; // assigned by BufferedDeletesStream once pushed
+  private long gen = -1; // assigned by BufferedUpdatesStream once pushed
   final boolean isSegmentPrivate;  // set to true iff this frozen packet represents 
                                    // a segment private deletes. in that case is should
                                    // only have Queries 
@@ -122,6 +123,7 @@
   public void setDelGen(long gen) {
     assert this.gen == -1;
     this.gen = gen;
+    terms.setDelGen(gen);
   }
   
   public long delGen() {
@@ -129,13 +131,8 @@
     return gen;
   }
 
-  public Iterable<Term> termsIterable() {
-    return new Iterable<Term>() {
-      @Override
-      public Iterator<Term> iterator() {
-        return terms.iterator();
-      }
-    };
+  public TermIterator termIterator() {
+    return terms.iterator();
   }
 
   public Iterable<QueryAndLimit> queriesIterable() {
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index 0fcc93b..80ee3d7 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -32,13 +32,14 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Locale;
-import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Map;
 import java.util.Queue;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.FieldInfosFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50Codec;
@@ -48,19 +49,23 @@
 import org.apache.lucene.index.DocValuesUpdate.NumericDocValuesUpdate;
 import org.apache.lucene.index.FieldInfos.FieldNumbers;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.index.MergeState.CheckAbort;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ReferenceManager;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FilterDirectory;
+import org.apache.lucene.store.FlushInfo;
 import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.store.MergeInfo;
+import org.apache.lucene.store.RateLimitedIndexOutput;
 import org.apache.lucene.store.TrackingDirectoryWrapper;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.CloseableThreadLocal;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InfoStream;
@@ -233,8 +238,8 @@
   public static final String SOURCE_MERGE = "merge";
   /** Source of a segment which results from a flush. */
   public static final String SOURCE_FLUSH = "flush";
-  /** Source of a segment which results from a call to {@link #addIndexes(IndexReader...)}. */
-  public static final String SOURCE_ADDINDEXES_READERS = "addIndexes(IndexReader...)";
+  /** Source of a segment which results from a call to {@link #addIndexes(CodecReader...)}. */
+  public static final String SOURCE_ADDINDEXES_READERS = "addIndexes(CodecReader...)";
 
   /**
    * Absolute hard maximum length for a term, in bytes once
@@ -249,6 +254,9 @@
   volatile Throwable tragedy;
 
   private final Directory directory;  // where this index resides
+  private final Directory mergeDirectory;  // used for merging
+  private final Analyzer analyzer;    // how to analyze text
+
   final FieldTypes fieldTypes; // schema
 
   private volatile long changeCount; // increments every time a change is completed
@@ -326,6 +334,8 @@
    *  card to make sure they can later charge you when you check out. */
   final AtomicLong pendingNumDocs = new AtomicLong();
 
+  final CloseableThreadLocal<MergeRateLimiter> rateLimiters = new CloseableThreadLocal<>();
+
   DirectoryReader getReader() throws IOException {
     return getReader(true);
   }
@@ -402,7 +412,7 @@
     poolReaders = true;
     DirectoryReader r = null;
     doBeforeFlush();
-    boolean anySegmentFlushed = false;
+    boolean anyChanges = false;
     /*
      * for releasing a NRT reader we must ensure that 
      * DW doesn't add any segments or deletes until we are
@@ -415,8 +425,8 @@
       synchronized (fullFlushLock) {
         boolean success = false;
         try {
-          anySegmentFlushed = docWriter.flushAllThreads();
-          if (!anySegmentFlushed) {
+          anyChanges = docWriter.flushAllThreads();
+          if (!anyChanges) {
             // prevent double increment since docWriter#doFlush increments the flushcount
             // if we flushed anything.
             flushCount.incrementAndGet();
@@ -426,7 +436,7 @@
           // reader; in theory we could instead do similar retry logic,
           // just like we do when loading segments_N
           synchronized(this) {
-            maybeApplyDeletes(applyAllDeletes);
+            anyChanges |= maybeApplyDeletes(applyAllDeletes);
             r = StandardDirectoryReader.open(this, segmentInfos, applyAllDeletes);
             if (infoStream.isEnabled("IW")) {
               infoStream.message("IW", "return reader version=" + r.getVersion() + " reader=" + r);
@@ -450,7 +460,7 @@
           }
         }
       }
-      if (anySegmentFlushed) {
+      if (anyChanges) {
         maybeMerge(config.getMergePolicy(), MergeTrigger.FULL_FLUSH, UNBOUNDED_MAX_MERGE_SEGMENTS);
       }
       if (infoStream.isEnabled("IW")) {
@@ -748,9 +758,17 @@
   public IndexWriter(Directory d, IndexWriterConfig conf) throws IOException {
     conf.setIndexWriter(this); // prevent reuse by other instances
     config = conf;
+
     directory = d;
+
+    // Directory we use for merging, so we can abort running merges, and so
+    // merge schedulers can optionally rate-limit per-merge IO:
+    mergeDirectory = addMergeRateLimiters(d);
+
+    analyzer = config.getAnalyzer();
     infoStream = config.getInfoStream();
     mergeScheduler = config.getMergeScheduler();
+    mergeScheduler.setInfoStream(infoStream);
 
     bufferedUpdatesStream = new BufferedUpdatesStream(infoStream);
     poolReaders = config.getReaderPooling();
@@ -1645,7 +1663,7 @@
             for(int i=0;i<size;i++) {
               final MergePolicy.OneMerge merge = mergeExceptions.get(i);
               if (merge.maxNumSegments != -1) {
-                throw new IOException("background merge hit exception: " + merge.segString(directory), merge.getException());
+                throw new IOException("background merge hit exception: " + merge.segString(), merge.getException());
               }
             }
           }
@@ -1735,7 +1753,7 @@
             }
             Throwable t = merge.getException();
             if (t != null) {
-              throw new IOException("background merge hit exception: " + merge.segString(directory), t);
+              throw new IOException("background merge hit exception: " + merge.segString(), t);
             }
           }
 
@@ -1918,6 +1936,8 @@
         stopMerges = true;
       }
 
+      rateLimiters.close();
+
       if (infoStream.isEnabled("IW")) {
         infoStream.message("IW", "rollback: done finish merges");
       }
@@ -2035,7 +2055,7 @@
    * 
    * <p>
    * NOTE: this method will forcefully abort all merges in progress. If other
-   * threads are running {@link #forceMerge}, {@link #addIndexes(IndexReader[])}
+   * threads are running {@link #forceMerge}, {@link #addIndexes(CodecReader[])}
    * or {@link #forceMergeDeletes} methods, they may receive
    * {@link MergePolicy.MergeAbortedException}s.
    */
@@ -2108,7 +2128,7 @@
       if (infoStream.isEnabled("IW")) {
         infoStream.message("IW", "now abort pending merge " + segString(merge.segments));
       }
-      merge.abort();
+      merge.rateLimiter.setAbort();
       mergeFinish(merge);
     }
     pendingMerges.clear();
@@ -2117,7 +2137,7 @@
       if (infoStream.isEnabled("IW")) {
         infoStream.message("IW", "now abort running merge " + segString(merge.segments));
       }
-      merge.abort();
+      merge.rateLimiter.setAbort();
     }
 
     // These merges periodically check whether they have
@@ -2363,7 +2383,7 @@
               infoStream.message("IW", "addIndexes: process segment origName=" + info.info.name + " newName=" + newSegName + " info=" + info);
             }
 
-            IOContext context = new IOContext(new MergeInfo(info.info.getDocCount(), info.sizeInBytes(), true, -1));
+            IOContext context = new IOContext(new FlushInfo(info.info.getDocCount(), info.sizeInBytes()));
 
             FieldInfos fis = readFieldInfos(info);
             for(FieldInfo fi : fis) {
@@ -2439,7 +2459,7 @@
    * index.
    * 
    * <p>
-   * <b>NOTE:</b> this method merges all given {@link IndexReader}s in one
+   * <b>NOTE:</b> this method merges all given {@link LeafReader}s in one
    * merge. If you intend to merge a large number of readers, it may be better
    * to call this method multiple times, each time with a small set of readers.
    * In principle, if you use a merge policy with a {@code mergeFactor} or
@@ -2454,34 +2474,30 @@
    * @throws IOException
    *           if there is a low-level IO error
    */
-  public void addIndexes(IndexReader... readers) throws IOException {
+  public void addIndexes(CodecReader... readers) throws IOException {
     ensureOpen();
     int numDocs = 0;
 
     try {
       if (infoStream.isEnabled("IW")) {
-        infoStream.message("IW", "flush at addIndexes(IndexReader...)");
+        infoStream.message("IW", "flush at addIndexes(CodecReader...)");
       }
       flush(false, true);
 
       String mergedName = newSegmentName();
-      final List<LeafReader> mergeReaders = new ArrayList<>();
-      for (IndexReader indexReader : readers) {
-        FieldTypes ft = indexReader.getFieldTypes();
+      for (CodecReader leaf : readers) {
+        FieldTypes ft = leaf.getFieldTypes();
         if (ft != null) {
           fieldTypes.addAll(ft);
         }
-        numDocs += indexReader.numDocs();
-        for (LeafReaderContext ctx : indexReader.leaves()) {
-          mergeReaders.add(ctx.reader());
-        }
+        numDocs += leaf.numDocs();
       }
 
       // Make sure adding the new documents to this index won't
       // exceed the limit:
       reserveDocs(numDocs);
 
-      final IOContext context = new IOContext(new MergeInfo(numDocs, -1, true, -1));
+      final IOContext context = new IOContext(new MergeInfo(numDocs, -1, false, -1));
 
       // TODO: somehow we should fix this merge so it's
       // abortable so that IW.close(false) is able to stop it
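Because addIndexes now takes CodecReader rather than IndexReader, callers holding plain leaf readers need a shim. A minimal migration sketch, assuming the SlowCodecReaderWrapper used later in this patch (the reader and writer variables are placeholders):

    // Hedged sketch: adapt arbitrary LeafReaders to the new signature.
    List<CodecReader> wrapped = new ArrayList<>();
    for (LeafReaderContext ctx : indexReader.leaves()) {
      // SlowCodecReaderWrapper emulates the codec-level APIs on top of a plain LeafReader:
      wrapped.add(SlowCodecReaderWrapper.wrap(ctx.reader()));
    }
    writer.addIndexes(wrapped.toArray(new CodecReader[wrapped.size()]));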
@@ -2490,10 +2506,12 @@
       SegmentInfo info = new SegmentInfo(directory, Version.LATEST, mergedName, -1,
                                          false, codec, null, StringHelper.randomId(), new HashMap<>());
 
-      SegmentMerger merger = new SegmentMerger(fieldTypes, mergeReaders, info, infoStream, trackingDir,
-                                               MergeState.CheckAbort.NONE, globalFieldNumberMap, 
+      SegmentMerger merger = new SegmentMerger(fieldTypes, Arrays.asList(readers), info, infoStream, trackingDir,
+                                               globalFieldNumberMap, 
                                                context);
       
+      rateLimiters.set(new MergeRateLimiter(null));
+
       if (!merger.shouldMerge()) {
         return;
       }
@@ -2531,8 +2549,11 @@
       // Now create the compound file if needed
       if (useCompoundFile) {
         Collection<String> filesToDelete = infoPerCommit.files();
+        TrackingDirectoryWrapper trackingCFSDir = new TrackingDirectoryWrapper(mergeDirectory);
+        // TODO: unlike merge, on exception we aren't sniping any trash cfs files here?
+        // createCompoundFile tries to cleanup, but it might not always be able to...
         try {
-          createCompoundFile(infoStream, directory, MergeState.CheckAbort.NONE, info, context);
+          createCompoundFile(infoStream, trackingCFSDir, info, context);
         } finally {
           // delete new non cfs files directly: they were never
           // registered with IFD
@@ -2949,6 +2970,7 @@
     }
   }
 
+  /** Returns true if a segment was flushed or deletes were applied. */
   private boolean doFlush(boolean applyAllDeletes) throws IOException {
     if (tragedy != null) {
       throw new IllegalStateException("this writer hit an unrecoverable error; cannot flush", tragedy);
@@ -2963,12 +2985,16 @@
         infoStream.message("IW", "  start flush: applyAllDeletes=" + applyAllDeletes);
         infoStream.message("IW", "  index before flush " + segString());
       }
-      final boolean anySegmentFlushed;
+      boolean anyChanges = false;
       
       synchronized (fullFlushLock) {
       boolean flushSuccess = false;
         try {
-          anySegmentFlushed = docWriter.flushAllThreads();
+          anyChanges = docWriter.flushAllThreads();
+          if (!anyChanges) {
+            // flushAllThreads only increments flushCount if anything was flushed:
+            flushCount.incrementAndGet();
+          }
           flushSuccess = true;
         } finally {
           docWriter.finishFullFlush(flushSuccess);
@@ -2976,14 +3002,10 @@
         }
       }
       synchronized(this) {
-        maybeApplyDeletes(applyAllDeletes);
+        anyChanges |= maybeApplyDeletes(applyAllDeletes);
         doAfterFlush();
-        if (!anySegmentFlushed) {
-          // flushCount is incremented in flushAllThreads
-          flushCount.incrementAndGet();
-        }
         success = true;
-        return anySegmentFlushed;
+        return anyChanges;
       }
     } catch (AbortingException | OutOfMemoryError tragedy) {
       tragicEvent(tragedy, "doFlush");
@@ -2998,20 +3020,25 @@
     }
   }
   
-  final synchronized void maybeApplyDeletes(boolean applyAllDeletes) throws IOException {
+  final synchronized boolean maybeApplyDeletes(boolean applyAllDeletes) throws IOException {
     if (applyAllDeletes) {
       if (infoStream.isEnabled("IW")) {
         infoStream.message("IW", "apply all deletes during flush");
       }
-      applyAllDeletesAndUpdates();
+      return applyAllDeletesAndUpdates();
     } else if (infoStream.isEnabled("IW")) {
       infoStream.message("IW", "don't apply deletes now delTermCount=" + bufferedUpdatesStream.numTerms() + " bytesUsed=" + bufferedUpdatesStream.ramBytesUsed());
     }
+
+    return false;
   }
 
-  final synchronized void applyAllDeletesAndUpdates() throws IOException {
+  final synchronized boolean applyAllDeletesAndUpdates() throws IOException {
     flushDeletesCount.incrementAndGet();
     final BufferedUpdatesStream.ApplyDeletesResult result;
+    if (infoStream.isEnabled("IW")) {
+      infoStream.message("IW", "now apply all deletes for all segments maxDoc=" + (docWriter.getNumDocs() + segmentInfos.totalDocCount()));
+    }
     result = bufferedUpdatesStream.applyDeletesAndUpdates(readerPool, segmentInfos.asList());
     if (result.anyDeletes) {
       checkpoint();
@@ -3034,6 +3061,7 @@
       checkpoint();
     }
     bufferedUpdatesStream.prune(segmentInfos);
+    return result.anyDeletes;
   }
 
   // for testing only
@@ -3326,7 +3354,7 @@
     // deleter.refresh() call that will remove any index
     // file that current segments does not reference), we
     // abort this merge
-    if (merge.isAborted()) {
+    if (merge.rateLimiter.getAbort()) {
       if (infoStream.isEnabled("IW")) {
         infoStream.message("IW", "commitMerge: skip: it was aborted");
       }
@@ -3485,6 +3513,8 @@
 
     boolean success = false;
 
+    rateLimiters.set(merge.rateLimiter);
+
     final long t0 = System.currentTimeMillis();
 
     final MergePolicy mergePolicy = config.getMergePolicy();
@@ -3522,7 +3552,7 @@
           // This merge (and, generally, any change to the
           // segments) may now enable new merges, so we call
           // merge policy & update pending merges.
-          if (success && !merge.isAborted() && (merge.maxNumSegments != -1 || (!closed && !closing))) {
+          if (success && merge.rateLimiter.getAbort() == false && (merge.maxNumSegments != -1 || (!closed && !closing))) {
             updatePendingMerges(mergePolicy, MergeTrigger.MERGE_FINISHED, merge.maxNumSegments);
           }
         }
@@ -3530,7 +3560,7 @@
     } catch (OutOfMemoryError oom) {
       tragicEvent(oom, "merge");
     }
-    if (merge.info != null && !merge.isAborted()) {
+    if (merge.info != null && merge.rateLimiter.getAbort() == false) {
       if (infoStream.isEnabled("IW")) {
         infoStream.message("IW", "merge time " + (System.currentTimeMillis()-t0) + " msec for " + merge.info.info.getDocCount() + " docs");
       }
@@ -3555,7 +3585,7 @@
     assert merge.segments.size() > 0;
 
     if (stopMerges) {
-      merge.abort();
+      merge.rateLimiter.setAbort();
       throw new MergePolicy.MergeAbortedException("merge is aborted: " + segString(merge.segments));
     }
 
@@ -3666,7 +3696,7 @@
       return;
     }
 
-    if (merge.isAborted()) {
+    if (merge.rateLimiter.getAbort()) {
       return;
     }
 
@@ -3675,6 +3705,10 @@
     // and then open them again for merging.  Maybe  we
     // could pre-pool them somehow in that case...
 
+    if (infoStream.isEnabled("IW")) {
+      infoStream.message("IW", "now apply deletes for " + merge.segments.size() + " merging segments");
+    }
+
     // Lock order: IW -> BD
     final BufferedUpdatesStream.ApplyDeletesResult result = bufferedUpdatesStream.applyDeletesAndUpdates(readerPool, merge.segments);
     
@@ -3811,14 +3845,13 @@
    *  instance */
   private int mergeMiddle(MergePolicy.OneMerge merge, MergePolicy mergePolicy) throws IOException {
 
-    merge.checkAborted(directory);
+    merge.rateLimiter.checkAbort();
 
     List<SegmentCommitInfo> sourceSegments = merge.segments;
     
     IOContext context = new IOContext(merge.getMergeInfo());
 
-    final MergeState.CheckAbort checkAbort = new MergeState.CheckAbort(merge, directory);
-    final TrackingDirectoryWrapper dirWrapper = new TrackingDirectoryWrapper(directory);
+    final TrackingDirectoryWrapper dirWrapper = new TrackingDirectoryWrapper(mergeDirectory);
 
     if (infoStream.isEnabled("IW")) {
       infoStream.message("IW", "merging " + segString(merge.segments));
@@ -3873,7 +3906,14 @@
           // fix the reader's live docs and del count
           assert delCount > reader.numDeletedDocs(); // beware of zombies
 
-          SegmentReader newReader = new SegmentReader(fieldTypes, info, reader, liveDocs, info.info.getDocCount() - delCount);
+          SegmentReader newReader;
+
+          synchronized (this) {
+            // We must also sync on IW here, because another thread could be writing
+            // new DV updates / remove old gen field infos files causing FNFE:
+            newReader = new SegmentReader(fieldTypes, info, reader, liveDocs, info.info.getDocCount() - delCount);
+          }
+
           boolean released = false;
           try {
             rld.release(reader);
@@ -3898,15 +3938,12 @@
       // OneMerge to return a view over the actual segments to merge
       final SegmentMerger merger = new SegmentMerger(fieldTypes, merge.getMergeReaders(),
                                                      merge.info.info, infoStream, dirWrapper,
-                                                     checkAbort, globalFieldNumberMap, 
+                                                     globalFieldNumberMap, 
                                                      context);
 
-      merge.checkAborted(directory);
+      merge.rateLimiter.checkAbort();
 
-      long mergeStartTime = 0;
-      if (infoStream.isEnabled("IW")) {
-        mergeStartTime = System.nanoTime();
-      }
+      merge.mergeStartNS = System.nanoTime();
 
       // This is where all the work happens:
       boolean success3 = false;
@@ -3926,13 +3963,13 @@
       assert mergeState.segmentInfo == merge.info.info;
       merge.info.info.setFiles(new HashSet<>(dirWrapper.getCreatedFiles()));
 
-      // Record which codec was used to write the segment
-
       if (infoStream.isEnabled("IW")) {
         if (merger.shouldMerge()) {
           long t1 = System.nanoTime();
-          double sec = (t1-mergeStartTime)/1000000000.;
+          double sec = (t1-merge.mergeStartNS)/1000000000.;
           double segmentMB = (merge.info.sizeInBytes()/1024./1024.);
+          double stoppedSec = merge.rateLimiter.getTotalStoppedNS()/1000000000.;
+          double throttleSec = merge.rateLimiter.getTotalPausedNS()/1000000000.;
           infoStream.message("IW", "merge codec=" + codec + " docCount=" + merge.info.info.getDocCount() + "; merged segment has " +
                              (mergeState.mergeFieldInfos.hasVectors() ? "vectors" : "no vectors") + "; " +
                              (mergeState.mergeFieldInfos.hasNorms() ? "norms" : "no norms") + "; " + 
@@ -3940,8 +3977,10 @@
                              (mergeState.mergeFieldInfos.hasProx() ? "prox" : "no prox") + "; " + 
                              (mergeState.mergeFieldInfos.hasProx() ? "freqs" : "no freqs") + "; " +
                              String.format(Locale.ROOT,
-                                           "%d msec to merge segment [%.2f MB, %.2f MB/sec]",
-                                           ((t1-mergeStartTime)/1000000),
+                                           "%.1f sec (%.1f sec stopped, %.1f sec paused) to merge segment [%.2f MB, %.2f MB/sec]",
+                                           sec,
+                                           stoppedSec,
+                                           throttleSec,
                                            segmentMB,
                                            segmentMB / sec));
         } else {
@@ -3970,15 +4009,14 @@
       if (useCompoundFile) {
         success = false;
 
-        String cfsFiles[] = merge.info.info.getCodec().compoundFormat().files(merge.info.info);
         Collection<String> filesToRemove = merge.info.files();
-
+        TrackingDirectoryWrapper trackingCFSDir = new TrackingDirectoryWrapper(mergeDirectory);
         try {
-          filesToRemove = createCompoundFile(infoStream, directory, checkAbort, merge.info.info, context);
+          createCompoundFile(infoStream, trackingCFSDir, merge.info.info, context);
           success = true;
         } catch (IOException ioe) {
           synchronized(this) {
-            if (merge.isAborted()) {
+            if (merge.rateLimiter.getAbort()) {
               // This can happen if rollback or close(false)
               // is called -- fall through to logic below to
               // remove the partially created CFS:
@@ -3995,6 +4033,7 @@
             }
 
             synchronized(this) {
+              Set<String> cfsFiles = new HashSet<>(trackingCFSDir.getCreatedFiles());
               for (String cfsFile : cfsFiles) {
                 deleter.deleteFile(cfsFile);
               }
@@ -4014,10 +4053,11 @@
           // registered with IFD
           deleter.deleteNewFiles(filesToRemove);
 
-          if (merge.isAborted()) {
+          if (merge.rateLimiter.getAbort()) {
             if (infoStream.isEnabled("IW")) {
               infoStream.message("IW", "abort merge after building CFS");
             }
+            Set<String> cfsFiles = new HashSet<>(trackingCFSDir.getCreatedFiles());
             for (String cfsFile : cfsFiles) {
               deleter.deleteFile(cfsFile);
             }
@@ -4470,36 +4510,34 @@
    * deletion files, this SegmentInfo must not reference such files when this
    * method is called, because they are not allowed within a compound file.
    */
-  static final Collection<String> createCompoundFile(InfoStream infoStream, Directory directory, CheckAbort checkAbort, final SegmentInfo info, IOContext context)
+  static final void createCompoundFile(InfoStream infoStream, TrackingDirectoryWrapper directory, final SegmentInfo info, IOContext context)
           throws IOException {
 
-    // TODO: use trackingdirectorywrapper instead of files() to know which files to delete when things fail:
-    String cfsFiles[] = info.getCodec().compoundFormat().files(info);
+    // maybe this check is not needed, but why take the risk?
+    if (!directory.getCreatedFiles().isEmpty()) {
+      throw new IllegalStateException("pass a clean trackingdir for CFS creation");
+    }
     
     if (infoStream.isEnabled("IW")) {
       infoStream.message("IW", "create compound file");
     }
-    // Now merge all added files
-    Collection<String> files = info.files();
-    
+    // Now merge all added files
     boolean success = false;
     try {
-      info.getCodec().compoundFormat().write(directory, info, files, checkAbort, context);
+      info.getCodec().compoundFormat().write(directory, info, context);
       success = true;
     } finally {
       if (!success) {
-        IOUtils.deleteFilesIgnoringExceptions(directory, cfsFiles);
+        Set<String> cfsFiles = new HashSet<>(directory.getCreatedFiles());
+        for (String file : cfsFiles) {
+          IOUtils.deleteFilesIgnoringExceptions(directory, file);
+        }
       }
     }
 
     // Replace all previous files with the CFS/CFE files:
-    Set<String> siFiles = new HashSet<>();
-    for (String cfsFile : cfsFiles) {
-      siFiles.add(cfsFile);
-    }
+    Set<String> siFiles = new HashSet<>(directory.getCreatedFiles());
     info.setFiles(siFiles);
-
-    return files;
   }
   
   /**
@@ -4528,7 +4566,9 @@
     try {
       purge(forcePurge);
     } finally {
-      applyAllDeletesAndUpdates();
+      if (applyAllDeletesAndUpdates()) {
+        maybeMerge(config.getMergePolicy(), MergeTrigger.SEGMENT_FLUSH, UNBOUNDED_MAX_MERGE_SEGMENTS);
+      }
       flushCount.incrementAndGet();
     }
   }
@@ -4642,4 +4682,28 @@
 
     return v;
   }
+
+  /** Wraps the incoming {@link Directory} so that we assign a per-thread
+   *  {@link MergeRateLimiter} to all created {@link IndexOutput}s. */
+  private Directory addMergeRateLimiters(Directory in) {
+    return new FilterDirectory(in) {
+      @Override
+      public IndexOutput createOutput(String name, IOContext context) throws IOException {
+        ensureOpen();
+
+        // Paranoia defense: if this trips we have a bug somewhere...
+        IndexWriter.this.ensureOpen(false);
+
+        // This Directory is only supposed to be used during merging,
+        // so all writes should have MERGE context, else there is a bug 
+        // somewhere that is failing to pass down the right IOContext:
+        assert context.context == IOContext.Context.MERGE: "got context=" + context.context;
+
+        MergeRateLimiter rateLimiter = rateLimiters.get();
+        assert rateLimiter != null;
+
+        return new RateLimitedIndexOutput(rateLimiter, in.createOutput(name, context));
+      }
+    };
+  }
 }
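The wrapper returned by addMergeRateLimiters depends on a per-thread handoff: the thread about to run a merge first stores that merge's limiter in rateLimiters, and every IndexOutput it then opens through mergeDirectory is throttled by it. A condensed restatement of that flow (not verbatim IndexWriter code; fileName is a placeholder):

    // On the merge thread, before any merge IO happens:
    rateLimiters.set(merge.rateLimiter);

    // Any subsequent write on this thread goes through the wrapped directory:
    IOContext context = new IOContext(merge.getMergeInfo());
    IndexOutput out = mergeDirectory.createOutput(fileName, context);

    // createOutput consults rateLimiters.get() for this same thread and wraps
    // the real output in a RateLimitedIndexOutput, which pauses once the merge
    // exceeds its MB/sec budget.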
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
index c5be1de..89ace39 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
@@ -521,7 +521,7 @@
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder(super.toString());
-    sb.append("writer=").append(writer).append("\n");
+    sb.append("writer=").append(writer.get()).append("\n");
     return sb.toString();
   }
   
diff --git a/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsAndPositionsEnum.java b/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsAndPositionsEnum.java
index bcc3735..8fd316a 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsAndPositionsEnum.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsAndPositionsEnum.java
@@ -17,8 +17,8 @@
  * limitations under the License.
  */
 
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.index.MultiDocsAndPositionsEnum.EnumWithSlice;
+import org.apache.lucene.util.BytesRef;
 
 import java.io.IOException;
 
@@ -97,13 +97,6 @@
 
       int doc = current.nextDoc();
       if (doc != NO_MORE_DOCS) {
-
-        mergeState.checkAbortCount++;
-        if (mergeState.checkAbortCount > 60000) {
-          mergeState.checkAbort.work(mergeState.checkAbortCount/5.0);
-          mergeState.checkAbortCount = 0;
-        }
-
         // compact deletions
         doc = currentMap.get(doc);
         if (doc == -1) {
diff --git a/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsEnum.java b/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsEnum.java
index aeaaa13..5e145da 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsEnum.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MappingMultiDocsEnum.java
@@ -98,13 +98,6 @@
 
       int doc = current.nextDoc();
       if (doc != NO_MORE_DOCS) {
-
-        mergeState.checkAbortCount++;
-        if (mergeState.checkAbortCount > 60000) {
-          mergeState.checkAbort.work(mergeState.checkAbortCount/5.0);
-          mergeState.checkAbortCount = 0;
-        }
-
         // compact deletions
         doc = currentMap.get(doc);
         if (doc == -1) {
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
index 1bd7dda..e328393 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
@@ -17,16 +17,17 @@
  * limitations under the License.
  */
 
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.MergeInfo;
-import org.apache.lucene.util.FixedBitSet;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MergeInfo;
+import org.apache.lucene.store.RateLimiter;
+import org.apache.lucene.util.FixedBitSet;
+
 /**
  * <p>Expert: a MergePolicy determines the sequence of
  * primitive merge operations.</p>
@@ -107,11 +108,14 @@
     /** Segments to be merged. */
     public final List<SegmentCommitInfo> segments;
 
+    /** A private {@link RateLimiter} for this merge, used to rate-limit writes and to abort the merge. */
+    public final MergeRateLimiter rateLimiter;
+
+    volatile long mergeStartNS = -1;
+
     /** Total number of documents in segments to be merged, not accounting for deletions. */
     public final int totalDocCount;
-    boolean aborted;
     Throwable error;
-    boolean paused;
 
     /** Sole constructor.
      * @param segments List of {@link SegmentCommitInfo}s
@@ -127,6 +131,8 @@
         count += info.info.getDocCount();
       }
       totalDocCount = count;
+
+      rateLimiter = new MergeRateLimiter(this);
     }
 
     /** Called by {@link IndexWriter} after the merge is done and all readers have been closed. */
@@ -139,12 +145,12 @@
      *  reorders doc IDs, it must override {@link #getDocMap} too so that
      *  deletes that happened during the merge can be applied to the newly
      *  merged segment. */
-    public List<LeafReader> getMergeReaders() throws IOException {
+    public List<CodecReader> getMergeReaders() throws IOException {
       if (readers == null) {
         throw new IllegalStateException("IndexWriter has not initialized readers from the segment infos yet");
       }
-      final List<LeafReader> readers = new ArrayList<>(this.readers.size());
-      for (LeafReader reader : this.readers) {
+      final List<CodecReader> readers = new ArrayList<>(this.readers.size());
+      for (SegmentReader reader : this.readers) {
         if (reader.numDocs() > 0) {
           readers.add(reader);
         }
@@ -186,68 +192,16 @@
       return error;
     }
 
-    /** Mark this merge as aborted.  If this is called
-     *  before the merge is committed then the merge will
-     *  not be committed. */
-    synchronized void abort() {
-      aborted = true;
-      notifyAll();
-    }
-
-    /** Returns true if this merge was aborted. */
-    synchronized boolean isAborted() {
-      return aborted;
-    }
-
-    /** Called periodically by {@link IndexWriter} while
-     *  merging to see if the merge is aborted. */
-    public synchronized void checkAborted(Directory dir) throws MergeAbortedException {
-      if (aborted) {
-        throw new MergeAbortedException("merge is aborted: " + segString(dir));
-      }
-
-      while (paused) {
-        try {
-          // In theory we could wait() indefinitely, but we
-          // do 250 msec, defensively
-          wait(250);
-        } catch (InterruptedException ie) {
-          throw new RuntimeException(ie);
-        }
-        if (aborted) {
-          throw new MergeAbortedException("merge is aborted: " + segString(dir));
-        }
-      }
-    }
-
-    /** Set or clear whether this merge is paused paused (for example
-     *  {@link ConcurrentMergeScheduler} will pause merges
-     *  if too many are running). */
-    synchronized public void setPause(boolean paused) {
-      this.paused = paused;
-      if (!paused) {
-        // Wakeup merge thread, if it's waiting
-        notifyAll();
-      }
-    }
-
-    /** Returns true if this merge is paused.
-     *
-     *  @see #setPause(boolean) */
-    synchronized public boolean getPause() {
-      return paused;
-    }
-
     /** Returns a readable description of the current merge
      *  state. */
-    public String segString(Directory dir) {
+    public String segString() {
       StringBuilder b = new StringBuilder();
       final int numSegments = segments.size();
       for(int i=0;i<numSegments;i++) {
         if (i > 0) {
           b.append(' ');
         }
-        b.append(segments.get(i).toString(dir, 0));
+        b.append(segments.get(i).toString());
       }
       if (info != null) {
         b.append(" into ").append(info.info.name);
@@ -255,7 +209,7 @@
       if (maxNumSegments != -1) {
         b.append(" [maxNumSegments=" + maxNumSegments + "]");
       }
-      if (aborted) {
+      if (rateLimiter.getAbort()) {
         b.append(" [ABORTED]");
       }
       return b.toString();
@@ -321,7 +275,7 @@
       b.append("MergeSpec:\n");
       final int count = merges.size();
       for(int i=0;i<count;i++) {
-        b.append("  ").append(1 + i).append(": ").append(merges.get(i).segString(dir));
+        b.append("  ").append(1 + i).append(": ").append(merges.get(i).segString());
       }
       return b.toString();
     }
@@ -538,5 +492,4 @@
     v *= 1024 * 1024;
     this.maxCFSSegmentSize = v > Long.MAX_VALUE ? Long.MAX_VALUE : (long) v;
   }
-
 }
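With pause and abort state folded into the per-merge rate limiter, the old abort()/isAborted()/checkAborted(Directory) trio reduces to calls on OneMerge.rateLimiter, using only methods introduced in this change:

    // Aborting side (rollback, close): flag the merge and wake any paused thread.
    merge.rateLimiter.setAbort();

    // Merging side, where bailing out quietly is fine:
    if (merge.rateLimiter.getAbort()) {
      return;
    }

    // Merging side, at points that must not proceed after an abort:
    merge.rateLimiter.checkAbort(); // throws MergePolicy.MergeAbortedException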
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java b/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java
new file mode 100644
index 0000000..9b19560
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeRateLimiter.java
@@ -0,0 +1,197 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.store.RateLimiter;
+import org.apache.lucene.util.ThreadInterruptedException;
+
+import static org.apache.lucene.store.RateLimiter.SimpleRateLimiter;
+
+/** This is the {@link RateLimiter} that {@link IndexWriter} assigns to each running merge, to 
+ *  give {@link MergeScheduler}s ionice-like control.
+ *
+ *  This is similar to {@link SimpleRateLimiter}, except it's merge-private,
+ *  it will wake up if its rate changes while it's paused, it tracks how
+ *  much time it spent stopped and paused, and it supports aborting.
+ *
+ *  @lucene.internal */
+
+public class MergeRateLimiter extends RateLimiter {
+
+  private final static int MIN_PAUSE_CHECK_MSEC = 25;
+  volatile long totalBytesWritten;
+
+  double mbPerSec;
+  private long lastNS;
+  private long minPauseCheckBytes;
+  private boolean abort;
+  long totalPausedNS;
+  long totalStoppedNS;
+  final MergePolicy.OneMerge merge;
+
+  /** Returned by {@link #maybePause}. */
+  private enum PauseResult {NO, STOPPED, PAUSED}
+
+  /** Sole constructor. */
+  public MergeRateLimiter(MergePolicy.OneMerge merge) {
+    this.merge = merge;
+
+    // Initially no IO limit; use setter here so minPauseCheckBytes is set:
+    setMBPerSec(Double.POSITIVE_INFINITY);
+  }
+
+  @Override
+  public synchronized void setMBPerSec(double mbPerSec) {
+    // 0.0 is allowed: it means the merge is paused
+    if (mbPerSec < 0.0) {
+      throw new IllegalArgumentException("mbPerSec must be non-negative; got: " + mbPerSec);
+    }
+    this.mbPerSec = mbPerSec;
+    // NOTE: Double.POSITIVE_INFINITY casts to Long.MAX_VALUE
+    minPauseCheckBytes = Math.min(1024*1024, (long) ((MIN_PAUSE_CHECK_MSEC / 1000.0) * mbPerSec * 1024 * 1024));
+    assert minPauseCheckBytes >= 0;
+    notify();
+  }
+
+  @Override
+  public synchronized double getMBPerSec() {
+    return mbPerSec;
+  }
+
+  /** Returns total bytes written by this merge. */
+  public long getTotalBytesWritten() {
+    return totalBytesWritten;
+  }
+
+  @Override
+  public long pause(long bytes) throws MergePolicy.MergeAbortedException {
+
+    totalBytesWritten += bytes;
+
+    long startNS = System.nanoTime();
+    long curNS = startNS;
+
+    // While loop because 1) Thread.wait doesn't always sleep long
+    // enough, and 2) we wake up and check again when our rate limit
+    // is changed while we were pausing:
+    long pausedNS = 0;
+    while (true) {
+      PauseResult result = maybePause(bytes, curNS);
+      if (result == PauseResult.NO) {
+        // Set to curNS, not targetNS, to enforce the instant rate, not
+        // the "averaaged over all history" rate:
+        lastNS = curNS;
+        break;
+      }
+      curNS = System.nanoTime();
+      long ns = curNS - startNS;
+      startNS = curNS;
+
+      // Separately track when merge was stopped vs rate limited:
+      if (result == PauseResult.STOPPED) {
+        totalStoppedNS += ns;
+      } else {
+        assert result == PauseResult.PAUSED;
+        totalPausedNS += ns;
+      }
+      pausedNS += ns;
+    }
+
+    return pausedNS;
+  }
+
+  /** Total NS merge was stopped. */
+  public synchronized long getTotalStoppedNS() {
+    return totalStoppedNS;
+  } 
+
+  /** Total NS merge was paused to rate limit IO. */
+  public synchronized long getTotalPausedNS() {
+    return totalPausedNS;
+  } 
+
+  /** Returns NO if no pause happened, STOPPED if we paused because the rate was 0.0 (the merge is stopped), or PAUSED if we paused to honor the rate limit. */
+  private synchronized PauseResult maybePause(long bytes, long curNS) throws MergePolicy.MergeAbortedException {
+
+    // Now is a good time to abort the merge:
+    checkAbort();
+
+    double secondsToPause = (bytes/1024./1024.) / mbPerSec;
+
+    // Time we should sleep until; this is purely an instantaneous
+    // rate (it just adds seconds onto the last time we paused to);
+    // maybe we should also offer a decayed recent-history rate?
+    long targetNS = lastNS + (long) (1000000000 * secondsToPause);
+
+    long curPauseNS = targetNS - curNS;
+
+    // NOTE: except maybe on real-time JVMs, minimum realistic
+    // wait/sleep time is 1 msec; if you pass just 1 nsec the impl
+    // rounds up to 1 msec, so we don't bother unless it's > 2 msec:
+
+    if (curPauseNS <= 2000000) {
+      return PauseResult.NO;
+    }
+
+    // Defensive: sleep for at most 250 msec; the while loop in pause() will call us again if we should keep sleeping:
+    if (curPauseNS > 250L*1000000) {
+      curPauseNS = 250L*1000000;
+    }
+
+    int sleepMS = (int) (curPauseNS / 1000000);
+    int sleepNS = (int) (curPauseNS % 1000000);
+
+    double rate = mbPerSec;
+
+    try {
+      // CMS can wake us up here if it changes our target rate:
+      wait(sleepMS, sleepNS);
+    } catch (InterruptedException ie) {
+      throw new ThreadInterruptedException(ie);
+    }
+
+    if (rate == 0.0) {
+      return PauseResult.STOPPED;
+    } else {
+      return PauseResult.PAUSED;
+    }
+  }
+
+  /** Throws {@link MergePolicy.MergeAbortedException} if this merge was aborted. */
+  public synchronized void checkAbort() throws MergePolicy.MergeAbortedException {
+    if (abort) {
+      throw new MergePolicy.MergeAbortedException("merge is aborted: " + merge.segString());
+    }
+  }
+
+  /** Mark this merge aborted. */
+  public synchronized void setAbort() {
+    abort = true;
+    notify();
+  }
+
+  /** Returns true if this merge was aborted. */
+  public synchronized boolean getAbort() {
+    return abort;
+  }
+
+  @Override
+  public long getMinPauseCheckBytes() {
+    return minPauseCheckBytes;
+  }
+}
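To make the pause arithmetic concrete: at 5 MB/sec, minPauseCheckBytes works out to min(1 MB, 0.025 s * 5 MB/s) = 128 KB, and each 1 MB written advances targetNS by 0.2 sec beyond lastNS. A hedged usage sketch (the null OneMerge mirrors the addIndexes case above):

    void rateLimitDemo() throws MergePolicy.MergeAbortedException {
      MergeRateLimiter limiter = new MergeRateLimiter(null);
      limiter.setMBPerSec(5.0);

      // The first call typically cannot pause: lastNS is still 0, which is far
      // in the past of System.nanoTime's clock, so it only anchors lastNS:
      limiter.pause(1024 * 1024);

      // An immediate second 1 MB write must absorb roughly 0.2 sec of pauses,
      // taken in slices of at most 250 msec, to hold the 5 MB/sec budget:
      long pausedNS = limiter.pause(1024 * 1024);
    }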
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java b/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
index b248634..ae451ce 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeScheduler.java
@@ -20,6 +20,8 @@
 import java.io.Closeable;
 import java.io.IOException;
 
+import org.apache.lucene.util.InfoStream;
+
 /** <p>Expert: {@link IndexWriter} uses an instance
  *  implementing this interface to execute the merges
  *  selected by a {@link MergePolicy}.  The default
@@ -46,4 +48,34 @@
   /** Close this MergeScheduler. */
   @Override
   public abstract void close() throws IOException;
+
+  /** For messages about merge scheduling */
+  protected InfoStream infoStream;
+
+  /** IndexWriter calls this on init. */
+  final void setInfoStream(InfoStream infoStream) {
+    this.infoStream = infoStream;
+  }
+
+  /**
+   * Returns true if infoStream messages are enabled. This method is usually used in
+   * conjunction with {@link #message(String)}:
+   * 
+   * <pre class="prettyprint">
+   * if (verbose()) {
+   *   message(&quot;your message&quot;);
+   * }
+   * </pre>
+   */
+  protected boolean verbose() {
+    return infoStream != null && infoStream.isEnabled("MS");
+  }
+ 
+  /**
+   * Outputs the given message - this method assumes {@link #verbose()} was
+   * called and returned true.
+   */
+  protected void message(String message) {
+    infoStream.message("MS", message);
+  }
 }
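A hedged sketch of a scheduler built on this plumbing; it assumes the abstract merge(IndexWriter, MergeTrigger, boolean) entry point of this codebase and the package-private IndexWriter.getNextMerge() that SerialMergeScheduler relies on, so it must live in org.apache.lucene.index:

    package org.apache.lucene.index;

    import java.io.IOException;

    /** Hypothetical scheduler, for illustration only: runs merges serially and
     *  narrates each one through the new verbose()/message() helpers. */
    public class LoggingMergeScheduler extends MergeScheduler {
      @Override
      public synchronized void merge(IndexWriter writer, MergeTrigger trigger, boolean newMergesFound) throws IOException {
        MergePolicy.OneMerge merge;
        while ((merge = writer.getNextMerge()) != null) {
          if (verbose()) {
            message("now run merge: " + merge.segString());
          }
          writer.merge(merge);
        }
      }

      @Override
      public void close() {}
    }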
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeState.java b/lucene/core/src/java/org/apache/lucene/index/MergeState.java
index 93f17bb..706fde7 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeState.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeState.java
@@ -18,7 +18,6 @@
  */
 
 import java.io.IOException;
-import java.util.Iterator;
 import java.util.List;
 
 import org.apache.lucene.codecs.DocValuesProducer;
@@ -74,21 +73,13 @@
   /** Max docs per reader */
   public final int[] maxDocs;
 
-  /** Holds the CheckAbort instance, which is invoked
-   *  periodically to see if the merge has been aborted. */
-  public final CheckAbort checkAbort;
-
   /** InfoStream for debugging messages. */
   public final InfoStream infoStream;
 
   public final FieldTypes fieldTypes;
 
-  /** Counter used for periodic calls to checkAbort
-   * @lucene.internal */
-  public int checkAbortCount;
-
   /** Sole constructor. */
-  MergeState(FieldTypes fieldTypes, List<LeafReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, CheckAbort checkAbort) throws IOException {
+  MergeState(FieldTypes fieldTypes, List<CodecReader> readers, SegmentInfo segmentInfo, InfoStream infoStream) throws IOException {
 
     this.fieldTypes = fieldTypes;
     int numReaders = readers.size();
@@ -103,7 +94,7 @@
     fieldInfos = new FieldInfos[numReaders];
     liveDocs = new Bits[numReaders];
     for(int i=0;i<numReaders;i++) {
-      final LeafReader reader = readers.get(i);
+      final CodecReader reader = readers.get(i);
       FieldTypes readerFieldTypes = reader.getFieldTypes();
       if (readerFieldTypes != null && fieldTypes != readerFieldTypes) {
         fieldTypes.addAll(readerFieldTypes);
@@ -113,224 +104,43 @@
       liveDocs[i] = reader.getLiveDocs();
       fieldInfos[i] = reader.getFieldInfos();
 
-      NormsProducer normsProducer;
-      DocValuesProducer docValuesProducer;
-      StoredFieldsReader storedFieldsReader;
-      TermVectorsReader termVectorsReader;
-      FieldsProducer fieldsProducer;
-      if (reader instanceof SegmentReader) {
-        SegmentReader segmentReader = (SegmentReader) reader;
-        normsProducer = segmentReader.getNormsReader();
-        if (normsProducer != null) {
-          normsProducer = normsProducer.getMergeInstance();
-        }
-        docValuesProducer = segmentReader.getDocValuesReader();
-        if (docValuesProducer != null) {
-          docValuesProducer = docValuesProducer.getMergeInstance();
-        }
-        storedFieldsReader = segmentReader.getFieldsReader();
-        if (storedFieldsReader != null) {
-          storedFieldsReader = storedFieldsReader.getMergeInstance();
-        }
-        termVectorsReader = segmentReader.getTermVectorsReader();
-        if (termVectorsReader != null) {
-          termVectorsReader = termVectorsReader.getMergeInstance();
-        }
-        fieldsProducer = segmentReader.fields().getMergeInstance();
-      } else {
-        // A "foreign" reader
-        normsProducer = readerToNormsProducer(reader);
-        docValuesProducer = readerToDocValuesProducer(reader);
-        storedFieldsReader = readerToStoredFieldsReader(reader);
-        termVectorsReader = readerToTermVectorsReader(reader);
-        fieldsProducer = readerToFieldsProducer(reader);
+      normsProducers[i] = reader.getNormsReader();
+      if (normsProducers[i] != null) {
+        normsProducers[i] = normsProducers[i].getMergeInstance();
       }
-
-      normsProducers[i] = normsProducer;
-      docValuesProducers[i] = docValuesProducer;
-      storedFieldsReaders[i] = storedFieldsReader;
-      termVectorsReaders[i] = termVectorsReader;
-      fieldsProducers[i] = fieldsProducer;
+      
+      docValuesProducers[i] = reader.getDocValuesReader();
+      if (docValuesProducers[i] != null) {
+        docValuesProducers[i] = docValuesProducers[i].getMergeInstance();
+      }
+      
+      storedFieldsReaders[i] = reader.getFieldsReader();
+      if (storedFieldsReaders[i] != null) {
+        storedFieldsReaders[i] = storedFieldsReaders[i].getMergeInstance();
+      }
+      
+      termVectorsReaders[i] = reader.getTermVectorsReader();
+      if (termVectorsReaders[i] != null) {
+        termVectorsReaders[i] = termVectorsReaders[i].getMergeInstance();
+      }
+      
+      fieldsProducers[i] = reader.getPostingsReader().getMergeInstance();
     }
 
     this.segmentInfo = segmentInfo;
     this.infoStream = infoStream;
-    this.checkAbort = checkAbort;
 
     setDocMaps(readers);
   }
 
-  private NormsProducer readerToNormsProducer(final LeafReader reader) {
-    return new NormsProducer() {
-
-      @Override
-      public NumericDocValues getNorms(FieldInfo field) throws IOException {
-        return reader.getNormValues(field.name);
-      }
-
-      @Override
-      public void checkIntegrity() throws IOException {
-        // We already checkIntegrity the entire reader up front in SegmentMerger
-      }
-
-      @Override
-      public void close() {
-      }
-
-      @Override
-      public long ramBytesUsed() {
-        return 0;
-      }
-    };
-  }
-
-  private DocValuesProducer readerToDocValuesProducer(final LeafReader reader) {
-    return new DocValuesProducer() {
-
-      @Override
-      public NumericDocValues getNumeric(FieldInfo field) throws IOException {  
-        return reader.getNumericDocValues(field.name);
-      }
-
-      @Override
-      public BinaryDocValues getBinary(FieldInfo field) throws IOException {
-        return reader.getBinaryDocValues(field.name);
-      }
-
-      @Override
-      public SortedDocValues getSorted(FieldInfo field) throws IOException {
-        return reader.getSortedDocValues(field.name);
-      }
-
-      @Override
-      public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
-        return reader.getSortedNumericDocValues(field.name);
-      }
-
-      @Override
-      public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
-        return reader.getSortedSetDocValues(field.name);
-      }
-
-      @Override
-      public Bits getDocsWithField(FieldInfo field) throws IOException {
-        return reader.getDocsWithField(field.name);
-      }
-
-      @Override
-      public void checkIntegrity() throws IOException {
-        // We already checkIntegrity the entire reader up front in SegmentMerger
-      }
-
-      @Override
-      public void close() {
-      }
-
-      @Override
-      public long ramBytesUsed() {
-        return 0;
-      }
-    };
-  }
-
-  private StoredFieldsReader readerToStoredFieldsReader(final LeafReader reader) {
-    return new StoredFieldsReader() {
-      @Override
-      public void visitDocument(int docID, StoredFieldVisitor visitor) throws IOException {
-        reader.document(docID, visitor);
-      }
-
-      @Override
-      public StoredFieldsReader clone() {
-        return readerToStoredFieldsReader(reader);
-      }
-
-      @Override
-      public void checkIntegrity() throws IOException {
-        // We already checkIntegrity the entire reader up front in SegmentMerger
-      }
-
-      @Override
-      public void close() {
-      }
-
-      @Override
-      public long ramBytesUsed() {
-        return 0;
-      }
-    };
-  }
-
-  private TermVectorsReader readerToTermVectorsReader(final LeafReader reader) {
-    return new TermVectorsReader() {
-      @Override
-      public Fields get(int docID) throws IOException {
-        return reader.getTermVectors(docID);
-      }
-
-      @Override
-      public TermVectorsReader clone() {
-        return readerToTermVectorsReader(reader);
-      }
-
-      @Override
-      public void checkIntegrity() throws IOException {
-        // We already checkIntegrity the entire reader up front in SegmentMerger
-      }
-
-      @Override
-      public void close() {
-      }
-
-      @Override
-      public long ramBytesUsed() {
-        return 0;
-      }
-    };
-  }
-
-  private FieldsProducer readerToFieldsProducer(final LeafReader reader) throws IOException {
-    final Fields fields = reader.fields();
-    return new FieldsProducer() {
-      @Override
-      public Iterator<String> iterator() {
-        return fields.iterator();
-      }
-
-      @Override
-      public Terms terms(String field) throws IOException {
-        return fields.terms(field);
-      }
-
-      @Override
-      public int size() {
-        return fields.size();
-      }
-
-      @Override
-      public void checkIntegrity() throws IOException {
-        // We already checkIntegrity the entire reader up front in SegmentMerger
-      }
-
-      @Override
-      public void close() {
-      }
-
-      @Override
-      public long ramBytesUsed() {
-        return 0;
-      }
-    };
-  }
-
   // NOTE: removes any "all deleted" readers from mergeState.readers
-  private void setDocMaps(List<LeafReader> readers) throws IOException {
+  private void setDocMaps(List<CodecReader> readers) throws IOException {
     final int numReaders = maxDocs.length;
 
     // Remap docIDs
     int docBase = 0;
     for(int i=0;i<numReaders;i++) {
-      final LeafReader reader = readers.get(i);
+      final CodecReader reader = readers.get(i);
       this.docBase[i] = docBase;
       final DocMap docMap = DocMap.build(reader);
       docMaps[i] = docMap;
@@ -341,47 +151,6 @@
   }
 
   /**
-   * Class for recording units of work when merging segments.
-   */
-  public static class CheckAbort {
-    private double workCount;
-    private final MergePolicy.OneMerge merge;
-    private final Directory dir;
-
-    /** Creates a #CheckAbort instance. */
-    public CheckAbort(MergePolicy.OneMerge merge, Directory dir) {
-      this.merge = merge;
-      this.dir = dir;
-    }
-
-    /**
-     * Records the fact that roughly units amount of work
-     * have been done since this method was last called.
-     * When adding time-consuming code into SegmentMerger,
-     * you should test different values for units to ensure
-     * that the time in between calls to merge.checkAborted
-     * is up to ~ 1 second.
-     */
-    public void work(double units) throws MergePolicy.MergeAbortedException {
-      workCount += units;
-      if (workCount >= 10000.0) {
-        merge.checkAborted(dir);
-        workCount = 0;
-      }
-    }
-
-    /** If you use this: IW.close(false) cannot abort your merge!
-     * @lucene.internal */
-    static final MergeState.CheckAbort NONE = new MergeState.CheckAbort(null, null) {
-      @Override
-      public void work(double units) {
-        // do nothing
-      }
-    };
-  }
-
-
-  /**
    * Remaps docids around deletes during merge
    */
   public static abstract class DocMap {
@@ -410,7 +179,7 @@
 
     /** Creates a {@link DocMap} instance appropriate for
      *  this reader. */
-    public static DocMap build(LeafReader reader) {
+    public static DocMap build(CodecReader reader) {
       final int maxDoc = reader.maxDoc();
       if (!reader.hasDeletions()) {
         return new NoDelDocMap(maxDoc);
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
index f147e6e..eb31c2c 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
@@ -27,11 +27,13 @@
    * Merge was triggered by a segment flush.
    */
   SEGMENT_FLUSH,
+
   /**
    * Merge was triggered by a full flush. Full flushes
    * can be caused by a commit, NRT reader reopen or a close call on the index writer.
    */
   FULL_FLUSH,
+
   /**
    * Merge has been triggered explicitly by the user.
    */
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergedPrefixCodedTermsIterator.java b/lucene/core/src/java/org/apache/lucene/index/MergedPrefixCodedTermsIterator.java
new file mode 100644
index 0000000..ff6d53a
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/MergedPrefixCodedTermsIterator.java
@@ -0,0 +1,134 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.List;
+
+import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.PriorityQueue;
+
+/** Merges multiple {@link FieldTermIterator}s */
+class MergedPrefixCodedTermsIterator implements FieldTermIterator {
+
+  private static class TermMergeQueue extends PriorityQueue<TermIterator> {
+    TermMergeQueue(int size) {
+      super(size);
+    }
+    
+    @Override
+    protected boolean lessThan(TermIterator a, TermIterator b) {
+      int cmp = a.bytes.compareTo(b.bytes);
+      if (cmp < 0) {
+        return true;
+      } else if (cmp > 0) {
+        return false;
+      } else {
+        return a.delGen() > b.delGen();
+      }
+    }
+  }
+
+  private static class FieldMergeQueue extends PriorityQueue<TermIterator> {
+    FieldMergeQueue(int size) {
+      super(size);
+    }
+    
+    @Override
+    protected boolean lessThan(TermIterator a, TermIterator b) {
+      return a.field.compareTo(b.field) < 0;
+    }
+  }
+
+  final TermMergeQueue termQueue;
+  final FieldMergeQueue fieldQueue;
+
+  public MergedPrefixCodedTermsIterator(List<PrefixCodedTerms> termsList) {
+    fieldQueue = new FieldMergeQueue(termsList.size());
+    for (PrefixCodedTerms terms : termsList) {
+      TermIterator iter = terms.iterator();
+      iter.next();
+      if (iter.field != null) {
+        fieldQueue.add(iter);
+      }
+    }
+
+    termQueue = new TermMergeQueue(termsList.size());
+  }
+
+  String field;
+
+  @Override
+  public boolean next() {
+    if (termQueue.size() == 0) {
+      // Current field is done:
+      if (fieldQueue.size() == 0) {
+        // No more fields:
+        field = null;
+        return true;
+      }
+
+      // Transfer all iterators on the next field into the term queue:
+      TermIterator top = fieldQueue.pop();
+      termQueue.add(top);
+      assert top.field() != null;
+
+      while (fieldQueue.size() != 0 && fieldQueue.top().field.equals(top.field)) {
+        termQueue.add(fieldQueue.pop());
+      }
+
+      field = top.field;
+      return true;
+    } else {
+      TermIterator top = termQueue.top();
+      if (top.next()) {
+        // New field
+        termQueue.pop();
+        if (top.field() != null) {
+          fieldQueue.add(top);
+        }
+      } else {
+        termQueue.updateTop();
+      }
+
+      if (termQueue.size() != 0) {
+        // Still terms left in this field
+        return false;
+      } else {
+        // Recurse (just once) to go to next field:
+        return next();
+      }
+    }
+  }
+
+  @Override
+  public BytesRef term() {
+    return termQueue.top().bytes;
+  }
+
+  @Override
+  public String field() {
+    return field;
+  }
+
+  @Override
+  public long delGen() {
+    return termQueue.top().delGen();
+  }
+}
+
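Given the boolean next() convention above (true signals a field change, and a null field() signals exhaustion), a hedged sketch of draining the merged view; the packet variables are placeholders:

    List<PrefixCodedTerms> packets = Arrays.asList(packetGen1, packetGen2);
    MergedPrefixCodedTermsIterator it = new MergedPrefixCodedTermsIterator(packets);
    while (true) {
      it.next();
      if (it.field() == null) {
        break; // all fields exhausted
      }
      // Consume it.field(), it.term() and it.delGen() here; on equal terms the
      // packet with the higher delGen sorts first, per TermMergeQueue.lessThan.
    }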
diff --git a/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java b/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
index 9954031..c4a6bc1 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
@@ -46,5 +46,4 @@
   public MergeScheduler clone() {
     return this;
   }
-
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java b/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java
index d3654c2..3e5f4e7 100644
--- a/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java
+++ b/lucene/core/src/java/org/apache/lucene/index/PrefixCodedTerms.java
@@ -18,7 +18,6 @@
  */
 
 import java.io.IOException;
-import java.util.Iterator;
 
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.RAMFile;
@@ -32,9 +31,10 @@
  * Prefix codes term instances (prefixes are shared)
  * @lucene.experimental
  */
-class PrefixCodedTerms implements Iterable<Term>, Accountable {
+class PrefixCodedTerms implements Accountable {
   final RAMFile buffer;
-  
+  private long delGen;
+
   private PrefixCodedTerms(RAMFile buffer) {
     this.buffer = buffer;
   }
@@ -44,56 +44,9 @@
     return buffer.ramBytesUsed();
   }
 
-  /** @return iterator over the bytes */
-  @Override
-  public Iterator<Term> iterator() {
-    return new PrefixCodedTermsIterator();
-  }
-  
-  class PrefixCodedTermsIterator implements Iterator<Term> {
-    final IndexInput input;
-    String field = "";
-    BytesRefBuilder bytes = new BytesRefBuilder();
-    Term term = new Term(field, bytes.get());
-
-    PrefixCodedTermsIterator() {
-      try {
-        input = new RAMInputStream("PrefixCodedTermsIterator", buffer);
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-    }
-
-    @Override
-    public boolean hasNext() {
-      return input.getFilePointer() < input.length();
-    }
-    
-    @Override
-    public Term next() {
-      assert hasNext();
-      try {
-        int code = input.readVInt();
-        if ((code & 1) != 0) {
-          // new field
-          field = input.readString();
-        }
-        int prefix = code >>> 1;
-        int suffix = input.readVInt();
-        bytes.grow(prefix + suffix);
-        input.readBytes(bytes.bytes(), prefix, suffix);
-        bytes.setLength(prefix + suffix);
-        term.set(field, bytes.get());
-        return term;
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-    }
-    
-    @Override
-    public void remove() {
-      throw new UnsupportedOperationException();
-    }
+  /** Records del gen for this packet. */
+  public void setDelGen(long delGen) {
+    this.delGen = delGen;
   }
   
   /** Builds a PrefixCodedTerms: call add repeatedly, then finish. */
@@ -150,4 +103,71 @@
       return pos1;
     }
   }
+
+  public static class TermIterator implements FieldTermIterator {
+    final IndexInput input;
+    final BytesRefBuilder builder = new BytesRefBuilder();
+    final BytesRef bytes = builder.get();
+    final long end;
+    final long delGen;
+    String field = "";
+
+    public TermIterator(long delGen, RAMFile buffer) {
+      try {
+        input = new RAMInputStream("MergedPrefixCodedTermsIterator", buffer);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+      end = input.length();
+      this.delGen = delGen;
+    }
+
+    @Override
+    public boolean next() {
+      if (input.getFilePointer() < end) {
+        try {
+          int code = input.readVInt();
+          boolean newField = (code & 1) != 0;
+          if (newField) {
+            field = input.readString();
+          }
+          int prefix = code >>> 1;
+          int suffix = input.readVInt();
+          readTermBytes(prefix, suffix);
+          return newField;
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      } else {
+        field = null;
+        return true;
+      }
+    }
+
+    // TODO: maybe we should freeze to FST or automaton instead?
+    private void readTermBytes(int prefix, int suffix) throws IOException {
+      builder.grow(prefix + suffix);
+      input.readBytes(builder.bytes(), prefix, suffix);
+      builder.setLength(prefix + suffix);
+    }
+
+    @Override
+    public BytesRef term() {
+      return bytes;
+    }
+
+    @Override
+    public String field() {
+      return field;
+    }
+
+    @Override
+    public long delGen() {
+      return delGen;
+    }
+  }
+
+  public TermIterator iterator() {
+    return new TermIterator(delGen, buffer);
+  }
 }
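For reference, the write path the Builder comment above alludes to ("call add repeatedly, then finish"), followed by a drain of the resulting packet; a hedged sketch with placeholder terms:

    PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
    b.add(new Term("id", "doc1")); // terms must be added in sorted order
    b.add(new Term("id", "doc2"));
    PrefixCodedTerms terms = b.finish();
    terms.setDelGen(17); // stamp the packet's delete generation (placeholder value)

    PrefixCodedTerms.TermIterator it = terms.iterator();
    while (true) {
      it.next();
      if (it.field() == null) {
        break; // exhausted
      }
      // it.term() holds the current term's bytes; it.delGen() returns 17 here.
    }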
diff --git a/lucene/core/src/java/org/apache/lucene/index/ReindexingReader.java b/lucene/core/src/java/org/apache/lucene/index/ReindexingReader.java
index b6a74df..bdca81d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ReindexingReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ReindexingReader.java
@@ -539,15 +539,20 @@
       }
 
       @Override
-      public List<LeafReader> getMergeReaders() throws IOException {
+      public List<CodecReader> getMergeReaders() throws IOException {
         if (parallelReaders == null) {
           parallelReaders = new ArrayList<>();
-          for (LeafReader reader : super.getMergeReaders()) {
-            parallelReaders.add(getCurrentReader(reader, schemaGen));
+          for (CodecReader reader : super.getMergeReaders()) {
+            parallelReaders.add(getCurrentReader((SegmentReader) reader, schemaGen));
           }
         }
 
-        return parallelReaders;
+        // TODO: fix ParallelLeafReader, if this is a good use case
+        List<CodecReader> mergeReaders = new ArrayList<>();
+        for (LeafReader reader : parallelReaders) {
+          mergeReaders.add(SlowCodecReaderWrapper.wrap(reader));
+        }
+        return mergeReaders;
       }
 
       @Override
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
index 89be9e6..712f4a9 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentCoreReaders.java
@@ -19,9 +19,7 @@
 
 import java.io.IOException;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.LinkedHashSet;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -35,16 +33,12 @@
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.CloseableThreadLocal;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.RamUsageEstimator;
 
 /** Holds core readers that are shared (unchanged) when
  * SegmentReader is cloned or reopened */
-final class SegmentCoreReaders implements Accountable {
-
-  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(SegmentCoreReaders.class);
+final class SegmentCoreReaders {
 
   // Counts how many other readers share the core objects
   // (freqStream, proxStream, tis, etc.) of this reader;
@@ -84,13 +78,6 @@
     }
   };
 
-  final CloseableThreadLocal<Map<String,Object>> normsLocal = new CloseableThreadLocal<Map<String,Object>>() {
-    @Override
-    protected Map<String,Object> initialValue() {
-      return new HashMap<>();
-    }
-  };
-
   private final Set<CoreClosedListener> coreClosedListeners = 
       Collections.synchronizedSet(new LinkedHashSet<CoreClosedListener>());
   
@@ -157,31 +144,12 @@
     throw new AlreadyClosedException("SegmentCoreReaders is already closed");
   }
 
-  NumericDocValues getNormValues(FieldInfos infos, String field) throws IOException {
-    Map<String,Object> normFields = normsLocal.get();
-
-    NumericDocValues norms = (NumericDocValues) normFields.get(field);
-    if (norms != null) {
-      return norms;
-    } else {
-      FieldInfo fi = infos.fieldInfo(field);
-      if (fi == null || !fi.hasNorms()) {
-        // Field does not exist or does not index norms
-        return null;
-      }
-      assert normsProducer != null;
-      norms = normsProducer.getNorms(fi);
-      normFields.put(field, norms);
-      return norms;
-    }
-  }
-
   void decRef() throws IOException {
     if (ref.decrementAndGet() == 0) {
 //      System.err.println("--- closing core readers");
       Throwable th = null;
       try {
-        IOUtils.close(termVectorsLocal, fieldsReaderLocal, normsLocal, fields, termVectorsReaderOrig, fieldsReaderOrig,
+        IOUtils.close(termVectorsLocal, fieldsReaderLocal, fields, termVectorsReaderOrig, fieldsReaderOrig,
             cfsReader, normsProducer);
       } catch (Throwable throwable) {
         th = throwable;
@@ -217,14 +185,4 @@
   void removeCoreClosedListener(CoreClosedListener listener) {
     coreClosedListeners.remove(listener);
   }
-
-  // TODO: remove this, it can just be on SR
-  @Override
-  public long ramBytesUsed() {
-    return BASE_RAM_BYTES_USED +
-        ((normsProducer!=null) ? normsProducer.ramBytesUsed() : 0) +
-        ((fields!=null) ? fields.ramBytesUsed() : 0) + 
-        ((fieldsReaderOrig!=null)? fieldsReaderOrig.ramBytesUsed() : 0) + 
-        ((termVectorsReaderOrig!=null) ? termVectorsReaderOrig.ramBytesUsed() : 0);
-  }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
index a8dcfa1..9aec81d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
@@ -49,18 +49,12 @@
   private final FieldInfos.Builder fieldInfosBuilder;
 
   // note, just like in codec apis Directory 'dir' is NOT the same as segmentInfo.dir!!
-  SegmentMerger(FieldTypes fieldTypes, List<LeafReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir,
-                MergeState.CheckAbort checkAbort, FieldInfos.FieldNumbers fieldNumbers, IOContext context) throws IOException {
-    // validate incoming readers
-    for (LeafReader reader : readers) {
-      if ((reader instanceof SegmentReader) == false) {
-        // We only validate foreign readers up front: each index component
-        // calls .checkIntegrity itself for each incoming producer
-        reader.checkIntegrity();
-      }
+  SegmentMerger(FieldTypes fieldTypes, List<CodecReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir,
+                FieldInfos.FieldNumbers fieldNumbers, IOContext context) throws IOException {
+    if (context.context != IOContext.Context.MERGE) {
+      throw new IllegalArgumentException("IOContext.context should be MERGE; got: " + context.context);
     }
-
-    mergeState = new MergeState(fieldTypes, readers, segmentInfo, infoStream, checkAbort);
+    mergeState = new MergeState(fieldTypes, readers, segmentInfo, infoStream);
     directory = dir;
     this.codec = segmentInfo.getCodec();
     this.context = context;
@@ -82,12 +76,6 @@
     if (!shouldMerge()) {
       throw new IllegalStateException("Merge would result in 0 document segment");
     }
-    // NOTE: it's important to add calls to
-    // checkAbort.work(...) if you make any changes to this
-    // method that will spend alot of time.  The frequency
-    // of this check impacts how long
-    // IndexWriter.close(false) takes to actually stop the
-    // background merge threads.
     mergeFieldInfos();
     long t0 = 0;
     if (mergeState.infoStream.isEnabled("SM")) {
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
index 9176cf9..81c5ada 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
@@ -18,12 +18,7 @@
  */
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.DocValuesProducer;
@@ -36,12 +31,7 @@
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.util.Accountable;
-import org.apache.lucene.util.Accountables;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.CloseableThreadLocal;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.RamUsageEstimator;
 
 /**
  * IndexReader implementation over a single segment. 
@@ -50,12 +40,8 @@
  * may share the same core data.
  * @lucene.experimental
  */
-public final class SegmentReader extends LeafReader implements Accountable {
-
-  private static final long BASE_RAM_BYTES_USED =
-        RamUsageEstimator.shallowSizeOfInstance(SegmentReader.class)
-      + RamUsageEstimator.shallowSizeOfInstance(SegmentDocValues.class);
-        
+public final class SegmentReader extends CodecReader {
+
   private final SegmentCommitInfo si;
   private final Bits liveDocs;
 
@@ -68,20 +54,6 @@
   final SegmentDocValues segDocValues;
   final FieldTypes fieldTypes;
   
-  final CloseableThreadLocal<Map<String,Object>> docValuesLocal = new CloseableThreadLocal<Map<String,Object>>() {
-    @Override
-    protected Map<String,Object> initialValue() {
-      return new HashMap<>();
-    }
-  };
-
-  final CloseableThreadLocal<Map<String,Bits>> docsWithFieldLocal = new CloseableThreadLocal<Map<String,Bits>>() {
-    @Override
-    protected Map<String,Bits> initialValue() {
-      return new HashMap<>();
-    }
-  };
-
   final DocValuesProducer docValuesProducer;
   final FieldInfos fieldInfos;
   
@@ -207,7 +179,7 @@
       core.decRef();
     } finally {
       try {
-        IOUtils.close(docValuesLocal, docsWithFieldLocal);
+        super.doClose();
       } finally {
         if (docValuesProducer instanceof SegmentDocValuesProducer) {
           segDocValues.decRef(((SegmentDocValuesProducer)docValuesProducer).dvGens);
@@ -223,18 +195,6 @@
     ensureOpen();
     return fieldInfos;
   }
-  
-  @Override
-  public void document(int docID, StoredFieldVisitor visitor) throws IOException {
-    checkBounds(docID);
-    getFieldsReader().visitDocument(docID, visitor);
-  }
-
-  @Override
-  public FieldsProducer fields() {
-    ensureOpen();
-    return core.fields;
-  }
 
   @Override
   public int numDocs() {
@@ -248,50 +208,34 @@
     return si.info.getDocCount();
   }
 
-  /** Expert: retrieve thread-private {@link
-   *  TermVectorsReader}
-   *  @lucene.internal */
+  @Override
   public TermVectorsReader getTermVectorsReader() {
     ensureOpen();
     return core.termVectorsLocal.get();
   }
 
-  /** Expert: retrieve thread-private {@link
-   *  StoredFieldsReader}
-   *  @lucene.internal */
+  @Override
   public StoredFieldsReader getFieldsReader() {
     ensureOpen();
     return core.fieldsReaderLocal.get();
   }
   
-  /** Expert: retrieve underlying NormsProducer
-   *  @lucene.internal */
+  @Override
   public NormsProducer getNormsReader() {
     ensureOpen();
     return core.normsProducer;
   }
   
-  /** Expert: retrieve underlying DocValuesProducer
-   *  @lucene.internal */
+  @Override
   public DocValuesProducer getDocValuesReader() {
     ensureOpen();
     return docValuesProducer;
   }
 
   @Override
-  public Fields getTermVectors(int docID) throws IOException {
-    TermVectorsReader termVectorsReader = getTermVectorsReader();
-    if (termVectorsReader == null) {
-      return null;
-    }
-    checkBounds(docID);
-    return termVectorsReader.get(docID);
-  }
-  
-  private void checkBounds(int docID) {
-    if (docID < 0 || docID >= maxDoc()) {       
-      throw new IndexOutOfBoundsException("docID must be >= 0 and < maxDoc=" + maxDoc() + " (got docID=" + docID + ")");
-    }
+  public FieldsProducer getPostingsReader() {
+    ensureOpen();
+    return core.fields;
   }
 
   @Override
@@ -339,152 +283,6 @@
     return this;
   }
 
-  // returns the FieldInfo that corresponds to the given field and type, or
-  // null if the field does not exist, or not indexed as the requested
-  // DocValuesType.
-  private FieldInfo getDVField(String field, DocValuesType type) {
-    FieldInfo fi = fieldInfos.fieldInfo(field);
-    if (fi == null) {
-      // Field does not exist
-      return null;
-    }
-    if (fi.getDocValuesType() == DocValuesType.NONE) {
-      // Field was not indexed with doc values
-      return null;
-    }
-    if (fi.getDocValuesType() != type) {
-      // Field DocValues are different than requested type
-      return null;
-    }
-
-    return fi;
-  }
-  
-  @Override
-  public NumericDocValues getNumericDocValues(String field) throws IOException {
-    ensureOpen();
-    Map<String,Object> dvFields = docValuesLocal.get();
-
-    Object previous = dvFields.get(field);
-    if (previous != null && previous instanceof NumericDocValues) {
-      return (NumericDocValues) previous;
-    } else {
-      FieldInfo fi = getDVField(field, DocValuesType.NUMERIC);
-      if (fi == null) {
-        return null;
-      }
-      NumericDocValues dv = docValuesProducer.getNumeric(fi);
-      dvFields.put(field, dv);
-      return dv;
-    }
-  }
-
-  @Override
-  public Bits getDocsWithField(String field) throws IOException {
-    ensureOpen();
-    Map<String,Bits> dvFields = docsWithFieldLocal.get();
-
-    Bits previous = dvFields.get(field);
-    if (previous != null) {
-      return previous;
-    } else {
-      FieldInfo fi = fieldInfos.fieldInfo(field);
-      if (fi == null) {
-        // Field does not exist
-        return null;
-      }
-      if (fi.getDocValuesType() == DocValuesType.NONE) {
-        // Field was not indexed with doc values
-        return null;
-      }
-      Bits dv = docValuesProducer.getDocsWithField(fi);
-      dvFields.put(field, dv);
-      return dv;
-    }
-  }
-
-  @Override
-  public BinaryDocValues getBinaryDocValues(String field) throws IOException {
-    ensureOpen();
-    FieldInfo fi = getDVField(field, DocValuesType.BINARY);
-    if (fi == null) {
-      return null;
-    }
-
-    Map<String,Object> dvFields = docValuesLocal.get();
-
-    BinaryDocValues dvs = (BinaryDocValues) dvFields.get(field);
-    if (dvs == null) {
-      dvs = docValuesProducer.getBinary(fi);
-      dvFields.put(field, dvs);
-    }
-
-    return dvs;
-  }
-
-  @Override
-  public SortedDocValues getSortedDocValues(String field) throws IOException {
-    ensureOpen();
-    Map<String,Object> dvFields = docValuesLocal.get();
-    
-    Object previous = dvFields.get(field);
-    if (previous != null && previous instanceof SortedDocValues) {
-      return (SortedDocValues) previous;
-    } else {
-      FieldInfo fi = getDVField(field, DocValuesType.SORTED);
-      if (fi == null) {
-        return null;
-      }
-      SortedDocValues dv = docValuesProducer.getSorted(fi);
-      dvFields.put(field, dv);
-      return dv;
-    }
-  }
-  
-  @Override
-  public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException {
-    ensureOpen();
-    Map<String,Object> dvFields = docValuesLocal.get();
-
-    Object previous = dvFields.get(field);
-    if (previous != null && previous instanceof SortedNumericDocValues) {
-      return (SortedNumericDocValues) previous;
-    } else {
-      FieldInfo fi = getDVField(field, DocValuesType.SORTED_NUMERIC);
-      if (fi == null) {
-        return null;
-      }
-      SortedNumericDocValues dv = docValuesProducer.getSortedNumeric(fi);
-      dvFields.put(field, dv);
-      return dv;
-    }
-  }
-
-  @Override
-  public SortedSetDocValues getSortedSetDocValues(String field) throws IOException {
-    ensureOpen();
-    Map<String,Object> dvFields = docValuesLocal.get();
-    
-    Object previous = dvFields.get(field);
-    if (previous != null && previous instanceof SortedSetDocValues) {
-      return (SortedSetDocValues) previous;
-    } else {
-      FieldInfo fi = getDVField(field, DocValuesType.SORTED_SET);
-      if (fi == null) {
-        return null;
-      }
-      SortedSetDocValues dv = docValuesProducer.getSortedSet(fi);
-      dvFields.put(field, dv);
-      return dv;
-    }
-  }
-
-  @Override
-  public NumericDocValues getNormValues(String field) throws IOException {
-    ensureOpen();
-    return core.getNormValues(fieldInfos, field);
-  }
-  
   @Override
   public void addCoreClosedListener(CoreClosedListener listener) {
     ensureOpen();
@@ -496,66 +294,4 @@
     ensureOpen();
     core.removeCoreClosedListener(listener);
   }
-  
-  @Override
-  public long ramBytesUsed() {
-    ensureOpen();
-    long ramBytesUsed = BASE_RAM_BYTES_USED;
-    if (docValuesProducer != null) {
-      ramBytesUsed += docValuesProducer.ramBytesUsed();
-    }
-    if (core != null) {
-      ramBytesUsed += core.ramBytesUsed();
-    }
-    return ramBytesUsed;
-  }
-  
-  @Override
-  public Collection<Accountable> getChildResources() {
-    ensureOpen();
-    List<Accountable> resources = new ArrayList<>();
-    resources.add(Accountables.namedAccountable("postings", core.fields));
-    if (core.normsProducer != null) {
-      resources.add(Accountables.namedAccountable("norms", core.normsProducer));
-    }
-    if (docValuesProducer != null) {
-      resources.add(Accountables.namedAccountable("docvalues", docValuesProducer));
-    }
-    if (getFieldsReader() != null) {
-      resources.add(Accountables.namedAccountable("stored fields", getFieldsReader()));
-    }
-    if (getTermVectorsReader() != null) {
-      resources.add(Accountables.namedAccountable("term vectors", getTermVectorsReader()));
-    }
-    return Collections.unmodifiableList(resources);
-  }
-
-  @Override
-  public void checkIntegrity() throws IOException {
-    ensureOpen();
-
-    // stored fields
-    getFieldsReader().checkIntegrity();
-    
-    // term vectors
-    TermVectorsReader termVectorsReader = getTermVectorsReader();
-    if (termVectorsReader != null) {
-      termVectorsReader.checkIntegrity();
-    }
-    
-    // terms/postings
-    if (core.fields != null) {
-      core.fields.checkIntegrity();
-    }
-    
-    // norms
-    if (core.normsProducer != null) {
-      core.normsProducer.checkIntegrity();
-    }
-    
-    // docvalues
-    if (docValuesProducer != null) {
-      docValuesProducer.checkIntegrity();
-    }
-  }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java b/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java
new file mode 100644
index 0000000..85af99a
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/SlowCodecReaderWrapper.java
@@ -0,0 +1,289 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.lucene.codecs.DocValuesProducer;
+import org.apache.lucene.codecs.FieldsProducer;
+import org.apache.lucene.codecs.NormsProducer;
+import org.apache.lucene.codecs.StoredFieldsReader;
+import org.apache.lucene.codecs.TermVectorsReader;
+import org.apache.lucene.document.FieldTypes;
+import org.apache.lucene.util.Bits;
+
+/**
+ * Wraps arbitrary readers for merging. Note that this can cause slow
+ * and memory-intensive merges. Consider using {@link FilterCodecReader}
+ * instead.
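+ * <p>A rough usage sketch (hedged: {@code writer} and {@code leaf} are
+ * illustrative names for an open {@code IndexWriter} and an arbitrary
+ * {@code LeafReader}):</p>
+ * <pre class="prettyprint">
+ * // wrap the foreign reader so CodecReader-based APIs can consume it:
+ * CodecReader codecReader = SlowCodecReaderWrapper.wrap(leaf);
+ * writer.addIndexes(codecReader);
+ * </pre>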
+ */
+public final class SlowCodecReaderWrapper {
+  
+  /** No instantiation */
+  private SlowCodecReaderWrapper() {}
+  
+  /**
+   * Returns a {@code CodecReader} view of reader. 
+   * <p>
+   * If {@code reader} is already a {@code CodecReader}, it is returned
+   * directly. Otherwise, a (slow) view is returned.
+   */
+  public static CodecReader wrap(final LeafReader reader) throws IOException {
+    if (reader instanceof CodecReader) {
+      return (CodecReader)reader;
+    } else {
+      // simulate it slowly, over the leafReader api:
+      reader.checkIntegrity();
+      return new CodecReader() {
+
+        @Override
+        public TermVectorsReader getTermVectorsReader() {
+          reader.ensureOpen();
+          return readerToTermVectorsReader(reader);
+        }
+
+        @Override
+        public StoredFieldsReader getFieldsReader() {
+          reader.ensureOpen();
+          return readerToStoredFieldsReader(reader);
+        }
+
+        @Override
+        public NormsProducer getNormsReader() {
+          reader.ensureOpen();
+          return readerToNormsProducer(reader);
+        }
+
+        @Override
+        public DocValuesProducer getDocValuesReader() {
+          reader.ensureOpen();
+          return readerToDocValuesProducer(reader);
+        }
+
+        @Override
+        public FieldsProducer getPostingsReader() {
+          reader.ensureOpen();
+          try {
+            return readerToFieldsProducer(reader);
+          } catch (IOException bogus) {
+            throw new AssertionError(bogus);
+          }
+        }
+
+        @Override
+        public FieldInfos getFieldInfos() {
+          return reader.getFieldInfos();
+        }
+
+        @Override
+        public Bits getLiveDocs() {
+          return reader.getLiveDocs();
+        }
+
+        @Override
+        public int numDocs() {
+          return reader.numDocs();
+        }
+
+        @Override
+        public int maxDoc() {
+          return reader.maxDoc();
+        }
+
+        @Override
+        public void addCoreClosedListener(CoreClosedListener listener) {
+          reader.addCoreClosedListener(listener);
+        }
+
+        @Override
+        public void removeCoreClosedListener(CoreClosedListener listener) {
+          reader.removeCoreClosedListener(listener);
+        }
+
+        @Override
+        public FieldTypes getFieldTypes() {
+          return reader.getFieldTypes();
+        }
+      };
+    }
+  }
+  
+  private static NormsProducer readerToNormsProducer(final LeafReader reader) {
+    return new NormsProducer() {
+
+      @Override
+      public NumericDocValues getNorms(FieldInfo field) throws IOException {
+        return reader.getNormValues(field.name);
+      }
+
+      @Override
+      public void checkIntegrity() throws IOException {
+        // We already called checkIntegrity on the entire reader up front
+      }
+
+      @Override
+      public void close() {
+      }
+
+      @Override
+      public long ramBytesUsed() {
+        return 0;
+      }
+    };
+  }
+
+  private static DocValuesProducer readerToDocValuesProducer(final LeafReader reader) {
+    return new DocValuesProducer() {
+
+      @Override
+      public NumericDocValues getNumeric(FieldInfo field) throws IOException {  
+        return reader.getNumericDocValues(field.name);
+      }
+
+      @Override
+      public BinaryDocValues getBinary(FieldInfo field) throws IOException {
+        return reader.getBinaryDocValues(field.name);
+      }
+
+      @Override
+      public SortedDocValues getSorted(FieldInfo field) throws IOException {
+        return reader.getSortedDocValues(field.name);
+      }
+
+      @Override
+      public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
+        return reader.getSortedNumericDocValues(field.name);
+      }
+
+      @Override
+      public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
+        return reader.getSortedSetDocValues(field.name);
+      }
+
+      @Override
+      public Bits getDocsWithField(FieldInfo field) throws IOException {
+        return reader.getDocsWithField(field.name);
+      }
+
+      @Override
+      public void checkIntegrity() throws IOException {
+        // We already called checkIntegrity on the entire reader up front
+      }
+
+      @Override
+      public void close() {
+      }
+
+      @Override
+      public long ramBytesUsed() {
+        return 0;
+      }
+    };
+  }
+
+  private static StoredFieldsReader readerToStoredFieldsReader(final LeafReader reader) {
+    return new StoredFieldsReader() {
+      @Override
+      public void visitDocument(int docID, StoredFieldVisitor visitor) throws IOException {
+        reader.document(docID, visitor);
+      }
+
+      @Override
+      public StoredFieldsReader clone() {
+        return readerToStoredFieldsReader(reader);
+      }
+
+      @Override
+      public void checkIntegrity() throws IOException {
+        // We already called checkIntegrity on the entire reader up front
+      }
+
+      @Override
+      public void close() {
+      }
+
+      @Override
+      public long ramBytesUsed() {
+        return 0;
+      }
+    };
+  }
+
+  private static TermVectorsReader readerToTermVectorsReader(final LeafReader reader) {
+    return new TermVectorsReader() {
+      @Override
+      public Fields get(int docID) throws IOException {
+        return reader.getTermVectors(docID);
+      }
+
+      @Override
+      public TermVectorsReader clone() {
+        return readerToTermVectorsReader(reader);
+      }
+
+      @Override
+      public void checkIntegrity() throws IOException {
+        // We already called checkIntegrity on the entire reader up front
+      }
+
+      @Override
+      public void close() {
+      }
+
+      @Override
+      public long ramBytesUsed() {
+        return 0;
+      }
+    };
+  }
+
+  private static FieldsProducer readerToFieldsProducer(final LeafReader reader) throws IOException {
+    final Fields fields = reader.fields();
+    return new FieldsProducer() {
+      @Override
+      public Iterator<String> iterator() {
+        return fields.iterator();
+      }
+
+      @Override
+      public Terms terms(String field) throws IOException {
+        return fields.terms(field);
+      }
+
+      @Override
+      public int size() {
+        return fields.size();
+      }
+
+      @Override
+      public void checkIntegrity() throws IOException {
+        // We already called checkIntegrity on the entire reader up front
+      }
+
+      @Override
+      public void close() {
+      }
+
+      @Override
+      public long ramBytesUsed() {
+        return 0;
+      }
+    };
+  }
+}
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesTermsEnum.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesTermsEnum.java
index 16427cc..24e7d04 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesTermsEnum.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesTermsEnum.java
@@ -29,7 +29,6 @@
 class SortedDocValuesTermsEnum extends TermsEnum {
   private final SortedDocValues values;
   private int currentOrd = -1;
-  private BytesRef term;
   private final BytesRefBuilder scratch;
 
   /** Creates a new TermsEnum over the provided values */
@@ -44,7 +43,6 @@
     if (ord >= 0) {
       currentOrd = ord;
       scratch.copyBytes(text);
-      term = scratch.get();
       return SeekStatus.FOUND;
     } else {
       currentOrd = -ord-1;
@@ -52,7 +50,7 @@
         return SeekStatus.END;
       } else {
         // TODO: hmm can we avoid this "extra" lookup?:
-        term = values.lookupOrd(currentOrd);
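+        // copy into scratch: the BytesRef returned by lookupOrd may be reused
+        // by the values instance on a later call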
+        scratch.copyBytes(values.lookupOrd(currentOrd));
         return SeekStatus.NOT_FOUND;
       }
     }
@@ -64,7 +62,6 @@
     if (ord >= 0) {
       currentOrd = ord;
       scratch.copyBytes(text);
-      term = scratch.get();
       return true;
     } else {
       return false;
@@ -75,7 +72,7 @@
   public void seekExact(long ord) throws IOException {
     assert ord >= 0 && ord < values.getValueCount();
     currentOrd = (int) ord;
-    term = values.lookupOrd(currentOrd);
+    scratch.copyBytes(values.lookupOrd(currentOrd));
   }
 
   @Override
@@ -84,13 +81,13 @@
     if (currentOrd >= values.getValueCount()) {
       return null;
     }
-    term = values.lookupOrd(currentOrd);
-    return term;
+    scratch.copyBytes(values.lookupOrd(currentOrd));
+    return scratch.get();
   }
 
   @Override
   public BytesRef term() throws IOException {
-    return term;
+    return scratch.get();
   }
 
   @Override
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesTermsEnum.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesTermsEnum.java
index 64dba95..8f52d00 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesTermsEnum.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesTermsEnum.java
@@ -29,7 +29,6 @@
 class SortedSetDocValuesTermsEnum extends TermsEnum {
   private final SortedSetDocValues values;
   private long currentOrd = -1;
-  private BytesRef term;
   private final BytesRefBuilder scratch;
 
   /** Creates a new TermsEnum over the provided values */
@@ -44,7 +43,6 @@
     if (ord >= 0) {
       currentOrd = ord;
       scratch.copyBytes(text);
-      term = scratch.get();
       return SeekStatus.FOUND;
     } else {
       currentOrd = -ord-1;
@@ -52,7 +50,7 @@
         return SeekStatus.END;
       } else {
         // TODO: hmm can we avoid this "extra" lookup?:
-        term = values.lookupOrd(currentOrd);
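+        // copy into scratch: the BytesRef returned by lookupOrd may be reused
+        // by the values instance on a later call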
+        scratch.copyBytes(values.lookupOrd(currentOrd));
         return SeekStatus.NOT_FOUND;
       }
     }
@@ -64,7 +62,6 @@
     if (ord >= 0) {
       currentOrd = ord;
       scratch.copyBytes(text);
-      term = scratch.get();
       return true;
     } else {
       return false;
@@ -75,7 +72,7 @@
   public void seekExact(long ord) throws IOException {
     assert ord >= 0 && ord < values.getValueCount();
     currentOrd = (int) ord;
-    term = values.lookupOrd(currentOrd);
+    scratch.copyBytes(values.lookupOrd(currentOrd));
   }
 
   @Override
@@ -84,13 +81,13 @@
     if (currentOrd >= values.getValueCount()) {
       return null;
     }
-    term = values.lookupOrd(currentOrd);
-    return term;
+    scratch.copyBytes(values.lookupOrd(currentOrd));
+    return scratch.get();
   }
 
   @Override
   public BytesRef term() throws IOException {
-    return term;
+    return scratch.get();
   }
 
   @Override
diff --git a/lucene/core/src/java/org/apache/lucene/index/Terms.java b/lucene/core/src/java/org/apache/lucene/index/Terms.java
index 7448f51..99195e2 100644
--- a/lucene/core/src/java/org/apache/lucene/index/Terms.java
+++ b/lucene/core/src/java/org/apache/lucene/index/Terms.java
@@ -62,6 +62,13 @@
    *  creates auto-prefix terms during indexing to reduce the
    *  number of terms visited. */
   public TermsEnum intersect(CompiledAutomaton compiled, final BytesRef startTerm) throws IOException {
+    
+    // TODO: could we factor out a common interface b/w
+    // CompiledAutomaton and FST?  Then we could pass FST there too,
+    // and likely speed up resolving terms to deleted docs ... but
+    // AutomatonTermsEnum makes this tricky because of its on-the-fly cycle
+    // detection
+    
     // TODO: eventually we could support seekCeil/Exact on
     // the returned enum, instead of only being able to seek
     // at the start
diff --git a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
index d6a691e..64a8860 100644
--- a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
@@ -568,7 +568,7 @@
       final int numToMerge = end - maxSegmentCount + 1;
       final OneMerge merge = new OneMerge(eligible.subList(end-numToMerge, end));
       if (verbose(writer)) {
-        message("add final merge=" + merge.segString(writer.getDirectory()), writer);
+        message("add final merge=" + merge.segString(), writer);
       }
       spec = new MergeSpecification();
       spec.add(merge);
diff --git a/lucene/core/src/java/org/apache/lucene/index/TrackingIndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/TrackingIndexWriter.java
index 3d81428..2afcfff 100644
--- a/lucene/core/src/java/org/apache/lucene/index/TrackingIndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/TrackingIndexWriter.java
@@ -138,9 +138,9 @@
     return indexingGen.get();
   }
 
-  /** Calls {@link IndexWriter#addIndexes(IndexReader...)}
+  /** Calls {@link IndexWriter#addIndexes(CodecReader...)}
    *  and returns the generation that reflects this change. */
-  public long addIndexes(IndexReader... readers) throws IOException {
+  public long addIndexes(CodecReader... readers) throws IOException {
     writer.addIndexes(readers);
     // Return gen as of when indexing finished:
     return indexingGen.get();
diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
index 369681a..bef19ca 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java
@@ -24,8 +24,8 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.similarities.Similarity;
@@ -305,21 +305,19 @@
     }
 
     @Override
-    public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder,
-                                 Bits acceptDocs) throws IOException {
+    public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
 
-      if (scoreDocsInOrder || minNrShouldMatch > 1) {
+      if (minNrShouldMatch > 1) {
         // TODO: (LUCENE-4872) in some cases BooleanScorer may be faster for minNrShouldMatch
         // but the same is even true of pure conjunctions...
-        return super.bulkScorer(context, scoreDocsInOrder, acceptDocs);
+        return super.bulkScorer(context, acceptDocs);
       }
 
-      List<BulkScorer> prohibited = new ArrayList<BulkScorer>();
       List<BulkScorer> optional = new ArrayList<BulkScorer>();
       Iterator<BooleanClause> cIter = clauses.iterator();
       for (Weight w  : weights) {
         BooleanClause c =  cIter.next();
-        BulkScorer subScorer = w.bulkScorer(context, false, acceptDocs);
+        BulkScorer subScorer = w.bulkScorer(context, acceptDocs);
         if (subScorer == null) {
           if (c.isRequired()) {
             return null;
@@ -328,15 +326,20 @@
           // TODO: there are some cases where BooleanScorer
           // would handle conjunctions faster than
           // BooleanScorer2...
-          return super.bulkScorer(context, scoreDocsInOrder, acceptDocs);
+          return super.bulkScorer(context, acceptDocs);
         } else if (c.isProhibited()) {
-          prohibited.add(subScorer);
+          // TODO: there are some cases where BooleanScorer could do this faster
+          return super.bulkScorer(context, acceptDocs);
         } else {
           optional.add(subScorer);
         }
       }
 
-      return new BooleanScorer(this, disableCoord, minNrShouldMatch, optional, prohibited, maxCoord);
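+      // all clauses that produced a scorer were optional; if none did, no
+      // document can match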
+      if (optional.size() == 0) {
+        return null;
+      }
+
+      return new BooleanScorer(this, disableCoord, maxCoord, optional);
     }
 
     @Override
@@ -431,30 +434,6 @@
       }
     }
     
-    @Override
-    public boolean scoresDocsOutOfOrder() {
-      if (minNrShouldMatch > 1) {
-        // BS2 (in-order) will be used by scorer()
-        return false;
-      }
-      int optionalCount = 0;
-      for (BooleanClause c : clauses) {
-        if (c.isRequired()) {
-          // BS2 (in-order) will be used by scorer()
-          return false;
-        } else if (!c.isProhibited()) {
-          optionalCount++;
-        }
-      }
-      
-      if (optionalCount == minNrShouldMatch) {
-        return false; // BS2 (in-order) will be used, as this means conjunction
-      }
-      
-      // scorer() will return an out-of-order scorer if requested.
-      return true;
-    }
-    
     private Scorer req(List<Scorer> required, boolean disableCoord) {
       if (required.size() == 1) {
         Scorer req = required.get(0);
diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java b/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java
index 5c85bdb..08d5e63 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BooleanScorer.java
@@ -18,255 +18,139 @@
  */
 
 import java.io.IOException;
-import java.util.List;
+import java.util.Arrays;
+import java.util.Collection;
 
 import org.apache.lucene.search.BooleanQuery.BooleanWeight;
+import org.apache.lucene.util.PriorityQueue;
 
-/* Description from Doug Cutting (excerpted from
- * LUCENE-1483):
- *
- * BooleanScorer uses an array to score windows of
- * 2K docs. So it scores docs 0-2K first, then docs 2K-4K,
- * etc. For each window it iterates through all query terms
- * and accumulates a score in table[doc%2K]. It also stores
- * in the table a bitmask representing which terms
- * contributed to the score. Non-zero scores are chained in
- * a linked list. At the end of scoring each window it then
- * iterates through the linked list and, if the bitmask
- * matches the boolean constraints, collects a hit. For
- * boolean queries with lots of frequent terms this can be
- * much faster, since it does not need to update a priority
- * queue for each posting, instead performing constant-time
- * operations per posting. The only downside is that it
- * results in hits being delivered out-of-order within the
- * window, which means it cannot be nested within other
- * scorers. But it works well as a top-level scorer.
- *
- * The new BooleanScorer2 implementation instead works by
- * merging priority queues of postings, albeit with some
- * clever tricks. For example, a pure conjunction (all terms
- * required) does not require a priority queue. Instead it
- * sorts the posting streams at the start, then repeatedly
- * skips the first to the last. If the first ever equals
- * the last, then there's a hit. When some terms are
- * required and some terms are optional, the conjunction can
- * be evaluated first, then the optional terms can all skip
- * to the match and be added to the score. Thus the
- * conjunction can reduce the number of priority queue
- * updates for the optional terms. */
-
+/**
+ * BulkScorer that is used for pure disjunctions: no MUST clauses and
+ * minShouldMatch == 1. This scorer scores documents in batches of 2048 docs.
+ */
 final class BooleanScorer extends BulkScorer {
-  
-  private static final class BooleanScorerCollector extends SimpleCollector {
-    private BucketTable bucketTable;
-    private int mask;
-    private Scorer scorer;
-    
-    public BooleanScorerCollector(int mask, BucketTable bucketTable) {
-      this.mask = mask;
-      this.bucketTable = bucketTable;
-    }
-    
-    @Override
-    public void collect(final int doc) throws IOException {
-      final BucketTable table = bucketTable;
-      final int i = doc & BucketTable.MASK;
-      final Bucket bucket = table.buckets[i];
-      
-      if (bucket.doc != doc) {                    // invalid bucket
-        bucket.doc = doc;                         // set doc
-        bucket.score = scorer.score();            // initialize score
-        bucket.bits = mask;                       // initialize mask
-        bucket.coord = 1;                         // initialize coord
 
-        bucket.next = table.first;                // push onto valid list
-        table.first = bucket;
-      } else {                                    // valid bucket
-        bucket.score += scorer.score();           // increment score
-        bucket.bits |= mask;                      // add bits in mask
-        bucket.coord++;                           // increment coord
-      }
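+  // Docs are scored in fixed windows of SIZE = 2^SHIFT = 2048. MASK extracts
+  // a doc's slot within its window, and SET_SIZE (2048/64 longs) sizes the
+  // bitset of matching slots for one window.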
+  static final int SHIFT = 11;
+  static final int SIZE = 1 << SHIFT;
+  static final int MASK = SIZE - 1;
+  static final int SET_SIZE = 1 << (SHIFT - 6);
+  static final int SET_MASK = SET_SIZE - 1;
+
+  static class Bucket {
+    double score;
+    int freq;
+  }
+
+  static class BulkScorerAndDoc {
+    final BulkScorer scorer;
+    int next;
+
+    BulkScorerAndDoc(BulkScorer scorer) {
+      this.scorer = scorer;
+      this.next = 0;
     }
-    
+  }
+
+  final Bucket[] buckets = new Bucket[SIZE];
+  // This is basically an inlined FixedBitSet... seems to help with bound checks
+  final long[] matching = new long[SET_SIZE];
+
+  final float[] coordFactors;
+  final PriorityQueue<BulkScorerAndDoc> optionalScorers;
+  final FakeScorer fakeScorer = new FakeScorer();
+
+  final class OrCollector implements LeafCollector {
+    Scorer scorer;
+
     @Override
     public void setScorer(Scorer scorer) {
       this.scorer = scorer;
     }
-    
+
     @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
+    public void collect(int doc) throws IOException {
+      final int i = doc & MASK;
+      final int idx = i >>> 6;
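+      // Java's long shift only uses the low 6 bits of i, so 1L << i is
+      // effectively 1L << (i & 63): the bit for slot i within word idx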
+      matching[idx] |= 1L << i;
+      final Bucket bucket = buckets[i];
+      bucket.freq++;
+      bucket.score += scorer.score();
     }
-
   }
-  
-  static final class Bucket {
-    int doc = -1;            // tells if bucket is valid
-    double score;             // incremental score
-    // TODO: break out bool anyProhibited, int
-    // numRequiredMatched; then we can remove 32 limit on
-    // required clauses
-    int bits;                // used for bool constraints
-    int coord;               // count of terms in score
-    Bucket next;             // next valid bucket
-  }
-  
-  /** A simple hash table of document scores within a range. */
-  static final class BucketTable {
-    public static final int SIZE = 1 << 11;
-    public static final int MASK = SIZE - 1;
 
-    final Bucket[] buckets = new Bucket[SIZE];
-    Bucket first = null;                          // head of valid list
-  
-    public BucketTable() {
-      // Pre-fill to save the lazy init when collecting
-      // each sub:
-      for(int idx=0;idx<SIZE;idx++) {
-        buckets[idx] = new Bucket();
+  final OrCollector orCollector = new OrCollector();
+
+  BooleanScorer(BooleanWeight weight, boolean disableCoord, int maxCoord, Collection<BulkScorer> optionalScorers) {
+    for (int i = 0; i < buckets.length; i++) {
+      buckets[i] = new Bucket();
+    }
+    this.optionalScorers = new PriorityQueue<BulkScorerAndDoc>(optionalScorers.size()) {
+      @Override
+      protected boolean lessThan(BulkScorerAndDoc a, BulkScorerAndDoc b) {
+        return a.next < b.next;
       }
-    }
-
-    public LeafCollector newCollector(int mask) {
-      return new BooleanScorerCollector(mask, this);
-    }
-
-    public int size() { return SIZE; }
-  }
-
-  static final class SubScorer {
-    public BulkScorer scorer;
-    // TODO: re-enable this if BQ ever sends us required clauses
-    //public boolean required = false;
-    public boolean prohibited;
-    public LeafCollector collector;
-    public SubScorer next;
-    public boolean more;
-
-    public SubScorer(BulkScorer scorer, boolean required, boolean prohibited,
-        LeafCollector collector, SubScorer next) {
-      if (required) {
-        throw new IllegalArgumentException("this scorer cannot handle required=true");
-      }
-      this.scorer = scorer;
-      this.more = true;
-      // TODO: re-enable this if BQ ever sends us required clauses
-      //this.required = required;
-      this.prohibited = prohibited;
-      this.collector = collector;
-      this.next = next;
-    }
-  }
-  
-  private SubScorer scorers = null;
-  private BucketTable bucketTable = new BucketTable();
-  private final float[] coordFactors;
-  // TODO: re-enable this if BQ ever sends us required clauses
-  //private int requiredMask = 0;
-  private final int minNrShouldMatch;
-  private int end;
-  private Bucket current;
-  // Any time a prohibited clause matches we set bit 0:
-  private static final int PROHIBITED_MASK = 1;
-
-  private final Weight weight;
-
-  BooleanScorer(BooleanWeight weight, boolean disableCoord, int minNrShouldMatch,
-      List<BulkScorer> optionalScorers, List<BulkScorer> prohibitedScorers, int maxCoord) throws IOException {
-    this.minNrShouldMatch = minNrShouldMatch;
-    this.weight = weight;
-
+    };
     for (BulkScorer scorer : optionalScorers) {
-      scorers = new SubScorer(scorer, false, false, bucketTable.newCollector(0), scorers);
-    }
-    
-    for (BulkScorer scorer : prohibitedScorers) {
-      scorers = new SubScorer(scorer, false, true, bucketTable.newCollector(PROHIBITED_MASK), scorers);
+      this.optionalScorers.add(new BulkScorerAndDoc(scorer));
     }
 
     coordFactors = new float[optionalScorers.size() + 1];
     for (int i = 0; i < coordFactors.length; i++) {
-      coordFactors[i] = disableCoord ? 1.0f : weight.coord(i, maxCoord); 
+      coordFactors[i] = disableCoord ? 1.0f : weight.coord(i, maxCoord);
     }
   }
 
-  @Override
-  public boolean score(LeafCollector collector, int max) throws IOException {
+  private void scoreDocument(LeafCollector collector, int base, int i) throws IOException {
+    final FakeScorer fakeScorer = this.fakeScorer;
+    final Bucket bucket = buckets[i];
+    fakeScorer.freq = bucket.freq;
+    fakeScorer.score = (float) bucket.score * coordFactors[bucket.freq];
+    final int doc = base | i;
+    fakeScorer.doc = doc;
+    collector.collect(doc);
+    bucket.freq = 0;
+    bucket.score = 0;
+  }
 
-    boolean more;
-    Bucket tmp;
-    FakeScorer fs = new FakeScorer();
+  private void scoreMatches(LeafCollector collector, int base) throws IOException {
+    long[] matching = this.matching;
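+    // Visit the set bits of the window's bitset, lowest doc first; each set
+    // bit marks a slot whose bucket holds the accumulated freq and score.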
+    for (int idx = 0; idx < matching.length; idx++) {
+      long bits = matching[idx];
+      while (bits != 0L) {
+        int ntz = Long.numberOfTrailingZeros(bits);
+        int doc = idx << 6 | ntz;
+        scoreDocument(collector, base, doc);
+        bits ^= 1L << ntz;
+      }
+    }
+  }
 
-    // The internal loop will set the score and doc before calling collect.
-    collector.setScorer(fs);
+  private BulkScorerAndDoc scoreWindow(LeafCollector collector, int base, int min, int max,
+      PriorityQueue<BulkScorerAndDoc> optionalScorers, BulkScorerAndDoc top) throws IOException {
+    assert top.next < max;
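+    // Let every optional clause whose next potential match falls inside this
+    // window feed the OrCollector; the priority queue keeps the scorer with
+    // the smallest 'next' doc on top.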
     do {
-      bucketTable.first = null;
-      
-      while (current != null) {         // more queued 
+      top.next = top.scorer.score(orCollector, min, max);
+      top = optionalScorers.updateTop();
+    } while (top.next < max);
 
-        // check prohibited & required
-        if ((current.bits & PROHIBITED_MASK) == 0) {
-
-          // TODO: re-enable this if BQ ever sends us required
-          // clauses
-          //&& (current.bits & requiredMask) == requiredMask) {
-          
-          // NOTE: Lucene always passes max =
-          // Integer.MAX_VALUE today, because we never embed
-          // a BooleanScorer inside another (even though
-          // that should work)... but in theory an outside
-          // app could pass a different max so we must check
-          // it:
-          if (current.doc >= max) {
-            tmp = current;
-            current = current.next;
-            tmp.next = bucketTable.first;
-            bucketTable.first = tmp;
-            continue;
-          }
-          
-          if (current.coord >= minNrShouldMatch) {
-            fs.score = (float) (current.score * coordFactors[current.coord]);
-            fs.doc = current.doc;
-            fs.freq = current.coord;
-            collector.collect(current.doc);
-          }
-        }
-        
-        current = current.next;         // pop the queue
-      }
-      
-      if (bucketTable.first != null){
-        current = bucketTable.first;
-        bucketTable.first = current.next;
-        return true;
-      }
-
-      // refill the queue
-      more = false;
-      end += BucketTable.SIZE;
-      for (SubScorer sub = scorers; sub != null; sub = sub.next) {
-        if (sub.more) {
-          sub.more = sub.scorer.score(sub.collector, end);
-          more |= sub.more;
-        }
-      }
-      current = bucketTable.first;
-      
-    } while (current != null || more);
-
-    return false;
+    scoreMatches(collector, base);
+    Arrays.fill(matching, 0L);
+    return top;
   }
 
   @Override
-  public String toString() {
-    StringBuilder buffer = new StringBuilder();
-    buffer.append("boolean(");
-    for (SubScorer sub = scorers; sub != null; sub = sub.next) {
-      buffer.append(sub.scorer.toString());
-      buffer.append(" ");
+  public int score(LeafCollector collector, int min, int max) throws IOException {
+    fakeScorer.doc = -1;
+    collector.setScorer(fakeScorer);
+    final PriorityQueue<BulkScorerAndDoc> optionalScorers = this.optionalScorers;
+
+    BulkScorerAndDoc top = optionalScorers.top();
+    for (int windowMin = Math.max(min, top.next); windowMin < max; windowMin = top.next) {
+      final int windowBase = windowMin & ~MASK; // find the window that windowMin belongs to
+      final int windowMax = Math.min(max, windowBase + SIZE);
+      top = scoreWindow(collector, windowBase, windowMin, windowMax, optionalScorers, top);
+      assert top.next >= windowMax;
     }
-    buffer.append(")");
-    return buffer.toString();
+    return top.next;
   }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/BulkScorer.java b/lucene/core/src/java/org/apache/lucene/search/BulkScorer.java
index 7ba1b39..56bcf51 100644
--- a/lucene/core/src/java/org/apache/lucene/search/BulkScorer.java
+++ b/lucene/core/src/java/org/apache/lucene/search/BulkScorer.java
@@ -32,15 +32,46 @@
    * @param collector The collector to which all matching documents are passed.
    */
   public void score(LeafCollector collector) throws IOException {
-    score(collector, Integer.MAX_VALUE);
+    final int next = score(collector, 0, DocIdSetIterator.NO_MORE_DOCS);
+    assert next == DocIdSetIterator.NO_MORE_DOCS;
   }
 
   /**
-   * Collects matching documents in a range.
-   * 
-   * @param collector The collector to which all matching documents are passed.
-   * @param max Score up to, but not including, this doc
-   * @return true if more matching documents may remain.
+   * Collects matching documents in a range and returns an estimation of the
+   * next matching document which is on or after {@code max}.
+   * <p>The return value must be:</p><ul>
+   *   <li>&gt;= {@code max},</li>
+   *   <li>{@link DocIdSetIterator#NO_MORE_DOCS} if there are no more matches,</li>
+   *   <li>&lt;= the first matching document that is &gt;= {@code max} otherwise.</li>
+   * </ul>
+   * <p>{@code min} is the minimum document to be considered for matching. All
+   * documents strictly before this value must be ignored.</p>
+   * <p>Although {@code max} would be a legal return value for this method, higher
+   * values might help callers skip more efficiently over non-matching portions
+   * of the docID space.</p>
+   * <p>For instance, a {@link Scorer}-based implementation could look like
+   * below:</p>
+   * <pre class="prettyprint">
+   * private final Scorer scorer; // set via constructor
+   *
+   * public int score(LeafCollector collector, int min, int max) throws IOException {
+   *   collector.setScorer(scorer);
+   *   int doc = scorer.docID();
+   *   if (doc &lt; min) {
+   *     doc = scorer.advance(min);
+   *   }
+   *   while (doc &lt; max) {
+   *     collector.collect(doc);
+   *     doc = scorer.nextDoc();
+   *   }
+   *   return doc;
+   * }
+   * </pre>
+   *
+   * @param  collector The collector to which all matching documents are passed.
+   * @param  min Score documents starting at, and including, this document
+   * @param  max Score documents up to, but not including, this document
+   * @return an under-estimation of the next matching doc on or after {@code max}
    */
-  public abstract boolean score(LeafCollector collector, int max) throws IOException;
+  public abstract int score(LeafCollector collector, int min, int max) throws IOException;
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java b/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java
index 0fe0ea9..d50bec3 100644
--- a/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/CachingCollector.java
@@ -82,7 +82,6 @@
 
   private static class NoScoreCachingCollector extends CachingCollector {
 
-    List<Boolean> acceptDocsOutOfOrders;
     List<LeafReaderContext> contexts;
     List<int[]> docs;
     int maxDocsToCache;
@@ -92,7 +91,6 @@
       super(in);
       this.maxDocsToCache = maxDocsToCache;
       contexts = new ArrayList<>();
-      acceptDocsOutOfOrders = new ArrayList<>();
       docs = new ArrayList<>();
     }
 
@@ -105,7 +103,6 @@
       final LeafCollector in = this.in.getLeafCollector(context);
       if (contexts != null) {
         contexts.add(context);
-        acceptDocsOutOfOrders.add(in.acceptsDocsOutOfOrder());
       }
       if (maxDocsToCache >= 0) {
         return lastCollector = wrap(in, maxDocsToCache);
@@ -152,14 +149,7 @@
       assert docs.size() == contexts.size();
       for (int i = 0; i < contexts.size(); ++i) {
         final LeafReaderContext context = contexts.get(i);
-        final boolean docsInOrder = !acceptDocsOutOfOrders.get(i);
         final LeafCollector collector = other.getLeafCollector(context);
-        if (!collector.acceptsDocsOutOfOrder() && !docsInOrder) {
-          throw new IllegalArgumentException(
-                "cannot replay: given collector does not support "
-                    + "out-of-order collection, while the wrapped collector does. "
-                    + "Therefore cached documents may be out-of-order.");
-        }
         collect(collector, i);
       }
     }
@@ -300,16 +290,9 @@
    * Creates a {@link CachingCollector} which does not wrap another collector.
    * The cached documents and scores can later be {@link #replay(Collector)
    * replayed}.
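+   * <p>A hedged usage sketch (names are illustrative, assuming an
+   * {@code IndexSearcher searcher} and a {@code Query query}):
+   * {@code CachingCollector cache = CachingCollector.create(true, 64.0);}
+   * then {@code searcher.search(query, cache);} and later
+   * {@code cache.replay(otherCollector);}</p>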
-   *
-   * @param acceptDocsOutOfOrder
-   *          whether documents are allowed to be collected out-of-order
    */
-  public static CachingCollector create(final boolean acceptDocsOutOfOrder, boolean cacheScores, double maxRAMMB) {
+  public static CachingCollector create(boolean cacheScores, double maxRAMMB) {
     Collector other = new SimpleCollector() {
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return acceptDocsOutOfOrder;
-      }
 
       @Override
       public void collect(int doc) {}
diff --git a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java
index d9e7f32..e6c7a03 100644
--- a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java
@@ -134,14 +134,14 @@
     }
 
     @Override
-    public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs) throws IOException {
+    public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
       final DocIdSetIterator disi;
       if (filter != null) {
         assert query == null;
-        return super.bulkScorer(context, scoreDocsInOrder, acceptDocs);
+        return super.bulkScorer(context, acceptDocs);
       } else {
         assert query != null && innerWeight != null;
-        BulkScorer bulkScorer = innerWeight.bulkScorer(context, scoreDocsInOrder, acceptDocs);
+        BulkScorer bulkScorer = innerWeight.bulkScorer(context, acceptDocs);
         if (bulkScorer == null) {
           return null;
         }
@@ -171,11 +171,6 @@
     }
 
     @Override
-    public boolean scoresDocsOutOfOrder() {
-      return (innerWeight != null) ? innerWeight.scoresDocsOutOfOrder() : false;
-    }
-
-    @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
       final Scorer cs = scorer(context, context.reader().getLiveDocs());
       final boolean exists = (cs != null && cs.advance(doc) == doc);
@@ -212,8 +207,8 @@
     }
 
     @Override
-    public boolean score(LeafCollector collector, int max) throws IOException {
-      return bulkScorer.score(wrapCollector(collector), max);
+    public int score(LeafCollector collector, int min, int max) throws IOException {
+      return bulkScorer.score(wrapCollector(collector), min, max);
     }
 
     private LeafCollector wrapCollector(LeafCollector collector) {
diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
index 438dad2..36937e1 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
@@ -17,6 +17,7 @@
  * limitations under the License.
  */
 
+
 import java.io.IOException;
 
 import org.apache.lucene.index.BinaryDocValues;
@@ -39,46 +40,24 @@
  * sorting, by exposing a tight interaction with {@link
  * FieldValueHitQueue} as it visits hits.  Whenever a hit is
  * competitive, it's enrolled into a virtual slot, which is
- * an int ranging from 0 to numHits-1.  The {@link
- * FieldComparator} is made aware of segment transitions
- * during searching in case any internal state it's tracking
- * needs to be recomputed during these transitions.</p>
- *
- * <p>A comparator must define these functions:</p>
- *
+ * an int ranging from 0 to numHits-1. Segment transitions are
+ * handled by creating a dedicated per-segment
+ * {@link LeafFieldComparator} which also needs to interact
+ * with the {@link FieldValueHitQueue} but can optimize for
+ * the segment being collected.</p>
+ * 
+ * <p>The following functions need to be implemented:</p>
  * <ul>
- *
  *  <li> {@link #compare} Compare a hit at 'slot a'
  *       with hit 'slot b'.
- *
- *  <li> {@link #setBottom} This method is called by
- *       {@link FieldValueHitQueue} to notify the
- *       FieldComparator of the current weakest ("bottom")
- *       slot.  Note that this slot may not hold the weakest
- *       value according to your comparator, in cases where
- *       your comparator is not the primary one (ie, is only
- *       used to break ties from the comparators before it).
- *
- *  <li> {@link #compareBottom} Compare a new hit (docID)
- *       against the "weakest" (bottom) entry in the queue.
- *
+ * 
  *  <li> {@link #setTopValue} This method is called by
  *       {@link TopFieldCollector} to notify the
  *       FieldComparator of the top most value, which is
- *       used by future calls to {@link #compareTop}.
- *
- *  <li> {@link #compareBottom} Compare a new hit (docID)
- *       against the "weakest" (bottom) entry in the queue.
- *
- *  <li> {@link #compareTop} Compare a new hit (docID)
- *       against the top value previously set by a call to
- *       {@link #setTopValue}.
- *
- *  <li> {@link #copy} Installs a new hit into the
- *       priority queue.  The {@link FieldValueHitQueue}
- *       calls this method when a new hit is competitive.
- *
- *  <li> {@link #setNextReader(org.apache.lucene.index.LeafReaderContext)} Invoked
+ *       used by future calls to
+ *       {@link LeafFieldComparator#compareTop}.
+ * 
+ *  <li> {@link #getLeafComparator(org.apache.lucene.index.LeafReaderContext)} Invoked
  *       when the search is switching to the next segment.
  *       You may need to update internal state of the
  *       comparator, for example retrieving new values from
@@ -90,6 +69,7 @@
  *       FieldDoc#fields} when returning the top results.
  * </ul>
  *
+ * @see LeafFieldComparator
  * @lucene.experimental
  */
 public abstract class FieldComparator<T> {
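To illustrate the new top-level/per-leaf split (not part of the patch): a hedged sketch of a custom comparator built on the SimpleFieldComparator base class added by this change, sorting ascending by a numeric doc-values field. The class name and the "price" field are invented for the example; for real use, Lucene's NumericComparator already covers this case.

    import java.io.IOException;
    import org.apache.lucene.index.DocValues;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.NumericDocValues;
    import org.apache.lucene.search.SimpleFieldComparator;

    public final class LongFieldComparator extends SimpleFieldComparator<Long> {
      private final long[] values;      // one entry per queue slot
      private long bottom;              // value of the current weakest slot
      private long topValue;            // value set by setTopValue for searchAfter
      private NumericDocValues current; // doc values of the segment being collected

      public LongFieldComparator(int numHits) {
        values = new long[numHits];
      }

      @Override
      public int compare(int slot1, int slot2) {
        return Long.compare(values[slot1], values[slot2]);
      }

      @Override
      public void setTopValue(Long value) {
        topValue = value;
      }

      @Override
      public Long value(int slot) {
        return values[slot];
      }

      @Override
      protected void doSetNextReader(LeafReaderContext context) throws IOException {
        current = DocValues.getNumeric(context.reader(), "price");
      }

      @Override
      public void setBottom(int slot) {
        bottom = values[slot];
      }

      @Override
      public int compareBottom(int doc) {
        return Long.compare(bottom, current.get(doc));
      }

      @Override
      public int compareTop(int doc) {
        return Long.compare(topValue, current.get(doc));
      }

      @Override
      public void copy(int slot, int doc) {
        values[slot] = current.get(doc);
      }
    }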
@@ -106,93 +86,14 @@
   public abstract int compare(int slot1, int slot2);
 
   /**
-   * Set the bottom slot, ie the "weakest" (sorted last)
-   * entry in the queue.  When {@link #compareBottom} is
-   * called, you should compare against this slot.  This
-   * will always be called before {@link #compareBottom}.
-   * 
-   * @param slot the currently weakest (sorted last) slot in the queue
-   */
-  public abstract void setBottom(final int slot);
-
-  /**
    * Record the top value, for future calls to {@link
-   * #compareTop}.  This is only called for searches that
+   * LeafFieldComparator#compareTop}.  This is only called for searches that
    * use searchAfter (deep paging), and is called before any
-   * calls to {@link #setNextReader}.
+   * calls to {@link #getLeafComparator(LeafReaderContext)}.
    */
   public abstract void setTopValue(T value);
 
   /**
-   * Compare the bottom of the queue with this doc.  This will
-   * only invoked after setBottom has been called.  This
-   * should return the same result as {@link
-   * #compare(int,int)}} as if bottom were slot1 and the new
-   * document were slot 2.
-   *    
-   * <p>For a search that hits many results, this method
-   * will be the hotspot (invoked by far the most
-   * frequently).</p>
-   * 
-   * @param doc that was hit
-   * @return any {@code N < 0} if the doc's value is sorted after
-   * the bottom entry (not competitive), any {@code N > 0} if the
-   * doc's value is sorted before the bottom entry and {@code 0} if
-   * they are equal.
-   */
-  public abstract int compareBottom(int doc) throws IOException;
-
-  /**
-   * Compare the top value with this doc.  This will
-   * only invoked after setTopValue has been called.  This
-   * should return the same result as {@link
-   * #compare(int,int)}} as if topValue were slot1 and the new
-   * document were slot 2.  This is only called for searches that
-   * use searchAfter (deep paging).
-   *    
-   * @param doc that was hit
-   * @return any {@code N < 0} if the doc's value is sorted after
-   * the bottom entry (not competitive), any {@code N > 0} if the
-   * doc's value is sorted before the bottom entry and {@code 0} if
-   * they are equal.
-   */
-  public abstract int compareTop(int doc) throws IOException;
-
-  /**
-   * This method is called when a new hit is competitive.
-   * You should copy any state associated with this document
-   * that will be required for future comparisons, into the
-   * specified slot.
-   * 
-   * @param slot which slot to copy the hit to
-   * @param doc docID relative to current reader
-   */
-  public abstract void copy(int slot, int doc) throws IOException;
-
-  /**
-   * Set a new {@link org.apache.lucene.index.LeafReaderContext}. All subsequent docIDs are relative to
-   * the current reader (you must add docBase if you need to
-   * map it to a top-level docID).
-   * 
-   * @param context current reader context
-   * @return the comparator to use for this segment; most
-   *   comparators can just return "this" to reuse the same
-   *   comparator across segments
-   * @throws IOException if there is a low-level IO error
-   */
-  public abstract FieldComparator<T> setNextReader(LeafReaderContext context) throws IOException;
-
-  /** Sets the Scorer to use in case a document's score is
-   *  needed.
-   * 
-   * @param scorer Scorer instance that you should use to
-   * obtain the current hit's score, if necessary. */
-  public void setScorer(Scorer scorer) {
-    // Empty implementation since most comparators don't need the score. This
-    // can be overridden by those that need it.
-  }
-  
-  /**
    * Return the actual value in the slot.
    *
    * @param slot the value
@@ -200,7 +101,20 @@
    */
   public abstract T value(int slot);
 
-  /** Returns -1 if first is less than second.  Default
+  /**
+   * Get a per-segment {@link LeafFieldComparator} to collect the given
+   * {@link org.apache.lucene.index.LeafReaderContext}. All docIDs supplied to
+   * this {@link LeafFieldComparator} are relative to the current reader (you
+   * must add docBase if you need to map it to a top-level docID).
+   * 
+   * @param context current reader context
+   * @return the comparator to use for this segment
+   * @throws IOException if there is a low-level IO error
+   */
+  public abstract LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException;
+
+  /** Returns a negative integer if first is less than second,
+   *  0 if they are equal and a positive integer otherwise. Default
    *  impl to assume the type implements Comparable and
    *  invoke .compareTo; be sure to override this method if
    *  your FieldComparator's type isn't a Comparable or
@@ -226,7 +140,7 @@
    *  using {@link TopScoreDocCollector} directly (which {@link
    *  IndexSearcher#search} uses when no {@link Sort} is
    *  specified). */
-  public static final class RelevanceComparator extends FieldComparator<Float> {
+  public static final class RelevanceComparator extends FieldComparator<Float> implements LeafFieldComparator {
     private final float[] scores;
     private float bottom;
     private Scorer scorer;
@@ -256,7 +170,7 @@
     }
 
     @Override
-    public FieldComparator<Float> setNextReader(LeafReaderContext context) {
+    public LeafFieldComparator getLeafComparator(LeafReaderContext context) {
       return this;
     }
     
@@ -304,7 +218,7 @@
   }
 
   /** Sorts by ascending docID */
-  public static final class DocComparator extends FieldComparator<Integer> {
+  public static final class DocComparator extends FieldComparator<Integer> implements LeafFieldComparator {
     private final int[] docIDs;
     private int docBase;
     private int bottom;
@@ -333,7 +247,7 @@
     }
 
     @Override
-    public FieldComparator<Integer> setNextReader(LeafReaderContext context) {
+    public LeafFieldComparator getLeafComparator(LeafReaderContext context) {
       // TODO: can we "map" our docIDs to the current
       // reader? saves having to then subtract on every
       // compare call
@@ -361,6 +275,9 @@
       int docValue = docBase + doc;
       return Integer.compare(topValue, docValue);
     }
+
+    @Override
+    public void setScorer(Scorer scorer) {}
   }
   
   /** Sorts by field's natural Term sort order, using
@@ -372,7 +289,7 @@
    *  to large results, this comparator will be much faster
    *  than {@link org.apache.lucene.search.FieldComparator.TermValComparator}.  For very small
    *  result sets it may be slower. */
-  public static class TermOrdValComparator extends FieldComparator<BytesRef> {
+  public static class TermOrdValComparator extends FieldComparator<BytesRef> implements LeafFieldComparator {
     /* Ords for each slot.
        @lucene.internal */
     final int[] ords;
@@ -516,7 +433,7 @@
     }
     
     @Override
-    public FieldComparator<BytesRef> setNextReader(LeafReaderContext context) throws IOException {
+    public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
       termsIndex = getSortedDocValues(context, field);
       currentReaderGen++;
 
@@ -534,7 +451,7 @@
         topOrd = missingOrd;
         topSameReader = true;
       }
-      //System.out.println("  setNextReader topOrd=" + topOrd + " topSameReader=" + topSameReader);
+      //System.out.println("  getLeafComparator topOrd=" + topOrd + " topSameReader=" + topSameReader);
 
       if (bottomSlot != -1) {
         // Recompute bottomOrd/SameReader
@@ -622,13 +539,16 @@
       }
       return val1.compareTo(val2);
     }
+
+    @Override
+    public void setScorer(Scorer scorer) {}
   }
   
   /** Sorts by field's natural Term sort order.  All
    *  comparisons are done using BytesRef.compareTo, which is
    *  slow for medium to large result sets but possibly
    *  very fast for very small results sets. */
-  public static class TermValComparator extends FieldComparator<BytesRef> {
+  public static class TermValComparator extends FieldComparator<BytesRef> implements LeafFieldComparator {
     
     private final BytesRef[] values;
     private final BytesRefBuilder[] tempBRs;
@@ -694,7 +614,7 @@
     }
 
     @Override
-    public FieldComparator<BytesRef> setNextReader(LeafReaderContext context) throws IOException {
+    public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
       docTerms = getBinaryDocValues(context, field);
       docsWithField = getDocsWithField(context, field);
       if (docsWithField instanceof Bits.MatchAllBits) {
@@ -749,5 +669,8 @@
       }
       return term;
     }
+
+    @Override
+    public void setScorer(Scorer scorer) {}
   }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java b/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java
index e7c368c..db8fb59 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.util.PriorityQueue;
 
 /**
@@ -53,17 +54,16 @@
    * there is just one comparator.
    */
   private static final class OneComparatorFieldValueHitQueue<T extends FieldValueHitQueue.Entry> extends FieldValueHitQueue<T> {
+    
     private final int oneReverseMul;
+    private final FieldComparator<?> oneComparator;
     
     public OneComparatorFieldValueHitQueue(SortField[] fields, int size)
         throws IOException {
       super(fields, size);
-
-      SortField field = fields[0];
-      setComparator(0, field.getComparator(size, 0));
-      oneReverseMul = field.reverse ? -1 : 1;
-
-      reverseMul[0] = oneReverseMul;
+      assert fields.length == 1;
+      oneComparator = comparators[0];
+      oneReverseMul = reverseMul[0];
     }
 
     /**
@@ -78,7 +78,7 @@
       assert hitA != hitB;
       assert hitA.slot != hitB.slot;
 
-      final int c = oneReverseMul * firstComparator.compare(hitA.slot, hitB.slot);
+      final int c = oneReverseMul * oneComparator.compare(hitA.slot, hitB.slot);
       if (c != 0) {
         return c > 0;
       }
@@ -98,14 +98,6 @@
     public MultiComparatorsFieldValueHitQueue(SortField[] fields, int size)
         throws IOException {
       super(fields, size);
-
-      int numComparators = comparators.length;
-      for (int i = 0; i < numComparators; ++i) {
-        SortField field = fields[i];
-
-        reverseMul[i] = field.reverse ? -1 : 1;
-        setComparator(i, field.getComparator(size, i));
-      }
     }
   
     @Override
@@ -130,8 +122,7 @@
   }
   
   // prevent instantiation and extension.
-  @SuppressWarnings({"rawtypes","unchecked"})
-  private FieldValueHitQueue(SortField[] fields, int size) {
+  private FieldValueHitQueue(SortField[] fields, int size) throws IOException {
     super(size);
     // When we get here, fields.length is guaranteed to be > 0, therefore no
     // need to check it again.
@@ -141,8 +132,14 @@
     // anyway.
     this.fields = fields;
     int numComparators = fields.length;
-    comparators = new FieldComparator[numComparators];
+    comparators = new FieldComparator<?>[numComparators];
     reverseMul = new int[numComparators];
+    for (int i = 0; i < numComparators; ++i) {
+      SortField field = fields[i];
+
+      reverseMul[i] = field.reverse ? -1 : 1;
+      comparators[i] = field.getComparator(size, i);
+    }
   }
 
   /**
@@ -179,15 +176,17 @@
     return reverseMul;
   }
 
-  public void setComparator(int pos, FieldComparator<?> comparator) {
-    if (pos==0) firstComparator = comparator;
-    comparators[pos] = comparator;
+  public LeafFieldComparator[] getComparators(LeafReaderContext context) throws IOException {
+    LeafFieldComparator[] comparators = new LeafFieldComparator[this.comparators.length];
+    for (int i = 0; i < comparators.length; ++i) {
+      comparators[i] = this.comparators[i].getLeafComparator(context);
+    }
+    return comparators;
   }
 
   /** Stores the sort criteria being used. */
   protected final SortField[] fields;
-  protected final FieldComparator<?>[] comparators;  // use setComparator to change this array
-  protected FieldComparator<?> firstComparator;      // this must always be equal to comparators[0]
+  protected final FieldComparator<?>[] comparators;
   protected final int[] reverseMul;
 
   @Override
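A hedged sketch of how collecting code consumes the new per-segment accessor; sort, numHits and reader are assumed to be in scope, and IOExceptions propagate to the caller.

    FieldValueHitQueue<FieldValueHitQueue.Entry> queue =
        FieldValueHitQueue.create(sort.getSort(), numHits);
    for (LeafReaderContext leaf : reader.leaves()) {
      // one LeafFieldComparator per SortField, specialized for this segment
      LeafFieldComparator[] leafComparators = queue.getComparators(leaf);
      // drive copy()/setBottom()/compareBottom() on these while collecting leaf
    }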
diff --git a/lucene/core/src/java/org/apache/lucene/search/FilterCache.java b/lucene/core/src/java/org/apache/lucene/search/FilterCache.java
index 23292a5..8c4a97e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FilterCache.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FilterCache.java
@@ -20,6 +20,7 @@
 /**
  * A cache for filters.
  *
+ * @see LRUFilterCache
  * @lucene.experimental
  */
 public interface FilterCache {
diff --git a/lucene/core/src/java/org/apache/lucene/search/FilterCachingPolicy.java b/lucene/core/src/java/org/apache/lucene/search/FilterCachingPolicy.java
index 107f4ec..d9eb38b 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FilterCachingPolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FilterCachingPolicy.java
@@ -29,6 +29,8 @@
  *
  * Implementations of this class must be thread-safe.
  *
+ * @see UsageTrackingFilterCachingPolicy
+ * @see LRUFilterCache
  * @lucene.experimental
  */
 // TODO: add APIs for integration with IndexWriter.IndexReaderWarmer
diff --git a/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java b/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java
index e3ae9a8..5d79663 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FilterLeafCollector.java
@@ -44,11 +44,6 @@
   }
 
   @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return in.acceptsDocsOutOfOrder();
-  }
-
-  @Override
   public String toString() {
     return getClass().getSimpleName() + "(" + in + ")";
   }
diff --git a/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java b/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java
index 9d791c7..c95f05b 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java
@@ -81,11 +81,6 @@
   public Weight createWeight(final IndexSearcher searcher) throws IOException {
     final Weight weight = query.createWeight (searcher);
     return new Weight() {
-      
-      @Override
-      public boolean scoresDocsOutOfOrder() {
-        return true;
-      }
 
       @Override
       public float getValueForNormalization() throws IOException { 
@@ -138,7 +133,7 @@
 
       // return a filtering top scorer
       @Override
-      public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs) throws IOException {
+      public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
         assert filter != null;
 
         DocIdSet filterDocIdSet = filter.getDocIdSet(context, acceptDocs);
@@ -147,7 +142,7 @@
           return null;
         }
 
-        return strategy.filteredBulkScorer(context, weight, scoreDocsInOrder, filterDocIdSet);
+        return strategy.filteredBulkScorer(context, weight, filterDocIdSet);
       }
     };
   }
@@ -225,12 +220,12 @@
     }
 
     @Override
-    public boolean score(LeafCollector collector, int maxDoc) throws IOException {
+    public int score(LeafCollector collector, int min, int maxDoc) throws IOException {
       // the normalization trick already applies the boost of this query,
       // so we can use the wrapped scorer directly:
       collector.setScorer(scorer);
-      if (scorer.docID() == -1) {
-        scorer.nextDoc();
+      if (scorer.docID() < min) {
+        scorer.advance(min);
       }
       while (true) {
         final int scorerDoc = scorer.docID();
@@ -244,7 +239,7 @@
         }
       }
 
-      return scorer.docID() != Scorer.NO_MORE_DOCS;
+      return scorer.docID();
     }
   }
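The int return value makes windowed collection straightforward. A hedged sketch of the revised contract, assuming (as in the implementation above) that score returns the first candidate document on or after max, or NO_MORE_DOCS:

    // Collect one leaf in fixed-size windows of 2048 docs (window size arbitrary).
    int doc = 0;
    while (doc != DocIdSetIterator.NO_MORE_DOCS) {
      doc = bulkScorer.score(leafCollector, doc, doc + 2048);
    }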
   
@@ -480,7 +475,7 @@
      * @return a filtered top scorer
      */
     public BulkScorer filteredBulkScorer(LeafReaderContext context,
-        Weight weight, boolean scoreDocsInOrder, DocIdSet docIdSet) throws IOException {
+        Weight weight, DocIdSet docIdSet) throws IOException {
       Scorer scorer = filteredScorer(context, weight, docIdSet);
       if (scorer == null) {
         return null;
@@ -603,13 +598,12 @@
     @Override
     public BulkScorer filteredBulkScorer(final LeafReaderContext context,
         Weight weight,
-        boolean scoreDocsInOrder, // ignored (we always top-score in order)
         DocIdSet docIdSet) throws IOException {
       Bits filterAcceptDocs = docIdSet.bits();
       if (filterAcceptDocs == null) {
         // Filter does not provide random-access Bits; we
         // must fallback to leapfrog:
-        return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredBulkScorer(context, weight, scoreDocsInOrder, docIdSet);
+        return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredBulkScorer(context, weight, docIdSet);
       }
       final Scorer scorer = weight.scorer(context, null);
       return scorer == null ? null : new QueryFirstBulkScorer(scorer, filterAcceptDocs);
diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
index d3906f7..1bf92e9 100644
--- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
+++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
@@ -18,19 +18,14 @@
  */
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Iterator;
 import java.util.List;
-import java.util.NoSuchElementException;
 import java.util.Set;
 import java.util.concurrent.Callable;
-import java.util.concurrent.CompletionService;
 import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Executor;
-import java.util.concurrent.ExecutorCompletionService;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
+import java.util.concurrent.Future;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.FieldTypes;
@@ -444,28 +439,21 @@
     if (executor == null) {
       return search(leafContexts, weight, after, nDocs);
     } else {
-      final HitQueue hq = new HitQueue(nDocs, false);
-      final Lock lock = new ReentrantLock();
-      final ExecutionHelper<TopDocs> runner = new ExecutionHelper<>(executor);
-    
-      for (int i = 0; i < leafSlices.length; i++) { // search each sub
-        runner.submit(new SearcherCallableNoSort(lock, this, leafSlices[i], weight, after, nDocs, hq));
+      final List<Future<TopDocs>> topDocsFutures = new ArrayList<>(leafSlices.length);
+      for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice
+        topDocsFutures.add(executor.submit(new SearcherCallableNoSort(this, leafSlices[i], weight, after, nDocs)));
       }
-
-      int totalHits = 0;
-      float maxScore = Float.NEGATIVE_INFINITY;
-      for (final TopDocs topDocs : runner) {
-        if(topDocs.totalHits != 0) {
-          totalHits += topDocs.totalHits;
-          maxScore = Math.max(maxScore, topDocs.getMaxScore());
+      final TopDocs[] topDocs = new TopDocs[leafSlices.length];
+      for (int i = 0; i < leafSlices.length; i++) {
+        try {
+          topDocs[i] = topDocsFutures.get(i).get();
+        } catch (InterruptedException e) {
+          throw new ThreadInterruptedException(e);
+        } catch (ExecutionException e) {
+          throw new RuntimeException(e);
         }
       }
-
-      final ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()];
-      for (int i = hq.size() - 1; i >= 0; i--) // put docs in array
-        scoreDocs[i] = hq.pop();
-
-      return new TopDocs(totalHits, scoreDocs, maxScore);
+      return TopDocs.merge(null, nDocs, topDocs);
     }
   }
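End to end, the executor path now looks like this (a hedged sketch; reader and query are assumptions):

    ExecutorService executor = Executors.newFixedThreadPool(4);
    IndexSearcher searcher = new IndexSearcher(reader, executor);
    // Each leaf slice runs as its own task; the per-slice TopDocs are then
    // combined with TopDocs.merge instead of a shared, lock-protected queue.
    TopDocs hits = searcher.search(query, 10);
    executor.shutdown();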
 
@@ -484,7 +472,7 @@
       limit = 1;
     }
     nDocs = Math.min(nDocs, limit);
-    TopScoreDocCollector collector = TopScoreDocCollector.create(nDocs, after, !weight.scoresDocsOutOfOrder());
+    TopScoreDocCollector collector = TopScoreDocCollector.create(nDocs, after);
     search(leaves, weight, collector);
     return collector.topDocs();
   }
@@ -529,31 +517,21 @@
       // use all leaves here!
       return search(leafContexts, weight, after, nDocs, sort, fillFields, doDocScores, doMaxScore);
     } else {
-      final TopFieldCollector topCollector = TopFieldCollector.create(sort, nDocs,
-                                                                      after,
-                                                                      fillFields,
-                                                                      doDocScores,
-                                                                      doMaxScore,
-                                                                      false);
-
-      final Lock lock = new ReentrantLock();
-      final ExecutionHelper<TopFieldDocs> runner = new ExecutionHelper<>(executor);
+      final List<Future<TopFieldDocs>> topDocsFutures = new ArrayList<>(leafSlices.length);
       for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice
-        runner.submit(
-                      new SearcherCallableWithSort(lock, this, leafSlices[i], weight, after, nDocs, topCollector, sort, doDocScores, doMaxScore));
+        topDocsFutures.add(executor.submit(new SearcherCallableWithSort(this, leafSlices[i], weight, after, nDocs, sort, doDocScores, doMaxScore)));
       }
-      int totalHits = 0;
-      float maxScore = Float.NEGATIVE_INFINITY;
-      for (final TopFieldDocs topFieldDocs : runner) {
-        if (topFieldDocs.totalHits != 0) {
-          totalHits += topFieldDocs.totalHits;
-          maxScore = Math.max(maxScore, topFieldDocs.getMaxScore());
+      final TopFieldDocs[] topDocs = new TopFieldDocs[leafSlices.length];
+      for (int i = 0; i < leafSlices.length; i++) {
+        try {
+          topDocs[i] = topDocsFutures.get(i).get();
+        } catch (InterruptedException e) {
+          throw new ThreadInterruptedException(e);
+        } catch (ExecutionException e) {
+          throw new RuntimeException(e);
         }
       }
-
-      final TopFieldDocs topDocs = (TopFieldDocs) topCollector.topDocs();
-
-      return new TopFieldDocs(totalHits, topDocs.scoreDocs, topDocs.fields, topDocs.getMaxScore());
+      return (TopFieldDocs) TopDocs.merge(sort, nDocs, topDocs);
     }
   }
   
@@ -574,7 +552,7 @@
 
     TopFieldCollector collector = TopFieldCollector.create(sort, nDocs, after,
                                                            fillFields, doDocScores,
-                                                           doMaxScore, !weight.scoresDocsOutOfOrder());
+                                                           doMaxScore);
     search(leaves, weight, collector);
     return (TopFieldDocs) collector.topDocs();
   }
@@ -613,7 +591,7 @@
         // continue with the following leaf
         continue;
       }
-      BulkScorer scorer = weight.bulkScorer(ctx, !leafCollector.acceptsDocsOutOfOrder(), ctx.reader().getLiveDocs());
+      BulkScorer scorer = weight.bulkScorer(ctx, ctx.reader().getLiveDocs());
       if (scorer != null) {
         try {
           scorer.score(leafCollector);
@@ -703,42 +681,24 @@
    */
   private static final class SearcherCallableNoSort implements Callable<TopDocs> {
 
-    private final Lock lock;
     private final IndexSearcher searcher;
     private final Weight weight;
     private final ScoreDoc after;
     private final int nDocs;
-    private final HitQueue hq;
     private final LeafSlice slice;
 
-    public SearcherCallableNoSort(Lock lock, IndexSearcher searcher, LeafSlice slice,  Weight weight,
-        ScoreDoc after, int nDocs, HitQueue hq) {
-      this.lock = lock;
+    public SearcherCallableNoSort(IndexSearcher searcher, LeafSlice slice, Weight weight,
+        ScoreDoc after, int nDocs) {
       this.searcher = searcher;
       this.weight = weight;
       this.after = after;
       this.nDocs = nDocs;
-      this.hq = hq;
       this.slice = slice;
     }
 
     @Override
     public TopDocs call() throws IOException {
-      final TopDocs docs = searcher.search(Arrays.asList(slice.leaves), weight, after, nDocs);
-      final ScoreDoc[] scoreDocs = docs.scoreDocs;
-      //it would be so nice if we had a thread-safe insert 
-      lock.lock();
-      try {
-        for (int j = 0; j < scoreDocs.length; j++) { // merge scoreDocs into hq
-          final ScoreDoc scoreDoc = scoreDocs[j];
-          if (scoreDoc == hq.insertWithOverflow(scoreDoc)) {
-            break;
-          }
-        }
-      } finally {
-        lock.unlock();
-      }
-      return docs;
+      return searcher.search(Arrays.asList(slice.leaves), weight, after, nDocs);
     }
   }
 
@@ -748,25 +708,21 @@
    */
   private static final class SearcherCallableWithSort implements Callable<TopFieldDocs> {
 
-    private final Lock lock;
     private final IndexSearcher searcher;
     private final Weight weight;
     private final int nDocs;
-    private final TopFieldCollector hq;
     private final Sort sort;
     private final LeafSlice slice;
     private final FieldDoc after;
     private final boolean doDocScores;
     private final boolean doMaxScore;
 
-    public SearcherCallableWithSort(Lock lock, IndexSearcher searcher, LeafSlice slice, Weight weight,
-                                    FieldDoc after, int nDocs, TopFieldCollector hq, Sort sort,
+    public SearcherCallableWithSort(IndexSearcher searcher, LeafSlice slice, Weight weight,
+                                    FieldDoc after, int nDocs, Sort sort,
                                     boolean doDocScores, boolean doMaxScore) {
-      this.lock = lock;
       this.searcher = searcher;
       this.weight = weight;
       this.nDocs = nDocs;
-      this.hq = hq;
       this.sort = sort;
       this.slice = slice;
       this.after = after;
@@ -774,85 +730,11 @@
       this.doMaxScore = doMaxScore;
     }
 
-    private final FakeScorer fakeScorer = new FakeScorer();
-
     @Override
     public TopFieldDocs call() throws IOException {
       assert slice.leaves.length == 1;
-      final TopFieldDocs docs = searcher.search(Arrays.asList(slice.leaves),
-          weight, after, nDocs, sort, true, doDocScores || sort.needsScores(), doMaxScore);
-      lock.lock();
-      try {
-        final LeafReaderContext ctx = slice.leaves[0];
-        final int base = ctx.docBase;
-        final LeafCollector collector = hq.getLeafCollector(ctx);
-        collector.setScorer(fakeScorer);
-        for(ScoreDoc scoreDoc : docs.scoreDocs) {
-          fakeScorer.doc = scoreDoc.doc - base;
-          fakeScorer.score = scoreDoc.score;
-          collector.collect(scoreDoc.doc-base);
-        }
-
-        // Carry over maxScore from sub:
-        if (doMaxScore && docs.getMaxScore() > hq.maxScore) {
-          hq.maxScore = docs.getMaxScore();
-        }
-      } finally {
-        lock.unlock();
-      }
-      return docs;
-    }
-  }
-
-  /**
-   * A helper class that wraps a {@link CompletionService} and provides an
-   * iterable interface to the completed {@link Callable} instances.
-   * 
-   * @param <T>
-   *          the type of the {@link Callable} return value
-   */
-  private static final class ExecutionHelper<T> implements Iterator<T>, Iterable<T> {
-    private final CompletionService<T> service;
-    private int numTasks;
-
-    ExecutionHelper(final Executor executor) {
-      this.service = new ExecutorCompletionService<>(executor);
-    }
-
-    @Override
-    public boolean hasNext() {
-      return numTasks > 0;
-    }
-
-    public void submit(Callable<T> task) {
-      this.service.submit(task);
-      ++numTasks;
-    }
-
-    @Override
-    public T next() {
-      if(!this.hasNext()) 
-        throw new NoSuchElementException("next() is called but hasNext() returned false");
-      try {
-        return service.take().get();
-      } catch (InterruptedException e) {
-        throw new ThreadInterruptedException(e);
-      } catch (ExecutionException e) {
-        throw new RuntimeException(e);
-      } finally {
-        --numTasks;
-      }
-    }
-
-    @Override
-    public void remove() {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public Iterator<T> iterator() {
-      // use the shortcut here - this is only used in a private context
-      return this;
+      return searcher.search(Arrays.asList(slice.leaves),
+          weight, after, nDocs, sort, true, doDocScores, doMaxScore);
     }
   }
 
diff --git a/lucene/core/src/java/org/apache/lucene/search/LRUFilterCache.java b/lucene/core/src/java/org/apache/lucene/search/LRUFilterCache.java
index aff78c9..0e9c0ad 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LRUFilterCache.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LRUFilterCache.java
@@ -49,6 +49,38 @@
  * {@link FilterCachingPolicy caching policies} that only cache on "large"
  * segments, and it is advised to not share this cache across too many indices.
  *
+ * Typical usage looks like this:
+ * <pre class="prettyprint">
+ *   final int maxNumberOfCachedFilters = 256;
+ *   final long maxRamBytesUsed = 50 * 1024L * 1024L; // 50MB
+ *   // these cache and policy instances can be shared across several filters and readers
+ *   // it is fine to eg. store them into static variables
+ *   final FilterCache filterCache = new LRUFilterCache(maxNumberOfCachedFilters, maxRamBytesUsed);
+ *   final FilterCachingPolicy defaultCachingPolicy = new UsageTrackingFilterCachingPolicy();
+ *   
+ *   // ...
+ *   
+ *   // Then at search time
+ *   Filter myFilter = ...;
+ *   Filter myCacheFilter = filterCache.doCache(myFilter, defaultCachingPolicy);
+ *   // myCacheFilter is now a wrapper around the original filter that will interact with the cache
+ *   IndexSearcher searcher = ...;
+ *   TopDocs topDocs = searcher.search(new ConstantScoreQuery(myCacheFilter), 10);
+ * </pre>
+ *
+ * This cache exposes some global statistics ({@link #getHitCount() hit count},
+ * {@link #getMissCount() miss count}, {@link #getCacheSize() number of cache
+ * entries}, {@link #getCacheCount() total number of DocIdSets that have ever
+ * been cached}, {@link #getEvictionCount() number of evicted entries}). In
+ * case you would like to have more fine-grained statistics, such as per-index
+ * or per-filter-class statistics, it is possible to override various callbacks:
+ * {@link #onHit}, {@link #onMiss},
+ * {@link #onFilterCache}, {@link #onFilterEviction},
+ * {@link #onDocIdSetCache}, {@link #onDocIdSetEviction} and {@link #onClear}.
+ * Avoid heavy computations in these methods, however, since they are
+ * called synchronously and while the cache lock is held.
+ *
+ * @see FilterCachingPolicy
  * @lucene.experimental
  */
 public class LRUFilterCache implements FilterCache, Accountable {
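Building on the callback documentation above, a hedged sketch of per-filter-class hit statistics; the hitsByClass map is invented for the example (imports: java.util.concurrent.ConcurrentHashMap, java.util.concurrent.atomic.AtomicLong):

    final ConcurrentHashMap<Class<?>, AtomicLong> hitsByClass = new ConcurrentHashMap<>();
    FilterCache cache = new LRUFilterCache(256, 50 * 1024L * 1024L) {
      @Override
      protected void onHit(Object readerCoreKey, Filter filter) {
        super.onHit(readerCoreKey, filter); // keep the global hit count accurate
        AtomicLong count = hitsByClass.get(filter.getClass());
        if (count == null) {
          AtomicLong previous = hitsByClass.putIfAbsent(filter.getClass(), count = new AtomicLong());
          if (previous != null) {
            count = previous;
          }
        }
        // kept cheap on purpose: these callbacks run under the cache lock
        count.incrementAndGet();
      }
    };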
@@ -96,6 +128,80 @@
     ramBytesUsed = 0;
   }
 
+  /**
+   * Expert: callback when there is a cache hit on a given filter.
+   * Implementing this method is typically useful in order to compute more
+   * fine-grained statistics about the filter cache.
+   * @see #onMiss
+   * @lucene.experimental
+   */
+  protected void onHit(Object readerCoreKey, Filter filter) {
+    hitCount += 1;
+  }
+
+  /**
+   * Expert: callback when there is a cache miss on a given filter.
+   * @see #onHit
+   * @lucene.experimental
+   */
+  protected void onMiss(Object readerCoreKey, Filter filter) {
+    assert filter != null;
+    missCount += 1;
+  }
+
+  /**
+   * Expert: callback when a filter is added to this cache.
+   * Implementing this method is typically useful in order to compute more
+   * fine-grained statistics about the filter cache.
+   * @see #onFilterEviction
+   * @lucene.experimental
+   */
+  protected void onFilterCache(Filter filter, long ramBytesUsed) {
+    this.ramBytesUsed += ramBytesUsed;
+  }
+
+  /**
+   * Expert: callback when a filter is evicted from this cache.
+   * @see #onFilterCache
+   * @lucene.experimental
+   */
+  protected void onFilterEviction(Filter filter, long ramBytesUsed) {
+    this.ramBytesUsed -= ramBytesUsed;
+  }
+
+  /**
+   * Expert: callback when a {@link DocIdSet} is added to this cache.
+   * Implementing this method is typically useful in order to compute more
+   * fine-grained statistics about the filter cache.
+   * @see #onDocIdSetEviction
+   * @lucene.experimental
+   */
+  protected void onDocIdSetCache(Object readerCoreKey, long ramBytesUsed) {
+    cacheSize += 1;
+    cacheCount += 1;
+    this.ramBytesUsed += ramBytesUsed;
+  }
+  
+  /**
+   * Expert: callback when one or more {@link DocIdSet}s are removed from this
+   * cache.
+   * @see #onDocIdSetCache
+   * @lucene.experimental
+   */
+  protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) {
+    this.ramBytesUsed -= sumRamBytesUsed;
+    cacheSize -= numEntries;
+  }
+
+  /**
+   * Expert: callback when the cache is completely cleared.
+   * @lucene.experimental
+   */
+  protected void onClear() {
+    ramBytesUsed = 0;
+    cacheSize = 0;
+  }
+
   /** Whether evictions are required. */
   boolean requiresEviction() {
     final int size = mostRecentlyUsedFilters.size();
@@ -107,22 +213,23 @@
   }
 
   synchronized DocIdSet get(Filter filter, LeafReaderContext context) {
-    final LeafCache leafCache = cache.get(context.reader().getCoreCacheKey());
+    final Object readerKey = context.reader().getCoreCacheKey();
+    final LeafCache leafCache = cache.get(readerKey);
     if (leafCache == null) {
-      missCount += 1;
+      onMiss(readerKey, filter);
       return null;
     }
     // this get call moves the filter to the most-recently-used position
     final Filter singleton = uniqueFilters.get(filter);
     if (singleton == null) {
-      missCount += 1;
+      onMiss(readerKey, filter);
       return null;
     }
     final DocIdSet cached = leafCache.get(singleton);
     if (cached == null) {
-      missCount += 1;
+      onMiss(readerKey, singleton);
     } else {
-      hitCount += 1;
+      onHit(readerKey, singleton);
     }
     return cached;
   }
@@ -132,13 +239,14 @@
     assert set.isCacheable();
     Filter singleton = uniqueFilters.putIfAbsent(filter, filter);
     if (singleton == null) {
-      ramBytesUsed += LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsed(filter);
+      onFilterCache(filter, LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsed(filter)); // singleton is null in this branch
     } else {
       filter = singleton;
     }
-    LeafCache leafCache = cache.get(context.reader().getCoreCacheKey());
+    final Object key = context.reader().getCoreCacheKey();
+    LeafCache leafCache = cache.get(key);
     if (leafCache == null) {
-      leafCache = new LeafCache();
+      leafCache = new LeafCache(key);
       final LeafCache previous = cache.put(context.reader().getCoreCacheKey(), leafCache);
       ramBytesUsed += HASHTABLE_RAM_BYTES_PER_ENTRY;
       assert previous == null;
@@ -172,8 +280,8 @@
   public synchronized void clearCoreCacheKey(Object coreKey) {
     final LeafCache leafCache = cache.remove(coreKey);
     if (leafCache != null) {
-      ramBytesUsed -= leafCache.ramBytesUsed + HASHTABLE_RAM_BYTES_PER_ENTRY;
-      cacheSize -= leafCache.cache.size();
+      ramBytesUsed -= HASHTABLE_RAM_BYTES_PER_ENTRY;
+      onDocIdSetEviction(coreKey, leafCache.cache.size(), leafCache.ramBytesUsed);
     }
   }
 
@@ -188,7 +296,7 @@
   }
 
   private void onEviction(Filter singleton) {
-    ramBytesUsed -= LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsed(singleton);
+    onFilterEviction(singleton, LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsed(singleton));
     for (LeafCache leafCache : cache.values()) {
       leafCache.remove(singleton);
     }
@@ -200,8 +308,7 @@
   public synchronized void clear() {
     cache.clear();
     mostRecentlyUsedFilters.clear();
-    ramBytesUsed = 0;
-    cacheSize = 0;
+    onClear();
   }
 
   // pkg-private for testing
@@ -388,17 +495,24 @@
   // this class is not thread-safe, everything but ramBytesUsed needs to be called under a lock
   private class LeafCache implements Accountable {
 
+    private final Object key;
     private final Map<Filter, DocIdSet> cache;
     private volatile long ramBytesUsed;
 
-    LeafCache() {
+    LeafCache(Object key) {
+      this.key = key;
       cache = new IdentityHashMap<>();
       ramBytesUsed = 0;
     }
 
-    private void incrementRamBytesUsed(long inc) {
-      ramBytesUsed += inc;
-      LRUFilterCache.this.ramBytesUsed += inc;
+    private void onDocIdSetCache(long ramBytesUsed) {
+      this.ramBytesUsed += ramBytesUsed;
+      LRUFilterCache.this.onDocIdSetCache(key, ramBytesUsed);
+    }
+
+    private void onDocIdSetEviction(long ramBytesUsed) {
+      this.ramBytesUsed -= ramBytesUsed;
+      LRUFilterCache.this.onDocIdSetEviction(key, 1, ramBytesUsed);
     }
 
     DocIdSet get(Filter filter) {
@@ -408,17 +522,14 @@
     void putIfAbsent(Filter filter, DocIdSet set) {
       if (cache.putIfAbsent(filter, set) == null) {
         // the set was actually put
-        cacheCount += 1;
-        cacheSize += 1;
-        incrementRamBytesUsed(HASHTABLE_RAM_BYTES_PER_ENTRY + set.ramBytesUsed());
+        onDocIdSetCache(HASHTABLE_RAM_BYTES_PER_ENTRY + set.ramBytesUsed());
       }
     }
 
     void remove(Filter filter) {
       DocIdSet removed = cache.remove(filter);
       if (removed != null) {
-        cacheSize -= 1;
-        incrementRamBytesUsed(-(HASHTABLE_RAM_BYTES_PER_ENTRY + removed.ramBytesUsed()));
+        onDocIdSetEviction(HASHTABLE_RAM_BYTES_PER_ENTRY + removed.ramBytesUsed());
       }
     }
 
diff --git a/lucene/core/src/java/org/apache/lucene/search/LeafCollector.java b/lucene/core/src/java/org/apache/lucene/search/LeafCollector.java
index 38a05aa..75cc4cd 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LeafCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LeafCollector.java
@@ -56,14 +56,9 @@
  *         bits.set(docBase + doc);
  *       }
  *
- *       // accept docs out of order (for a BitSet it doesn't matter)
- *       public boolean acceptsDocsOutOfOrder() {
- *         return true;
- *       }
- *          
  *     };
  *   }
- *      
+ *
  * });
  * </pre>
  *
@@ -98,22 +93,4 @@
    */
   void collect(int doc) throws IOException;
 
-  /**
-   * Return <code>true</code> if this collector does not
-   * require the matching docIDs to be delivered in int sort
-   * order (smallest to largest) to {@link #collect}.
-   *
-   * <p> Most Lucene Query implementations will visit
-   * matching docIDs in order.  However, some queries
-   * (currently limited to certain cases of {@link
-   * BooleanQuery}) can achieve faster searching if the
-   * <code>Collector</code> allows them to deliver the
-   * docIDs out of order.</p>
-   *
-   * <p> Many collectors don't mind getting docIDs out of
-   * order, so it's important to return <code>true</code>
-   * here.
-   */
-  boolean acceptsDocsOutOfOrder();
-
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
new file mode 100644
index 0000000..3d1e43e
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
@@ -0,0 +1,119 @@
+package org.apache.lucene.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+/**
+ * Expert: comparator that gets instantiated on each leaf
+ * from a top-level {@link FieldComparator} instance.
+ *
+ * <p>A leaf comparator must define these functions:</p>
+ *
+ * <ul>
+ *
+ *  <li> {@link #setBottom} This method is called by
+ *       {@link FieldValueHitQueue} to notify the
+ *       FieldComparator of the current weakest ("bottom")
+ *       slot.  Note that this slot may not hold the weakest
+ *       value according to your comparator, in cases where
+ *       your comparator is not the primary one (ie, is only
+ *       used to break ties from the comparators before it).
+ *
+ *  <li> {@link #compareBottom} Compare a new hit (docID)
+ *       against the "weakest" (bottom) entry in the queue.
+ *
+ *  <li> {@link #compareTop} Compare a new hit (docID)
+ *       against the top value previously set by a call to
+ *       {@link FieldComparator#setTopValue}.
+ *
+ *  <li> {@link #copy} Installs a new hit into the
+ *       priority queue.  The {@link FieldValueHitQueue}
+ *       calls this method when a new hit is competitive.
+ *
+ * </ul>
+ *
+ * @see FieldComparator
+ * @lucene.experimental
+ */
+public interface LeafFieldComparator {
+
+  /**
+   * Set the bottom slot, ie the "weakest" (sorted last)
+   * entry in the queue.  When {@link #compareBottom} is
+   * called, you should compare against this slot.  This
+   * will always be called before {@link #compareBottom}.
+   * 
+   * @param slot the currently weakest (sorted last) slot in the queue
+   */
+  void setBottom(final int slot);
+
+  /**
+   * Compare the bottom of the queue with this doc.  This will
+   * only be invoked after setBottom has been called.  This
+   * should return the same result as {@link
+   * FieldComparator#compare(int,int)} as if bottom were slot1 and the new
+   * document were slot 2.
+   *    
+   * <p>For a search that hits many results, this method
+   * will be the hotspot (invoked by far the most
+   * frequently).</p>
+   * 
+   * @param doc that was hit
+   * @return any {@code N < 0} if the doc's value is sorted after
+   * the bottom entry (not competitive), any {@code N > 0} if the
+   * doc's value is sorted before the bottom entry and {@code 0} if
+   * they are equal.
+   */
+  int compareBottom(int doc) throws IOException;
+
+  /**
+   * Compare the top value with this doc.  This will
+   * only be invoked after setTopValue has been called.  This
+   * should return the same result as {@link
+   * FieldComparator#compare(int,int)} as if topValue were slot1 and the new
+   * document were slot 2.  This is only called for searches that
+   * use searchAfter (deep paging).
+   *    
+   * @param doc that was hit
+   * @return any {@code N < 0} if the doc's value is sorted after
+   * the top entry (not competitive), any {@code N > 0} if the
+   * doc's value is sorted before the top entry and {@code 0} if
+   * they are equal.
+   */
+  int compareTop(int doc) throws IOException;
+
+  /**
+   * This method is called when a new hit is competitive.
+   * You should copy any state associated with this document
+   * that will be required for future comparisons, into the
+   * specified slot.
+   * 
+   * @param slot which slot to copy the hit to
+   * @param doc docID relative to current reader
+   */
+  void copy(int slot, int doc) throws IOException;
+
+  /** Sets the Scorer to use in case a document's score is
+   *  needed.
+   * 
+   * @param scorer Scorer instance that you should use to
+   * obtain the current hit's score, if necessary. */
+  void setScorer(Scorer scorer);
+
+}
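For orientation (not in the patch): the calling sequence a collector follows for one segment. All names here (fieldComparator, leafContext, scorer, queueFull, the slot variables, reverseMul) are assumed.

    LeafFieldComparator leaf = fieldComparator.getLeafComparator(leafContext);
    leaf.setScorer(scorer);              // in case the comparator needs scores
    if (!queueFull) {
      leaf.copy(slot, doc);              // install the hit into a free slot
    } else if (reverseMul * leaf.compareBottom(doc) > 0) {
      leaf.copy(bottomSlot, doc);        // competitive: replace the weakest entry
      leaf.setBottom(newBottomSlot);     // then re-establish the bottom
    }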
diff --git a/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java b/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java
index d8b751b..ed49b3c 100644
--- a/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java
@@ -56,7 +56,7 @@
       while(liveDocs != null && doc < maxDoc && !liveDocs.get(doc)) {
         doc++;
       }
-      if (doc == maxDoc) {
+      if (doc >= maxDoc) { // can be > maxDoc when called from advance()
         doc = NO_MORE_DOCS;
       }
       return doc;
diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java b/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java
index b901515..495fbf7 100644
--- a/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/MultiCollector.java
@@ -124,16 +124,6 @@
       }
     }
 
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      for (LeafCollector c : collectors) {
-        if (!c.acceptsDocsOutOfOrder()) {
-          return false;
-        }
-      }
-      return true;
-    }
-
   }
 
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java
index ffaa701..3d1fa5e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java
@@ -499,7 +499,7 @@
   @Override
   public final int nextDoc() throws IOException {
     if (_queue.size() == 0) {
-      return NO_MORE_DOCS;
+      return _doc = NO_MORE_DOCS;
     }
 
     // TODO: move this init into positions(): if the search
diff --git a/lucene/core/src/java/org/apache/lucene/search/NumericComparator.java b/lucene/core/src/java/org/apache/lucene/search/NumericComparator.java
index a7bc7be..f594c8c 100644
--- a/lucene/core/src/java/org/apache/lucene/search/NumericComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/NumericComparator.java
@@ -27,7 +27,7 @@
 /**
  * Base FieldComparator class for numeric types
  */
-public abstract class NumericComparator<T extends Number> extends FieldComparator<T> {
+public abstract class NumericComparator<T extends Number> extends SimpleFieldComparator<T> {
   private final long[] values;
   private final long missingValue;
   private long bottom;
@@ -78,14 +78,13 @@
   }
 
   @Override
-  public FieldComparator<T> setNextReader(LeafReaderContext context) throws IOException {
+  public void doSetNextReader(LeafReaderContext context) throws IOException {
     currentReaderValues = getNumericDocValues(context, field);
     docsWithField = DocValues.getDocsWithField(context.reader(), field);
     // optimization to remove unneeded checks on the bit interface:
     if (docsWithField instanceof Bits.MatchAllBits) {
       docsWithField = null;
     }
-    return this;
   }
     
   @Override
diff --git a/lucene/core/src/java/org/apache/lucene/search/SimpleCollector.java b/lucene/core/src/java/org/apache/lucene/search/SimpleCollector.java
index 960d965..dbe90a7 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SimpleCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SimpleCollector.java
@@ -45,9 +45,6 @@
   // redeclare methods so that javadocs are inherited on sub-classes
 
   @Override
-  public abstract boolean acceptsDocsOutOfOrder();
-
-  @Override
   public abstract void collect(int doc) throws IOException;
 
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/SimpleFieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/SimpleFieldComparator.java
new file mode 100644
index 0000000..eb304c1
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/search/SimpleFieldComparator.java
@@ -0,0 +1,42 @@
+package org.apache.lucene.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import org.apache.lucene.index.LeafReaderContext;
+
+/**
+ * Base {@link FieldComparator} implementation that is used for all contexts.
+ *
+ * @lucene.experimental
+ */
+public abstract class SimpleFieldComparator<T> extends FieldComparator<T> implements LeafFieldComparator {
+
+  /** This method is called before collecting <code>context</code>. */
+  protected abstract void doSetNextReader(LeafReaderContext context) throws IOException;
+
+  @Override
+  public final LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
+    doSetNextReader(context);
+    return this;
+  }
+
+  @Override
+  public void setScorer(Scorer scorer) {}
+}
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortRescorer.java b/lucene/core/src/java/org/apache/lucene/search/SortRescorer.java
index 8d51440..af9cd67 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortRescorer.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortRescorer.java
@@ -53,7 +53,7 @@
 
     List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
 
-    TopFieldCollector collector = TopFieldCollector.create(sort, topN, true, true, true, false);
+    TopFieldCollector collector = TopFieldCollector.create(sort, topN, true, true, true);
 
     // Now merge sort docIDs from hits, with reader's leaves:
     int hitUpto = 0;
@@ -61,6 +61,7 @@
     int endDoc = 0;
     int docBase = 0;
 
+    LeafCollector leafCollector = null;
     FakeScorer fakeScorer = new FakeScorer();
 
     while (hitUpto < hits.length) {
@@ -75,15 +76,15 @@
 
       if (readerContext != null) {
         // We advanced to another segment:
-        collector.getLeafCollector(readerContext);
-        collector.setScorer(fakeScorer);
+        leafCollector = collector.getLeafCollector(readerContext);
+        leafCollector.setScorer(fakeScorer);
         docBase = readerContext.docBase;
       }
 
       fakeScorer.score = hit.score;
       fakeScorer.doc = docID - docBase;
 
-      collector.collect(fakeScorer.doc);
+      leafCollector.collect(fakeScorer.doc);
 
       hitUpto++;
     }
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopDocsCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopDocsCollector.java
index cbef3b3..d9da8ae 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopDocsCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopDocsCollector.java
@@ -31,7 +31,7 @@
  * however, you might want to consider overriding all methods, in order to avoid
  * a NullPointerException.
  */
-public abstract class TopDocsCollector<T extends ScoreDoc> extends SimpleCollector {
+public abstract class TopDocsCollector<T extends ScoreDoc> implements Collector {
 
   /** This is used in case topDocs() is called with illegal parameters, or there
    *  simply aren't (enough) results. */
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
index 092ad3c..b7c2ca4 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
@@ -27,127 +27,184 @@
  * A {@link Collector} that sorts by {@link SortField} using
  * {@link FieldComparator}s.
  * <p/>
- * See the {@link #create(org.apache.lucene.search.Sort, int, boolean, boolean, boolean, boolean)} method
+ * See the {@link #create(org.apache.lucene.search.Sort, int, boolean, boolean, boolean)} method
  * for instantiating a TopFieldCollector.
- * 
+ *
  * @lucene.experimental
  */
 public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
-  
+
   // TODO: one optimization we could do is to pre-fill
   // the queue with sentinel value that guaranteed to
   // always compare lower than a real hit; this would
   // save having to check queueFull on each insert
 
-  /*
-   * Implements a TopFieldCollector over one SortField criteria, without
-   * tracking document scores and maxScore.
-   */
-  private static class OneComparatorNonScoringCollector extends 
-      TopFieldCollector {
+  private static abstract class OneComparatorLeafCollector implements LeafCollector {
 
-    FieldComparator<?> comparator;
+    final LeafFieldComparator comparator;
     final int reverseMul;
-    final FieldValueHitQueue<Entry> queue;
-    
-    public OneComparatorNonScoringCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-      this.queue = queue;
-      comparator = queue.getComparators()[0];
-      reverseMul = queue.getReverseMul()[0];
-    }
-    
-    final void updateBottom(int doc) {
-      // bottom.score is already set to Float.NaN in add().
-      bottom.doc = docBase + doc;
-      bottom = pq.updateTop();
+    Scorer scorer;
+
+    OneComparatorLeafCollector(LeafFieldComparator comparator, int reverseMul) {
+      this.comparator = comparator;
+      this.reverseMul = reverseMul;
     }
 
     @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        if ((reverseMul * comparator.compareBottom(doc)) <= 0) {
-          // since docs are visited in doc Id order, if compare is 0, it means
-          // this document is larger than anything else in the queue, and
-          // therefore not competitive.
-          return;
-        }
-        
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        comparator.copy(bottom.slot, doc);
-        updateBottom(doc);
-        comparator.setBottom(bottom.slot);
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        comparator.copy(slot, doc);
-        add(slot, doc, Float.NaN);
-        if (queueFull) {
-          comparator.setBottom(bottom.slot);
-        }
-      }
-    }
-    
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      this.docBase = context.docBase;
-      queue.setComparator(0, comparator.setNextReader(context));
-      comparator = queue.firstComparator;
-    }
-    
-    @Override
     public void setScorer(Scorer scorer) throws IOException {
+      this.scorer = scorer;
       comparator.setScorer(scorer);
     }
-    
+  }
+
+  private static abstract class MultiComparatorLeafCollector implements LeafCollector {
+
+    final LeafFieldComparator[] comparators;
+    final int[] reverseMul;
+    final LeafFieldComparator firstComparator;
+    final int firstReverseMul;
+    Scorer scorer;
+
+    MultiComparatorLeafCollector(LeafFieldComparator[] comparators, int[] reverseMul) {
+      this.comparators = comparators;
+      this.reverseMul = reverseMul;
+      firstComparator = comparators[0];
+      firstReverseMul = reverseMul[0];
+    }
+
+    protected final int compareBottom(int doc) throws IOException {
+      int cmp = firstReverseMul * firstComparator.compareBottom(doc);
+      if (cmp != 0) {
+        return cmp;
+      }
+      for (int i = 1; i < comparators.length; ++i) {
+        cmp = reverseMul[i] * comparators[i].compareBottom(doc);
+        if (cmp != 0) {
+          return cmp;
+        }
+      }
+      return 0;
+    }
+
+    protected final void copy(int slot, int doc) throws IOException {
+      for (LeafFieldComparator comparator : comparators) {
+        comparator.copy(slot, doc);
+      }
+    }
+
+    protected final void setBottom(int slot) {
+      for (LeafFieldComparator comparator : comparators) {
+        comparator.setBottom(slot);
+      }
+    }
+
+    protected final int compareTop(int doc) throws IOException {
+      int cmp = firstReverseMul * firstComparator.compareTop(doc);
+      if (cmp != 0) {
+        return cmp;
+      }
+      for (int i = 1; i < comparators.length; ++i) {
+        cmp = reverseMul[i] * comparators[i].compareTop(doc);
+        if (cmp != 0) {
+          return cmp;
+        }
+      }
+      return 0;
+    }
+
+    @Override
+    public void setScorer(Scorer scorer) throws IOException {
+      this.scorer = scorer;
+      for (LeafFieldComparator comparator : comparators) {
+        comparator.setScorer(scorer);
+      }
+    }
   }
 
   /*
    * Implements a TopFieldCollector over one SortField criteria, without
-   * tracking document scores and maxScore, and assumes out of orderness in doc
-   * Ids collection.
+   * tracking document scores and maxScore.
    */
-  private static class OutOfOrderOneComparatorNonScoringCollector extends
-      OneComparatorNonScoringCollector {
+  private static class NonScoringCollector extends TopFieldCollector {
 
-    public OutOfOrderOneComparatorNonScoringCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
+    final FieldValueHitQueue<Entry> queue;
+
+    public NonScoringCollector(FieldValueHitQueue<Entry> queue, int numHits, boolean fillFields) {
       super(queue, numHits, fillFields);
+      this.queue = queue;
     }
-    
+
     @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        final int cmp = reverseMul * comparator.compareBottom(doc);
-        if (cmp < 0 || (cmp == 0 && doc + docBase > bottom.doc)) {
-          return;
-        }
-        
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        comparator.copy(bottom.slot, doc);
-        updateBottom(doc);
-        comparator.setBottom(bottom.slot);
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      docBase = context.docBase;
+
+      final LeafFieldComparator[] comparators = queue.getComparators(context);
+      final int[] reverseMul = queue.getReverseMul();
+
+      if (comparators.length == 1) {
+        return new OneComparatorLeafCollector(comparators[0], reverseMul[0]) {
+
+          @Override
+          public void collect(int doc) throws IOException {
+            ++totalHits;
+            if (queueFull) {
+              if ((reverseMul * comparator.compareBottom(doc)) <= 0) {
+                // since docs are visited in doc Id order, if compare is 0, it means
+                // this document is larger than anything else in the queue, and
+                // therefore not competitive.
+                return;
+              }
+
+              // This hit is competitive - replace bottom element in queue & adjustTop
+              comparator.copy(bottom.slot, doc);
+              updateBottom(doc);
+              comparator.setBottom(bottom.slot);
+            } else {
+              // Startup transient: queue hasn't gathered numHits yet
+              final int slot = totalHits - 1;
+              // Copy hit into queue
+              comparator.copy(slot, doc);
+              add(slot, doc, Float.NaN);
+              if (queueFull) {
+                comparator.setBottom(bottom.slot);
+              }
+            }
+          }
+
+        };
       } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        comparator.copy(slot, doc);
-        add(slot, doc, Float.NaN);
-        if (queueFull) {
-          comparator.setBottom(bottom.slot);
-        }
+        return new MultiComparatorLeafCollector(comparators, reverseMul) {
+
+          @Override
+          public void collect(int doc) throws IOException {
+            ++totalHits;
+            if (queueFull) {
+              if ((compareBottom(doc)) <= 0) {
+                // since docs are visited in doc Id order, if compare is 0, it means
+                // this document is larger than anything else in the queue, and
+                // therefore not competitive.
+                return;
+              }
+
+              // This hit is competitive - replace bottom element in queue & adjustTop
+              copy(bottom.slot, doc);
+              updateBottom(doc);
+              setBottom(bottom.slot);
+            } else {
+              // Startup transient: queue hasn't gathered numHits yet
+              final int slot = totalHits - 1;
+              // Copy hit into queue
+              copy(slot, doc);
+              add(slot, doc, Float.NaN);
+              if (queueFull) {
+                setBottom(bottom.slot);
+              }
+            }
+          }
+
+        };
       }
     }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
 
   }
 
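The MultiComparatorLeafCollector above centralizes the tie-break loop the old collectors each inlined: the first comparator is tried eagerly (the common case), and the remaining ones are consulted only on a tie; reverseMul[i] (+1 for ascending, -1 for descending SortFields) folds the sort direction into the sign of each comparison. The same pattern in miniature, as a self-contained sketch over plain java.util.Comparator (all names hypothetical):

    import java.util.Comparator;
    import java.util.List;

    // Hypothetical illustration of the comparator-chain pattern behind
    // MultiComparatorLeafCollector.compareBottom()/compareTop().
    final class ChainedComparator<T> implements Comparator<T> {
      private final List<Comparator<T>> chain; // one entry per sort criterion

      ChainedComparator(List<Comparator<T>> chain) {
        this.chain = chain;
      }

      @Override
      public int compare(T a, T b) {
        int cmp = chain.get(0).compare(a, b); // first criterion usually decides
        for (int i = 1; cmp == 0 && i < chain.size(); i++) {
          cmp = chain.get(i).compare(a, b);   // tie: fall through to the next one
        }
        return cmp;
      }
    }
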
@@ -155,112 +212,98 @@
    * Implements a TopFieldCollector over one SortField criteria, while tracking
    * document scores but no maxScore.
    */
-  private static class OneComparatorScoringNoMaxScoreCollector extends
-      OneComparatorNonScoringCollector {
+  private static class ScoringNoMaxScoreCollector extends TopFieldCollector {
 
-    Scorer scorer;
+    final FieldValueHitQueue<Entry> queue;
 
-    public OneComparatorScoringNoMaxScoreCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
+    public ScoringNoMaxScoreCollector(FieldValueHitQueue<Entry> queue, int numHits, boolean fillFields) {
       super(queue, numHits, fillFields);
-    }
-    
-    final void updateBottom(int doc, float score) {
-      bottom.doc = docBase + doc;
-      bottom.score = score;
-      bottom = pq.updateTop();
+      this.queue = queue;
     }
 
     @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        if ((reverseMul * comparator.compareBottom(doc)) <= 0) {
-          // since docs are visited in doc Id order, if compare is 0, it means
-          // this document is largest than anything else in the queue, and
-          // therefore not competitive.
-          return;
-        }
-        
-        // Compute the score only if the hit is competitive.
-        final float score = scorer.score();
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      docBase = context.docBase;
 
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        comparator.copy(bottom.slot, doc);
-        updateBottom(doc, score);
-        comparator.setBottom(bottom.slot);
+      final LeafFieldComparator[] comparators = queue.getComparators(context);
+      final int[] reverseMul = queue.getReverseMul();
+
+      if (comparators.length == 1) {
+        return new OneComparatorLeafCollector(comparators[0], reverseMul[0]) {
+
+          @Override
+          public void collect(int doc) throws IOException {
+            ++totalHits;
+            if (queueFull) {
+              if ((reverseMul * comparator.compareBottom(doc)) <= 0) {
+                // since docs are visited in doc Id order, if compare is 0, it means
+                // this document is larger than anything else in the queue, and
+                // therefore not competitive.
+                return;
+              }
+
+              // Compute the score only if the hit is competitive.
+              final float score = scorer.score();
+
+              // This hit is competitive - replace bottom element in queue & adjustTop
+              comparator.copy(bottom.slot, doc);
+              updateBottom(doc, score);
+              comparator.setBottom(bottom.slot);
+            } else {
+              // Compute the score only if the hit is competitive.
+              final float score = scorer.score();
+
+              // Startup transient: queue hasn't gathered numHits yet
+              final int slot = totalHits - 1;
+              // Copy hit into queue
+              comparator.copy(slot, doc);
+              add(slot, doc, score);
+              if (queueFull) {
+                comparator.setBottom(bottom.slot);
+              }
+            }
+          }
+
+        };
       } else {
-        // Compute the score only if the hit is competitive.
-        final float score = scorer.score();
+        return new MultiComparatorLeafCollector(comparators, reverseMul) {
 
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        comparator.copy(slot, doc);
-        add(slot, doc, score);
-        if (queueFull) {
-          comparator.setBottom(bottom.slot);
-        }
+          @Override
+          public void collect(int doc) throws IOException {
+            ++totalHits;
+            if (queueFull) {
+              if ((compareBottom(doc)) <= 0) {
+                // since docs are visited in doc Id order, if compare is 0, it means
+                // this document is larger than anything else in the queue, and
+                // therefore not competitive.
+                return;
+              }
+
+              // Compute the score only if the hit is competitive.
+              final float score = scorer.score();
+
+              // This hit is competitive - replace bottom element in queue & adjustTop
+              copy(bottom.slot, doc);
+              updateBottom(doc, score);
+              setBottom(bottom.slot);
+            } else {
+              // Compute the score only if the hit is competitive.
+              final float score = scorer.score();
+
+              // Startup transient: queue hasn't gathered numHits yet
+              final int slot = totalHits - 1;
+              // Copy hit into queue
+              copy(slot, doc);
+              add(slot, doc, score);
+              if (queueFull) {
+                setBottom(bottom.slot);
+              }
+            }
+          }
+
+        };
       }
     }
-    
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-      comparator.setScorer(scorer);
-    }
-    
-  }
-
-  /*
-   * Implements a TopFieldCollector over one SortField criteria, while tracking
-   * document scores but no maxScore, and assumes out of orderness in doc Ids
-   * collection.
-   */
-  private static class OutOfOrderOneComparatorScoringNoMaxScoreCollector extends
-      OneComparatorScoringNoMaxScoreCollector {
-
-    public OutOfOrderOneComparatorScoringNoMaxScoreCollector(
-        FieldValueHitQueue<Entry> queue, int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-    }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        final int cmp = reverseMul * comparator.compareBottom(doc);
-        if (cmp < 0 || (cmp == 0 && doc + docBase > bottom.doc)) {
-          return;
-        }
-        
-        // Compute the score only if the hit is competitive.
-        final float score = scorer.score();
-
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        comparator.copy(bottom.slot, doc);
-        updateBottom(doc, score);
-        comparator.setBottom(bottom.slot);
-      } else {
-        // Compute the score only if the hit is competitive.
-        final float score = scorer.score();
-
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        comparator.copy(slot, doc);
-        add(slot, doc, score);
-        if (queueFull) {
-          comparator.setBottom(bottom.slot);
-        }
-      }
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
 
   }
 
@@ -268,577 +311,96 @@
    * Implements a TopFieldCollector over one SortField criteria, with tracking
    * document scores and maxScore.
    */
-  private static class OneComparatorScoringMaxScoreCollector extends
-      OneComparatorNonScoringCollector {
+  private static class ScoringMaxScoreCollector extends TopFieldCollector {
 
-    Scorer scorer;
-    
-    public OneComparatorScoringMaxScoreCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-      // Must set maxScore to NEG_INF, or otherwise Math.max always returns NaN.
-      maxScore = Float.NEGATIVE_INFINITY;
-    }
-    
-    final void updateBottom(int doc, float score) {
-      bottom.doc = docBase + doc;
-      bottom.score = score;
-      bottom =  pq.updateTop();
-    }
-
-    @Override
-    public void collect(int doc) throws IOException {
-      final float score = scorer.score();
-      if (score > maxScore) {
-        maxScore = score;
-      }
-      ++totalHits;
-      if (queueFull) {
-        if ((reverseMul * comparator.compareBottom(doc)) <= 0) {
-          // since docs are visited in doc Id order, if compare is 0, it means
-          // this document is largest than anything else in the queue, and
-          // therefore not competitive.
-          return;
-        }
-        
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        comparator.copy(bottom.slot, doc);
-        updateBottom(doc, score);
-        comparator.setBottom(bottom.slot);
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        comparator.copy(slot, doc);
-        add(slot, doc, score);
-        if (queueFull) {
-          comparator.setBottom(bottom.slot);
-        }
-      }
-
-    }
-    
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-      super.setScorer(scorer);
-    }
-  }
-
-  /*
-   * Implements a TopFieldCollector over one SortField criteria, with tracking
-   * document scores and maxScore, and assumes out of orderness in doc Ids
-   * collection.
-   */
-  private static class OutOfOrderOneComparatorScoringMaxScoreCollector extends
-      OneComparatorScoringMaxScoreCollector {
-
-    public OutOfOrderOneComparatorScoringMaxScoreCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-    }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      final float score = scorer.score();
-      if (score > maxScore) {
-        maxScore = score;
-      }
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        final int cmp = reverseMul * comparator.compareBottom(doc);
-        if (cmp < 0 || (cmp == 0 && doc + docBase > bottom.doc)) {
-          return;
-        }
-        
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        comparator.copy(bottom.slot, doc);
-        updateBottom(doc, score);
-        comparator.setBottom(bottom.slot);
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        comparator.copy(slot, doc);
-        add(slot, doc, score);
-        if (queueFull) {
-          comparator.setBottom(bottom.slot);
-        }
-      }
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
-
-  }
-
-  /*
-   * Implements a TopFieldCollector over multiple SortField criteria, without
-   * tracking document scores and maxScore.
-   */
-  private static class MultiComparatorNonScoringCollector extends TopFieldCollector {
-    
-    final FieldComparator<?>[] comparators;
-    final int[] reverseMul;
     final FieldValueHitQueue<Entry> queue;
-    public MultiComparatorNonScoringCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
+
+    public ScoringMaxScoreCollector(FieldValueHitQueue<Entry> queue, int numHits, boolean fillFields) {
       super(queue, numHits, fillFields);
       this.queue = queue;
-      comparators = queue.getComparators();
-      reverseMul = queue.getReverseMul();
-    }
-    
-    final void updateBottom(int doc) {
-      // bottom.score is already set to Float.NaN in add().
-      bottom.doc = docBase + doc;
-      bottom = pq.updateTop();
+      maxScore = Float.MIN_NORMAL; // otherwise we would keep NaN
     }
 
     @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        for (int i = 0;; i++) {
-          final int c = reverseMul[i] * comparators[i].compareBottom(doc);
-          if (c < 0) {
-            // Definitely not competitive.
-            return;
-          } else if (c > 0) {
-            // Definitely competitive.
-            break;
-          } else if (i == comparators.length - 1) {
-            // Here c=0. If we're at the last comparator, this doc is not
-            // competitive, since docs are visited in doc Id order, which means
-            // this doc cannot compete with any other document in the queue.
-            return;
-          }
-        }
-
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(bottom.slot, doc);
-        }
-
-        updateBottom(doc);
-
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
-        }
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(slot, doc);
-        }
-        add(slot, doc, Float.NaN);
-        if (queueFull) {
-          for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottom.slot);
-          }
-        }
-      }
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
       docBase = context.docBase;
-      for (int i = 0; i < comparators.length; i++) {
-        queue.setComparator(i, comparators[i].setNextReader(context));
-      }
-    }
 
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      // set the scorer on all comparators
-      for (int i = 0; i < comparators.length; i++) {
-        comparators[i].setScorer(scorer);
-      }
-    }
-  }
-  
-  /*
-   * Implements a TopFieldCollector over multiple SortField criteria, without
-   * tracking document scores and maxScore, and assumes out of orderness in doc
-   * Ids collection.
-   */
-  private static class OutOfOrderMultiComparatorNonScoringCollector extends
-      MultiComparatorNonScoringCollector {
-    
-    public OutOfOrderMultiComparatorNonScoringCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-    }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        for (int i = 0;; i++) {
-          final int c = reverseMul[i] * comparators[i].compareBottom(doc);
-          if (c < 0) {
-            // Definitely not competitive.
-            return;
-          } else if (c > 0) {
-            // Definitely competitive.
-            break;
-          } else if (i == comparators.length - 1) {
-            // This is the equals case.
-            if (doc + docBase > bottom.doc) {
-              // Definitely not competitive
-              return;
+      final LeafFieldComparator[] comparators = queue.getComparators(context);
+      final int[] reverseMul = queue.getReverseMul();
+
+      if (comparators.length == 1) {
+        return new OneComparatorLeafCollector(comparators[0], reverseMul[0]) {
+
+          @Override
+          public void collect(int doc) throws IOException {
+            final float score = scorer.score();
+            if (score > maxScore) {
+              maxScore = score;
             }
-            break;
-          }
-        }
+            ++totalHits;
+            if (queueFull) {
+              if (reverseMul * comparator.compareBottom(doc) <= 0) {
+                // since docs are visited in doc Id order, if compare is 0, it means
+                // this document is larger than anything else in the queue, and
+                // therefore not competitive.
+                return;
+              }
 
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(bottom.slot, doc);
-        }
-
-        updateBottom(doc);
-
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
-        }
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(slot, doc);
-        }
-        add(slot, doc, Float.NaN);
-        if (queueFull) {
-          for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottom.slot);
-          }
-        }
-      }
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
-
-  }
-
-  /*
-   * Implements a TopFieldCollector over multiple SortField criteria, with
-   * tracking document scores and maxScore.
-   */
-  private static class MultiComparatorScoringMaxScoreCollector extends MultiComparatorNonScoringCollector {
-    
-    Scorer scorer;
-    
-    public MultiComparatorScoringMaxScoreCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-      // Must set maxScore to NEG_INF, or otherwise Math.max always returns NaN.
-      maxScore = Float.NEGATIVE_INFINITY;
-    }
-    
-    final void updateBottom(int doc, float score) {
-      bottom.doc = docBase + doc;
-      bottom.score = score;
-      bottom =  pq.updateTop();
-    }
-
-    @Override
-    public void collect(int doc) throws IOException {
-      final float score = scorer.score();
-      if (score > maxScore) {
-        maxScore = score;
-      }
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        for (int i = 0;; i++) {
-          final int c = reverseMul[i] * comparators[i].compareBottom(doc);
-          if (c < 0) {
-            // Definitely not competitive.
-            return;
-          } else if (c > 0) {
-            // Definitely competitive.
-            break;
-          } else if (i == comparators.length - 1) {
-            // Here c=0. If we're at the last comparator, this doc is not
-            // competitive, since docs are visited in doc Id order, which means
-            // this doc cannot compete with any other document in the queue.
-            return;
-          }
-        }
-
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(bottom.slot, doc);
-        }
-
-        updateBottom(doc, score);
-
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
-        }
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(slot, doc);
-        }
-        add(slot, doc, score);
-        if (queueFull) {
-          for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottom.slot);
-          }
-        }
-      }
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-      super.setScorer(scorer);
-    }
-  }
-
-  /*
-   * Implements a TopFieldCollector over multiple SortField criteria, with
-   * tracking document scores and maxScore, and assumes out of orderness in doc
-   * Ids collection.
-   */
-  private final static class OutOfOrderMultiComparatorScoringMaxScoreCollector
-      extends MultiComparatorScoringMaxScoreCollector {
-    
-    public OutOfOrderMultiComparatorScoringMaxScoreCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-    }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      final float score = scorer.score();
-      if (score > maxScore) {
-        maxScore = score;
-      }
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        for (int i = 0;; i++) {
-          final int c = reverseMul[i] * comparators[i].compareBottom(doc);
-          if (c < 0) {
-            // Definitely not competitive.
-            return;
-          } else if (c > 0) {
-            // Definitely competitive.
-            break;
-          } else if (i == comparators.length - 1) {
-            // This is the equals case.
-            if (doc + docBase > bottom.doc) {
-              // Definitely not competitive
-              return;
+              // This hit is competitive - replace bottom element in queue & adjustTop
+              comparator.copy(bottom.slot, doc);
+              updateBottom(doc, score);
+              comparator.setBottom(bottom.slot);
+            } else {
+              // Startup transient: queue hasn't gathered numHits yet
+              final int slot = totalHits - 1;
+              // Copy hit into queue
+              comparator.copy(slot, doc);
+              add(slot, doc, score);
+              if (queueFull) {
+                comparator.setBottom(bottom.slot);
+              }
             }
-            break;
           }
-        }
 
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(bottom.slot, doc);
-        }
-
-        updateBottom(doc, score);
-
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
-        }
+        };
       } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(slot, doc);
-        }
-        add(slot, doc, score);
-        if (queueFull) {
-          for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottom.slot);
-          }
-        }
-      }
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
+        return new MultiComparatorLeafCollector(comparators, reverseMul) {
 
-  }
-
-  /*
-   * Implements a TopFieldCollector over multiple SortField criteria, with
-   * tracking document scores and maxScore.
-   */
-  private static class MultiComparatorScoringNoMaxScoreCollector extends MultiComparatorNonScoringCollector {
-    
-    Scorer scorer;
-    
-    public MultiComparatorScoringNoMaxScoreCollector(FieldValueHitQueue<Entry> queue,
-        int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-    }
-    
-    final void updateBottom(int doc, float score) {
-      bottom.doc = docBase + doc;
-      bottom.score = score;
-      bottom = pq.updateTop();
-    }
-
-    @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        for (int i = 0;; i++) {
-          final int c = reverseMul[i] * comparators[i].compareBottom(doc);
-          if (c < 0) {
-            // Definitely not competitive.
-            return;
-          } else if (c > 0) {
-            // Definitely competitive.
-            break;
-          } else if (i == comparators.length - 1) {
-            // Here c=0. If we're at the last comparator, this doc is not
-            // competitive, since docs are visited in doc Id order, which means
-            // this doc cannot compete with any other document in the queue.
-            return;
-          }
-        }
-
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(bottom.slot, doc);
-        }
-
-        // Compute score only if it is competitive.
-        final float score = scorer.score();
-        updateBottom(doc, score);
-
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
-        }
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(slot, doc);
-        }
-
-        // Compute score only if it is competitive.
-        final float score = scorer.score();
-        add(slot, doc, score);
-        if (queueFull) {
-          for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottom.slot);
-          }
-        }
-      }
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-      super.setScorer(scorer);
-    }
-  }
-
-  /*
-   * Implements a TopFieldCollector over multiple SortField criteria, with
-   * tracking document scores and maxScore, and assumes out of orderness in doc
-   * Ids collection.
-   */
-  private final static class OutOfOrderMultiComparatorScoringNoMaxScoreCollector
-      extends MultiComparatorScoringNoMaxScoreCollector {
-    
-    public OutOfOrderMultiComparatorScoringNoMaxScoreCollector(
-        FieldValueHitQueue<Entry> queue, int numHits, boolean fillFields) {
-      super(queue, numHits, fillFields);
-    }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      ++totalHits;
-      if (queueFull) {
-        // Fastmatch: return if this hit is not competitive
-        for (int i = 0;; i++) {
-          final int c = reverseMul[i] * comparators[i].compareBottom(doc);
-          if (c < 0) {
-            // Definitely not competitive.
-            return;
-          } else if (c > 0) {
-            // Definitely competitive.
-            break;
-          } else if (i == comparators.length - 1) {
-            // This is the equals case.
-            if (doc + docBase > bottom.doc) {
-              // Definitely not competitive
-              return;
+          @Override
+          public void collect(int doc) throws IOException {
+            final float score = scorer.score();
+            if (score > maxScore) {
+              maxScore = score;
             }
-            break;
+            ++totalHits;
+            if (queueFull) {
+              if (compareBottom(doc) <= 0) {
+                // since docs are visited in doc Id order, if compare is 0, it means
+                // this document is larger than anything else in the queue, and
+                // therefore not competitive.
+                return;
+              }
+
+              // This hit is competitive - replace bottom element in queue & adjustTop
+              copy(bottom.slot, doc);
+              updateBottom(doc, score);
+              setBottom(bottom.slot);
+            } else {
+              // Startup transient: queue hasn't gathered numHits yet
+              final int slot = totalHits - 1;
+              // Copy hit into queue
+              copy(slot, doc);
+              add(slot, doc, score);
+              if (queueFull) {
+                setBottom(bottom.slot);
+              }
+            }
           }
-        }
 
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(bottom.slot, doc);
-        }
-
-        // Compute score only if it is competitive.
-        final float score = scorer.score();
-        updateBottom(doc, score);
-
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
-        }
-      } else {
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = totalHits - 1;
-        // Copy hit into queue
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(slot, doc);
-        }
-
-        // Compute score only if it is competitive.
-        final float score = scorer.score();
-        add(slot, doc, score);
-        if (queueFull) {
-          for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottom.slot);
-          }
-        }
+        };
       }
     }
 
-    @Override
-    public void setScorer(Scorer scorer) throws IOException {
-      this.scorer = scorer;
-      super.setScorer(scorer);
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
-
   }
 
   /*
@@ -846,30 +408,24 @@
    */
   private final static class PagingFieldCollector extends TopFieldCollector {
 
-    Scorer scorer;
     int collectedHits;
-    final FieldComparator<?>[] comparators;
-    final int[] reverseMul;
     final FieldValueHitQueue<Entry> queue;
     final boolean trackDocScores;
     final boolean trackMaxScore;
     final FieldDoc after;
-    int afterDoc;
-    
-    public PagingFieldCollector(
-                                FieldValueHitQueue<Entry> queue, FieldDoc after, int numHits, boolean fillFields,
+
+    public PagingFieldCollector(FieldValueHitQueue<Entry> queue, FieldDoc after, int numHits, boolean fillFields,
                                 boolean trackDocScores, boolean trackMaxScore) {
       super(queue, numHits, fillFields);
       this.queue = queue;
       this.trackDocScores = trackDocScores;
       this.trackMaxScore = trackMaxScore;
       this.after = after;
-      comparators = queue.getComparators();
-      reverseMul = queue.getReverseMul();
 
       // Must set maxScore to NEG_INF, or otherwise Math.max always returns NaN.
       maxScore = Float.NEGATIVE_INFINITY;
 
+      FieldComparator<?>[] comparators = queue.comparators;
       // Tell all comparators their top value:
       for(int i=0;i<comparators.length;i++) {
         @SuppressWarnings("unchecked")
@@ -877,141 +433,81 @@
         comparator.setTopValue(after.fields[i]);
       }
     }
-    
-    void updateBottom(int doc, float score) {
-      bottom.doc = docBase + doc;
-      bottom.score = score;
-      bottom = pq.updateTop();
-    }
 
-    @SuppressWarnings({"unchecked", "rawtypes"})
     @Override
-    public void collect(int doc) throws IOException {
-      //System.out.println("  collect doc=" + doc);
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      docBase = context.docBase;
+      final int afterDoc = after.doc - docBase;
+      return new MultiComparatorLeafCollector(queue.getComparators(context), queue.getReverseMul()) {
 
-      totalHits++;
+        @Override
+        public void collect(int doc) throws IOException {
+          //System.out.println("  collect doc=" + doc);
 
-      float score = Float.NaN;
-      if (trackMaxScore) {
-        score = scorer.score();
-        if (score > maxScore) {
-          maxScore = score;
-        }
-      }
+          totalHits++;
 
-      if (queueFull) {
-        // Fastmatch: return if this hit is no better than
-        // the worst hit currently in the queue:
-        for (int i = 0;; i++) {
-          final int c = reverseMul[i] * comparators[i].compareBottom(doc);
-          if (c < 0) {
-            // Definitely not competitive.
-            return;
-          } else if (c > 0) {
-            // Definitely competitive.
-            break;
-          } else if (i == comparators.length - 1) {
-            // This is the equals case.
-            if (doc + docBase > bottom.doc) {
-              // Definitely not competitive
+          float score = Float.NaN;
+          if (trackMaxScore) {
+            score = scorer.score();
+            if (score > maxScore) {
+              maxScore = score;
+            }
+          }
+
+          if (queueFull) {
+            // Fastmatch: return if this hit is no better than
+            // the worst hit currently in the queue:
+            final int cmp = compareBottom(doc);
+            if (cmp <= 0) {
+              // not competitive since documents are visited in doc id order
               return;
             }
-            break;
+          }
+
+          final int topCmp = compareTop(doc);
+          if (topCmp > 0 || (topCmp == 0 && doc <= afterDoc)) {
+            // Already collected on a previous page
+            return;
+          }
+
+          if (queueFull) {
+            // This hit is competitive - replace bottom element in queue & adjustTop
+            copy(bottom.slot, doc);
+
+            // Compute score only if it is competitive.
+            if (trackDocScores && !trackMaxScore) {
+              score = scorer.score();
+            }
+            updateBottom(doc, score);
+
+            setBottom(bottom.slot);
+          } else {
+            collectedHits++;
+
+            // Startup transient: queue hasn't gathered numHits yet
+            final int slot = collectedHits - 1;
+            //System.out.println("    slot=" + slot);
+            // Copy hit into queue
+            copy(slot, doc);
+
+            // Compute score only if it is competitive.
+            if (trackDocScores && !trackMaxScore) {
+              score = scorer.score();
+            }
+            bottom = pq.add(new Entry(slot, docBase + doc, score));
+            queueFull = collectedHits == numHits;
+            if (queueFull) {
+              setBottom(bottom.slot);
+            }
           }
         }
-      }
-
-      // Check if this hit was already collected on a
-      // previous page:
-      boolean sameValues = true;
-      for(int compIDX=0;compIDX<comparators.length;compIDX++) {
-        final FieldComparator comp = comparators[compIDX];
-
-        final int cmp = reverseMul[compIDX] * comp.compareTop(doc);
-        if (cmp > 0) {
-          // Already collected on a previous page
-          //System.out.println("    skip: before");
-          return;
-        } else if (cmp < 0) {
-          // Not yet collected
-          sameValues = false;
-          //System.out.println("    keep: after; reverseMul=" + reverseMul[compIDX]);
-          break;
-        }
-      }
-
-      // Tie-break by docID:
-      if (sameValues && doc <= afterDoc) {
-        // Already collected on a previous page
-        //System.out.println("    skip: tie-break");
-        return;
-      }
-
-      if (queueFull) {
-        // This hit is competitive - replace bottom element in queue & adjustTop
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(bottom.slot, doc);
-        }
-
-        // Compute score only if it is competitive.
-        if (trackDocScores && !trackMaxScore) {
-          score = scorer.score();
-        }
-        updateBottom(doc, score);
-
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
-        }
-      } else {
-        collectedHits++;
-
-        // Startup transient: queue hasn't gathered numHits yet
-        final int slot = collectedHits - 1;
-        //System.out.println("    slot=" + slot);
-        // Copy hit into queue
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].copy(slot, doc);
-        }
-
-        // Compute score only if it is competitive.
-        if (trackDocScores && !trackMaxScore) {
-          score = scorer.score();
-        }
-        bottom = pq.add(new Entry(slot, docBase + doc, score));
-        queueFull = collectedHits == numHits;
-        if (queueFull) {
-          for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottom.slot);
-          }
-        }
-      }
+      };
     }
 
-    @Override
-    public void setScorer(Scorer scorer) {
-      this.scorer = scorer;
-      for (int i = 0; i < comparators.length; i++) {
-        comparators[i].setScorer(scorer);
-      }
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      docBase = context.docBase;
-      afterDoc = after.doc - docBase;
-      for (int i = 0; i < comparators.length; i++) {
-        queue.setComparator(i, comparators[i].setNextReader(context));
-      }
-    }
   }
 
   private static final ScoreDoc[] EMPTY_SCOREDOCS = new ScoreDoc[0];
-  
+
   private final boolean fillFields;
 
   /*
@@ -1024,7 +520,7 @@
   FieldValueHitQueue.Entry bottom = null;
   boolean queueFull;
   int docBase;
-  
+
   // Declaring the constructor private prevents extending this class by anyone
   // else. Note that the class cannot be final since it's extended by the
   // internal versions. If someone will define a constructor with any other
@@ -1043,7 +539,7 @@
    * <p><b>NOTE</b>: The instances returned by this method
    * pre-allocate a full array of length
    * <code>numHits</code>.
-   * 
+   *
    * @param sort
    *          the sort criteria (SortFields).
    * @param numHits
@@ -1065,18 +561,14 @@
    *          true affects performance as it incurs the score computation on
    *          each result. Also, setting this true automatically sets
    *          <code>trackDocScores</code> to true as well.
-   * @param docsScoredInOrder
-   *          specifies whether documents are scored in doc Id order or not by
-   *          the given {@link Scorer} in {@link #setScorer(Scorer)}.
    * @return a {@link TopFieldCollector} instance which will sort the results by
    *         the sort criteria.
    * @throws IOException if there is a low-level I/O error
    */
   public static TopFieldCollector create(Sort sort, int numHits,
-      boolean fillFields, boolean trackDocScores, boolean trackMaxScore,
-      boolean docsScoredInOrder)
+      boolean fillFields, boolean trackDocScores, boolean trackMaxScore)
       throws IOException {
-    return create(sort, numHits, null, fillFields, trackDocScores, trackMaxScore, docsScoredInOrder);
+    return create(sort, numHits, null, fillFields, trackDocScores, trackMaxScore);
   }
 
   /**
@@ -1086,7 +578,7 @@
    * <p><b>NOTE</b>: The instances returned by this method
    * pre-allocate a full array of length
    * <code>numHits</code>.
-   * 
+   *
    * @param sort
    *          the sort criteria (SortFields).
    * @param numHits
@@ -1110,22 +602,18 @@
    *          true affects performance as it incurs the score computation on
    *          each result. Also, setting this true automatically sets
    *          <code>trackDocScores</code> to true as well.
-   * @param docsScoredInOrder
-   *          specifies whether documents are scored in doc Id order or not by
-   *          the given {@link Scorer} in {@link #setScorer(Scorer)}.
    * @return a {@link TopFieldCollector} instance which will sort the results by
    *         the sort criteria.
    * @throws IOException if there is a low-level I/O error
    */
   public static TopFieldCollector create(Sort sort, int numHits, FieldDoc after,
-      boolean fillFields, boolean trackDocScores, boolean trackMaxScore,
-      boolean docsScoredInOrder)
+      boolean fillFields, boolean trackDocScores, boolean trackMaxScore)
       throws IOException {
 
     if (sort.fields.length == 0) {
       throw new IllegalArgumentException("Sort must contain at least one field");
     }
-    
+
     if (numHits <= 0) {
       throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count");
     }
@@ -1133,43 +621,12 @@
     FieldValueHitQueue<Entry> queue = FieldValueHitQueue.create(sort.fields, numHits);
 
     if (after == null) {
-      if (queue.getComparators().length == 1) {
-        if (docsScoredInOrder) {
-          if (trackMaxScore) {
-            return new OneComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
-          } else if (trackDocScores) {
-            return new OneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
-          } else {
-            return new OneComparatorNonScoringCollector(queue, numHits, fillFields);
-          }
-        } else {
-          if (trackMaxScore) {
-            return new OutOfOrderOneComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
-          } else if (trackDocScores) {
-            return new OutOfOrderOneComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
-          } else {
-            return new OutOfOrderOneComparatorNonScoringCollector(queue, numHits, fillFields);
-          }
-        }
-      }
-
-      // multiple comparators.
-      if (docsScoredInOrder) {
-        if (trackMaxScore) {
-          return new MultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
-        } else if (trackDocScores) {
-          return new MultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
-        } else {
-          return new MultiComparatorNonScoringCollector(queue, numHits, fillFields);
-        }
+      if (trackMaxScore) {
+        return new ScoringMaxScoreCollector(queue, numHits, fillFields);
+      } else if (trackDocScores) {
+        return new ScoringNoMaxScoreCollector(queue, numHits, fillFields);
       } else {
-        if (trackMaxScore) {
-          return new OutOfOrderMultiComparatorScoringMaxScoreCollector(queue, numHits, fillFields);
-        } else if (trackDocScores) {
-          return new OutOfOrderMultiComparatorScoringNoMaxScoreCollector(queue, numHits, fillFields);
-        } else {
-          return new OutOfOrderMultiComparatorNonScoringCollector(queue, numHits, fillFields);
-        }
+        return new NonScoringCollector(queue, numHits, fillFields);
       }
     } else {
       if (after.fields == null) {
@@ -1183,12 +640,24 @@
       return new PagingFieldCollector(queue, after, numHits, fillFields, trackDocScores, trackMaxScore);
     }
   }
-  
+
   final void add(int slot, int doc, float score) {
     bottom = pq.add(new Entry(slot, docBase + doc, score));
     queueFull = totalHits == numHits;
   }
 
+  final void updateBottom(int doc) {
+    // bottom.score is already set to Float.NaN in add().
+    bottom.doc = docBase + doc;
+    bottom = pq.updateTop();
+  }
+
+  final void updateBottom(int doc, float score) {
+    bottom.doc = docBase + doc;
+    bottom.score = score;
+    bottom = pq.updateTop();
+  }
+
   /*
    * Only the following callback methods need to be overridden since
    * topDocs(int, int) calls them to return the results.
@@ -1209,7 +678,7 @@
       }
     }
   }
-  
+
   @Override
   protected TopDocs newTopDocs(ScoreDoc[] results, int start) {
     if (results == null) {
@@ -1218,12 +687,8 @@
       maxScore = Float.NaN;
     }
 
-    // If this is a maxScoring tracking collector and there were no results, 
+    // If this is a maxScore tracking collector and there were no results, maxScore was set to NaN above.
     return new TopFieldDocs(totalHits, results, ((FieldValueHitQueue<Entry>) pq).getFields(), maxScore);
   }
-  
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
-  }
+
 }
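
For callers of TopFieldCollector, the visible change in this file is the create() signature: the docsScoredInOrder flag is gone because the out-of-order collector variants were collapsed into the in-order ones. A usage sketch under the new signature; the searcher, query and "price" field are hypothetical:

    import java.io.IOException;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.search.TopFieldCollector;

    final class SortedSearchExample {
      private SortedSearchExample() {}

      // Hypothetical caller: top 10 documents by a numeric "price" field,
      // without tracking scores. Only the create() call reflects this patch.
      static TopDocs topTenByPrice(IndexSearcher searcher) throws IOException {
        Sort sort = new Sort(new SortField("price", SortField.Type.LONG));
        TopFieldCollector collector = TopFieldCollector.create(
            sort, 10, /*fillFields=*/ true, /*trackDocScores=*/ false, /*trackMaxScore=*/ false);
        searcher.search(new MatchAllDocsQuery(), collector);
        return collector.topDocs();
      }
    }
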
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
index e343e67..8d591ab 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
@@ -36,242 +36,151 @@
  */
 public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
 
-  // Assumes docs are scored in order.
-  private static class InOrderTopScoreDocCollector extends TopScoreDocCollector {
-    private InOrderTopScoreDocCollector(int numHits) {
+  abstract static class ScorerLeafCollector implements LeafCollector {
+
+    Scorer scorer;
+
+    @Override
+    public void setScorer(Scorer scorer) throws IOException {
+      this.scorer = scorer;
+    }
+
+  }
+
+  private static class SimpleTopScoreDocCollector extends TopScoreDocCollector {
+
+    SimpleTopScoreDocCollector(int numHits) {
       super(numHits);
     }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      float score = scorer.score();
 
-      // This collector cannot handle these scores:
-      assert score != Float.NEGATIVE_INFINITY;
-      assert !Float.isNaN(score);
-
-      totalHits++;
-      if (score <= pqTop.score) {
-        // Since docs are returned in-order (i.e., increasing doc Id), a document
-        // with equal score to pqTop.score cannot compete since HitQueue favors
-        // documents with lower doc Ids. Therefore reject those docs too.
-        return;
-      }
-      pqTop.doc = doc + docBase;
-      pqTop.score = score;
-      pqTop = pq.updateTop();
-    }
-    
     @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
+    public LeafCollector getLeafCollector(LeafReaderContext context)
+        throws IOException {
+      final int docBase = context.docBase;
+      return new ScorerLeafCollector() {
+
+        @Override
+        public void collect(int doc) throws IOException {
+          float score = scorer.score();
+
+          // This collector cannot handle these scores:
+          assert score != Float.NEGATIVE_INFINITY;
+          assert !Float.isNaN(score);
+
+          totalHits++;
+          if (score <= pqTop.score) {
+            // Since docs are returned in-order (i.e., increasing doc Id), a document
+            // with equal score to pqTop.score cannot compete since HitQueue favors
+            // documents with lower doc Ids. Therefore reject those docs too.
+            return;
+          }
+          pqTop.doc = doc + docBase;
+          pqTop.score = score;
+          pqTop = pq.updateTop();
+        }
+
+      };
     }
+
   }
-  
-  // Assumes docs are scored in order.
-  private static class InOrderPagingScoreDocCollector extends TopScoreDocCollector {
+
+  private static class PagingTopScoreDocCollector extends TopScoreDocCollector {
+
     private final ScoreDoc after;
-    // this is always after.doc - docBase, to save an add when score == after.score
-    private int afterDoc;
     private int collectedHits;
 
-    private InOrderPagingScoreDocCollector(ScoreDoc after, int numHits) {
+    PagingTopScoreDocCollector(int numHits, ScoreDoc after) {
       super(numHits);
       this.after = after;
-    }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      float score = scorer.score();
-
-      // This collector cannot handle these scores:
-      assert score != Float.NEGATIVE_INFINITY;
-      assert !Float.isNaN(score);
-
-      totalHits++;
-      
-      if (score > after.score || (score == after.score && doc <= afterDoc)) {
-        // hit was collected on a previous page
-        return;
-      }
-      
-      if (score <= pqTop.score) {
-        // Since docs are returned in-order (i.e., increasing doc Id), a document
-        // with equal score to pqTop.score cannot compete since HitQueue favors
-        // documents with lower doc Ids. Therefore reject those docs too.
-        return;
-      }
-      collectedHits++;
-      pqTop.doc = doc + docBase;
-      pqTop.score = score;
-      pqTop = pq.updateTop();
-    }
-
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      super.doSetNextReader(context);
-      afterDoc = after.doc - context.docBase;
+      this.collectedHits = 0;
     }
 
     @Override
     protected int topDocsSize() {
       return collectedHits < pq.size() ? collectedHits : pq.size();
     }
-    
+
     @Override
     protected TopDocs newTopDocs(ScoreDoc[] results, int start) {
       return results == null ? new TopDocs(totalHits, new ScoreDoc[0], Float.NaN) : new TopDocs(totalHits, results);
     }
-  }
 
-  // Assumes docs are scored out of order.
-  private static class OutOfOrderTopScoreDocCollector extends TopScoreDocCollector {
-    private OutOfOrderTopScoreDocCollector(int numHits) {
-      super(numHits);
-    }
-    
     @Override
-    public void collect(int doc) throws IOException {
-      float score = scorer.score();
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      final int docBase = context.docBase;
+      final int afterDoc = after.doc - context.docBase;
+      return new ScorerLeafCollector() {
+        @Override
+        public void collect(int doc) throws IOException {
+          float score = scorer.score();
 
-      // This collector cannot handle NaN
-      assert !Float.isNaN(score);
+          // This collector cannot handle these scores:
+          assert score != Float.NEGATIVE_INFINITY;
+          assert !Float.isNaN(score);
 
-      totalHits++;
-      if (score < pqTop.score) {
-        // Doesn't compete w/ bottom entry in queue
-        return;
-      }
-      doc += docBase;
-      if (score == pqTop.score && doc > pqTop.doc) {
-        // Break tie in score by doc ID:
-        return;
-      }
-      pqTop.doc = doc;
-      pqTop.score = score;
-      pqTop = pq.updateTop();
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
-  }
-  
-  // Assumes docs are scored out of order.
-  private static class OutOfOrderPagingScoreDocCollector extends TopScoreDocCollector {
-    private final ScoreDoc after;
-    // this is always after.doc - docBase, to save an add when score == after.score
-    private int afterDoc;
-    private int collectedHits;
+          totalHits++;
 
-    private OutOfOrderPagingScoreDocCollector(ScoreDoc after, int numHits) {
-      super(numHits);
-      this.after = after;
-    }
-    
-    @Override
-    public void collect(int doc) throws IOException {
-      float score = scorer.score();
+          if (score > after.score || (score == after.score && doc <= afterDoc)) {
+            // hit was collected on a previous page
+            return;
+          }
 
-      // This collector cannot handle NaN
-      assert !Float.isNaN(score);
-
-      totalHits++;
-      if (score > after.score || (score == after.score && doc <= afterDoc)) {
-        // hit was collected on a previous page
-        return;
-      }
-      if (score < pqTop.score) {
-        // Doesn't compete w/ bottom entry in queue
-        return;
-      }
-      doc += docBase;
-      if (score == pqTop.score && doc > pqTop.doc) {
-        // Break tie in score by doc ID:
-        return;
-      }
-      collectedHits++;
-      pqTop.doc = doc;
-      pqTop.score = score;
-      pqTop = pq.updateTop();
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
-    
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      super.doSetNextReader(context);
-      afterDoc = after.doc - context.docBase;
-    }
-    
-    @Override
-    protected int topDocsSize() {
-      return collectedHits < pq.size() ? collectedHits : pq.size();
-    }
-    
-    @Override
-    protected TopDocs newTopDocs(ScoreDoc[] results, int start) {
-      return results == null ? new TopDocs(totalHits, new ScoreDoc[0], Float.NaN) : new TopDocs(totalHits, results);
+          if (score <= pqTop.score) {
+            // Since docs are returned in-order (i.e., increasing doc Id), a document
+            // with equal score to pqTop.score cannot compete since HitQueue favors
+            // documents with lower doc Ids. Therefore reject those docs too.
+            return;
+          }
+          collectedHits++;
+          pqTop.doc = doc + docBase;
+          pqTop.score = score;
+          pqTop = pq.updateTop();
+        }
+      };
     }
   }
 
   /**
    * Creates a new {@link TopScoreDocCollector} given the number of hits to
-   * collect and whether documents are scored in order by the input
-   * {@link Scorer} to {@link #setScorer(Scorer)}.
+   * collect.
    *
    * <p><b>NOTE</b>: The instances returned by this method
    * pre-allocate a full array of length
    * <code>numHits</code>, and fill the array with sentinel
    * objects.
    */
-  public static TopScoreDocCollector create(int numHits, boolean docsScoredInOrder) {
-    return create(numHits, null, docsScoredInOrder);
+  public static TopScoreDocCollector create(int numHits) {
+    return create(numHits, null);
   }
-  
+
   /**
    * Creates a new {@link TopScoreDocCollector} given the number of hits to
-   * collect, the bottom of the previous page, and whether documents are scored in order by the input
-   * {@link Scorer} to {@link #setScorer(Scorer)}.
+   * collect and the bottom of the previous page.
    *
    * <p><b>NOTE</b>: The instances returned by this method
    * pre-allocate a full array of length
    * <code>numHits</code>, and fill the array with sentinel
    * objects.
    */
-  public static TopScoreDocCollector create(int numHits, ScoreDoc after, boolean docsScoredInOrder) {
-    
+  public static TopScoreDocCollector create(int numHits, ScoreDoc after) {
+
     if (numHits <= 0) {
       throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count");
     }
-    
-    if (docsScoredInOrder) {
-      return after == null 
-        ? new InOrderTopScoreDocCollector(numHits) 
-        : new InOrderPagingScoreDocCollector(after, numHits);
+
+    if (after == null) {
+      return new SimpleTopScoreDocCollector(numHits);
     } else {
-      return after == null
-        ? new OutOfOrderTopScoreDocCollector(numHits)
-        : new OutOfOrderPagingScoreDocCollector(after, numHits);
+      return new PagingTopScoreDocCollector(numHits, after);
     }
-    
   }
-  
+
   ScoreDoc pqTop;
-  int docBase = 0;
-  Scorer scorer;
-    
+
   // prevents instantiation
-  private TopScoreDocCollector(int numHits) {
+  TopScoreDocCollector(int numHits) {
     super(new HitQueue(numHits, true));
     // HitQueue implements getSentinelObject to return a ScoreDoc, so we know
     // that at this point top() is already initialized.
@@ -283,7 +192,7 @@
     if (results == null) {
       return EMPTY_TOPDOCS;
     }
-    
+
     // We need to compute maxScore in order to set it in TopDocs. If start == 0,
     // it means the largest element is already in results, use its score as
     // maxScore. Otherwise pop everything else, until the largest element is
@@ -295,17 +204,7 @@
       for (int i = pq.size(); i > 1; i--) { pq.pop(); }
       maxScore = pq.pop().score;
     }
-    
+
     return new TopDocs(totalHits, results, maxScore);
   }
-  
-  @Override
-  protected void doSetNextReader(LeafReaderContext context) throws IOException {
-    docBase = context.docBase;
-  }
-  
-  @Override
-  public void setScorer(Scorer scorer) throws IOException {
-    this.scorer = scorer;
-  }
 }
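
With the in-order/out-of-order split gone, callers choose a collector only by
whether they are paging. A minimal sketch of the two remaining entry points;
the `searcher`, `query` and the `secondPage` helper are illustrative, not part
of this patch:

    import java.io.IOException;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.search.TopScoreDocCollector;

    // Hypothetical helper: fetch the second page of results using the new
    // create(int) / create(int, ScoreDoc) entry points.
    static TopDocs secondPage(IndexSearcher searcher, Query query, int pageSize) throws IOException {
      TopScoreDocCollector first = TopScoreDocCollector.create(pageSize);
      searcher.search(query, first);
      TopDocs page1 = first.topDocs();
      if (page1.scoreDocs.length == 0) {
        return page1; // nothing matched, so there is no second page
      }
      ScoreDoc bottom = page1.scoreDocs[page1.scoreDocs.length - 1];
      TopScoreDocCollector second = TopScoreDocCollector.create(pageSize, bottom);
      searcher.search(query, second);
      return second.topDocs(); // hits strictly after `bottom`
    }

Passing the bottom ScoreDoc of the previous page makes PagingTopScoreDocCollector
skip every hit that scores above it, or scores equal to it with a lower doc id.
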
diff --git a/lucene/core/src/java/org/apache/lucene/search/TotalHitCountCollector.java b/lucene/core/src/java/org/apache/lucene/search/TotalHitCountCollector.java
index 4fc5be6..fb06e0a 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TotalHitCountCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TotalHitCountCollector.java
@@ -34,9 +34,4 @@
   public void collect(int doc) {
     totalHits++;
   }
-
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return true;
-  }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/Weight.java b/lucene/core/src/java/org/apache/lucene/search/Weight.java
index 422356c..741dc88 100644
--- a/lucene/core/src/java/org/apache/lucene/search/Weight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/Weight.java
@@ -79,11 +79,6 @@
-   * Returns a {@link Scorer} which scores documents in/out-of order according
-   * to <code>scoreDocsInOrder</code>.
+   * Returns a {@link BulkScorer} which scores documents and
+   * passes them to a collector.
    * <p>
-   * <b>NOTE:</b> even if <code>scoreDocsInOrder</code> is false, it is
-   * recommended to check whether the returned <code>Scorer</code> indeed scores
-   * documents out of order (i.e., call {@link #scoresDocsOutOfOrder()}), as
-   * some <code>Scorer</code> implementations will always return documents
-   * in-order.<br>
    * <b>NOTE:</b> null can be returned if no documents will be scored by this
    * query.
    * 
@@ -108,15 +103,6 @@
    *
    * @param context
    *          the {@link org.apache.lucene.index.LeafReaderContext} for which to return the {@link Scorer}.
-   * @param scoreDocsInOrder
-   *          specifies whether in-order scoring of documents is required. Note
-   *          that if set to false (i.e., out-of-order scoring is required),
-   *          this method can return whatever scoring mode it supports, as every
-   *          in-order scorer is also an out-of-order one. However, an
-   *          out-of-order scorer may not support {@link Scorer#nextDoc()}
-   *          and/or {@link Scorer#advance(int)}, therefore it is recommended to
-   *          request an in-order scorer if use of these
-   *          methods is required.
    * @param acceptDocs
    *          Bits that represent the allowable docs to match (typically deleted docs
    *          but possibly filtering other documents)
@@ -125,7 +111,7 @@
    * passes them to a collector.
    * @throws IOException if there is a low-level I/O error
    */
-  public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs) throws IOException {
+  public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
 
     Scorer scorer = scorer(context, acceptDocs);
     if (scorer == null) {
@@ -150,7 +136,7 @@
     }
 
     @Override
-    public boolean score(LeafCollector collector, int max) throws IOException {
+    public int score(LeafCollector collector, int min, int max) throws IOException {
       // TODO: this may be sort of weird, when we are
       // embedded in a BooleanScorer, because we are
       // called for every chunk of 2048 documents.  But,
@@ -158,13 +144,13 @@
       // Collector doing something "interesting" in
       // setScorer will be forced to use BS2 anyways:
       collector.setScorer(scorer);
-      if (max == DocIdSetIterator.NO_MORE_DOCS) {
+      if (scorer.docID() == -1 && min == 0 && max == DocIdSetIterator.NO_MORE_DOCS) {
         scoreAll(collector, scorer);
-        return false;
+        return DocIdSetIterator.NO_MORE_DOCS;
       } else {
         int doc = scorer.docID();
-        if (doc < 0) {
-          doc = scorer.nextDoc();
+        if (doc < min) {
+          doc = scorer.advance(min);
         }
         return scoreRange(collector, scorer, doc, max);
       }
@@ -174,12 +160,12 @@
      *  separate this from {@link #scoreAll} to help out
      *  hotspot.
      *  See <a href="https://issues.apache.org/jira/browse/LUCENE-5487">LUCENE-5487</a> */
-    static boolean scoreRange(LeafCollector collector, Scorer scorer, int currentDoc, int end) throws IOException {
+    static int scoreRange(LeafCollector collector, Scorer scorer, int currentDoc, int end) throws IOException {
       while (currentDoc < end) {
         collector.collect(currentDoc);
         currentDoc = scorer.nextDoc();
       }
-      return currentDoc != DocIdSetIterator.NO_MORE_DOCS;
+      return currentDoc;
     }
     
     /** Specialized method to bulk-score all hits; we
@@ -187,25 +173,9 @@
      *  hotspot.
      *  See <a href="https://issues.apache.org/jira/browse/LUCENE-5487">LUCENE-5487</a> */
     static void scoreAll(LeafCollector collector, Scorer scorer) throws IOException {
-      int doc;
-      while ((doc = scorer.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+      for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
         collector.collect(doc);
       }
     }
   }
-
-  /**
-   * Returns true iff this implementation scores docs only out of order. This
-   * method is used in conjunction with {@link Collector}'s
-   * {@link LeafCollector#acceptsDocsOutOfOrder() acceptsDocsOutOfOrder} and
-   * {@link #bulkScorer(org.apache.lucene.index.LeafReaderContext, boolean, Bits)} to
-   * create a matching {@link Scorer} instance for a given {@link Collector}, or
-   * vice versa.
-   * <p>
-   * <b>NOTE:</b> the default implementation returns <code>false</code>, i.e.
-   * the <code>Scorer</code> scores documents in-order.
-   */
-  public boolean scoresDocsOutOfOrder() {
-    return false;
-  }
 }
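
Because score(LeafCollector, int, int) now returns the next candidate document
instead of a boolean, a caller can drive one leaf in windows. A sketch of such
a driver loop, assuming a `scorer` and `collector` for a single leaf; the
window size and method name are illustrative:

    import java.io.IOException;
    import org.apache.lucene.search.BulkScorer;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.LeafCollector;

    // Hypothetical driver: walk one leaf in fixed windows; score(...) returns
    // the next candidate doc, or DocIdSetIterator.NO_MORE_DOCS when done.
    static void scoreInWindows(BulkScorer scorer, LeafCollector collector) throws IOException {
      final int window = 2048; // illustrative window size
      int next = 0;
      while (next != DocIdSetIterator.NO_MORE_DOCS) {
        // Clamp to avoid int overflow near NO_MORE_DOCS (Integer.MAX_VALUE).
        int max = (int) Math.min((long) next + window, DocIdSetIterator.NO_MORE_DOCS);
        next = scorer.score(collector, next, max); // scores docs in [next, max)
      }
    }
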
diff --git a/lucene/core/src/java/org/apache/lucene/search/package.html b/lucene/core/src/java/org/apache/lucene/search/package.html
index d61e91c..1635797 100644
--- a/lucene/core/src/java/org/apache/lucene/search/package.html
+++ b/lucene/core/src/java/org/apache/lucene/search/package.html
@@ -443,8 +443,8 @@
                 given the Query.
             </li>
             <li>
-                {@link org.apache.lucene.search.Weight#bulkScorer(org.apache.lucene.index.LeafReaderContext, boolean, org.apache.lucene.util.Bits)
-                  scorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs)} &mdash;
+                {@link org.apache.lucene.search.Weight#bulkScorer(org.apache.lucene.index.LeafReaderContext, org.apache.lucene.util.Bits)
+                  bulkScorer(LeafReaderContext context, Bits acceptDocs)} &mdash;
                 Construct a new {@link org.apache.lucene.search.BulkScorer BulkScorer} for this Weight. See <a href="#bulkScorerClass">The BulkScorer Class</a>
                 below for help defining a BulkScorer. This is an optional method, and most queries do not implement it.
             </li>
@@ -508,7 +508,7 @@
         abstract method:
         <ol>
             <li>
-                {@link org.apache.lucene.search.BulkScorer#score(org.apache.lucene.search.LeafCollector,int) score(LeafCollector,int)} &mdash;
+                {@link org.apache.lucene.search.BulkScorer#score(org.apache.lucene.search.LeafCollector,int,int) score(LeafCollector,int,int)} &mdash;
-		Score all documents up to but not including the specified max document.
+		Score all documents from the min document up to but not including the specified max document.
 	    </li>
         </ol>
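
For illustration, a hypothetical MatchAllBulkScorer showing the contract the
new single abstract score method implies; this class is not in the patch, and
`maxDoc` is assumed to be the segment's document count:

    import java.io.IOException;
    import org.apache.lucene.search.BulkScorer;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.LeafCollector;

    // Hypothetical example: a BulkScorer that matches every document in a
    // segment of maxDoc documents, honoring the [min, max) contract.
    final class MatchAllBulkScorer extends BulkScorer {
      private final int maxDoc;

      MatchAllBulkScorer(int maxDoc) {
        this.maxDoc = maxDoc;
      }

      @Override
      public int score(LeafCollector collector, int min, int max) throws IOException {
        // Real implementations would also call collector.setScorer(...) first.
        int end = Math.min(max, maxDoc);
        for (int doc = min; doc < end; doc++) {
          collector.collect(doc);
        }
        // Return the first doc beyond this window, or NO_MORE_DOCS if exhausted.
        return end >= maxDoc ? DocIdSetIterator.NO_MORE_DOCS : end;
      }
    }
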
diff --git a/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java b/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java
index 5951ae1..b0a234a 100644
--- a/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java
+++ b/lucene/core/src/java/org/apache/lucene/store/ByteBufferIndexInput.java
@@ -170,9 +170,7 @@
         this.curBufIndex = bi;
         this.curBuf = b;
       }
-    } catch (ArrayIndexOutOfBoundsException aioobe) {
-      throw new EOFException("seek past EOF: " + this);
-    } catch (IllegalArgumentException iae) {
+    } catch (ArrayIndexOutOfBoundsException | IllegalArgumentException e) {
       throw new EOFException("seek past EOF: " + this);
     } catch (NullPointerException npe) {
       throw new AlreadyClosedException("Already closed: " + this);
@@ -198,9 +196,7 @@
       b.position((int) (pos & chunkSizeMask));
       this.curBufIndex = bi;
       this.curBuf = b;
-    } catch (ArrayIndexOutOfBoundsException aioobe) {
-      throw new EOFException("seek past EOF: " + this);
-    } catch (IllegalArgumentException iae) {
+    } catch (ArrayIndexOutOfBoundsException | IllegalArgumentException e) {
       throw new EOFException("seek past EOF: " + this);
     } catch (NullPointerException npe) {
       throw new AlreadyClosedException("Already closed: " + this);
diff --git a/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java b/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java
index 76c4ed7..765b5c2 100644
--- a/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java
+++ b/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java
@@ -23,7 +23,7 @@
 /** Directory implementation that delegates calls to another directory.
  *  This class can be used to add limitations on top of an existing
  *  {@link Directory} implementation such as
- *  {@link RateLimitedDirectoryWrapper rate limiting} or to add additional
+ *  {@link NRTCachingDirectory} or to add additional
  *  sanity checks for tests. However, if you plan to write your own
  *  {@link Directory} implementation, you should consider extending directly
  *  {@link Directory} or {@link BaseDirectory} rather than try to reuse
diff --git a/lucene/core/src/java/org/apache/lucene/store/LockStressTest.java b/lucene/core/src/java/org/apache/lucene/store/LockStressTest.java
index d4156b2..4840ae9 100644
--- a/lucene/core/src/java/org/apache/lucene/store/LockStressTest.java
+++ b/lucene/core/src/java/org/apache/lucene/store/LockStressTest.java
@@ -139,7 +139,7 @@
     // try to create a new instance
     try {
       return Class.forName(lockFactoryClassName).asSubclass(FSLockFactory.class).newInstance();
-    } catch (IllegalAccessException | InstantiationException | ClassCastException | ClassNotFoundException e) {
+    } catch (ReflectiveOperationException | ClassCastException e) {
       // fall-through
     }
 
diff --git a/lucene/core/src/java/org/apache/lucene/store/RateLimitedDirectoryWrapper.java b/lucene/core/src/java/org/apache/lucene/store/RateLimitedDirectoryWrapper.java
deleted file mode 100644
index 7f53ad6..0000000
--- a/lucene/core/src/java/org/apache/lucene/store/RateLimitedDirectoryWrapper.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package org.apache.lucene.store;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.IOException;
-
-import org.apache.lucene.store.IOContext.Context;
-
-/**
- * 
- * A {@link Directory} wrapper that allows {@link IndexOutput} rate limiting using
- * {@link IOContext.Context IO context} specific {@link RateLimiter rate limiters}.
- * 
- *  @see #setRateLimiter(RateLimiter, IOContext.Context)
- * @lucene.experimental
- */
-public final class RateLimitedDirectoryWrapper extends FilterDirectory {
-
-  // we need to be volatile here to make sure we see all the values that are set
-  // / modified concurrently
-  private volatile RateLimiter[] contextRateLimiters = new RateLimiter[IOContext.Context
-      .values().length];
-  
-  public RateLimitedDirectoryWrapper(Directory wrapped) {
-    super(wrapped);
-  }
-  
-  @Override
-  public IndexOutput createOutput(String name, IOContext context)
-      throws IOException {
-    ensureOpen();
-    final IndexOutput output = super.createOutput(name, context);
-    final RateLimiter limiter = getRateLimiter(context.context);
-    if (limiter != null) {
-      return new RateLimitedIndexOutput(limiter, output);
-    }
-    return output;
-  }
-
-  @Override
-  public void copyFrom(Directory from, String src, String dest, IOContext context) throws IOException {
-    ensureOpen();
-    in.copyFrom(from, src, dest, context);
-  }
-  
-  private RateLimiter getRateLimiter(IOContext.Context context) {
-    assert context != null;
-    return contextRateLimiters[context.ordinal()];
-  }
-  
-  /**
-   * Sets the maximum (approx) MB/sec allowed by all write IO performed by
-   * {@link IndexOutput} created with the given {@link IOContext.Context}. Pass
-   * <code>null</code> to have no limit.
-   * 
-   * <p>
-   * <b>NOTE</b>: For already created {@link IndexOutput} instances there is no
-   * guarantee this new rate will apply to them; it will only be guaranteed to
-   * apply for new created {@link IndexOutput} instances.
-   * <p>
-   * <b>NOTE</b>: this is an optional operation and might not be respected by
-   * all Directory implementations. Currently only {@link FSDirectory buffered}
-   * Directory implementations use rate-limiting.
-   * 
-   * @throws IllegalArgumentException
-   *           if context is <code>null</code>
-   * @throws AlreadyClosedException if the {@link Directory} is already closed
-   * @lucene.experimental
-   */
-  public void setMaxWriteMBPerSec(Double mbPerSec, IOContext.Context context) {
-    ensureOpen();
-    if (context == null) {
-      throw new IllegalArgumentException("Context must not be null");
-    }
-    final int ord = context.ordinal();
-    final RateLimiter limiter = contextRateLimiters[ord];
-    if (mbPerSec == null) {
-      if (limiter != null) {
-        limiter.setMbPerSec(Double.MAX_VALUE);
-        contextRateLimiters[ord] = null;
-      }
-    } else if (limiter != null) {
-      limiter.setMbPerSec(mbPerSec);
-      contextRateLimiters[ord] = limiter; // cross the mem barrier again
-    } else {
-      contextRateLimiters[ord] = new RateLimiter.SimpleRateLimiter(mbPerSec);
-    }
-  }
-  
-  /**
-   * Sets the rate limiter to be used to limit (approx) MB/sec allowed by all IO
-   * performed with the given {@link IOContext.Context context}. Pass <code>null</code> to
-   * have no limit.
-   * 
-   * <p>
-   * Passing an instance of rate limiter compared to setting it using
-   * {@link #setMaxWriteMBPerSec(Double, IOContext.Context)}
-   * allows to use the same limiter instance across several directories globally
-   * limiting IO across them.
-   * 
-   * @throws IllegalArgumentException
-   *           if context is <code>null</code>
-   * @throws AlreadyClosedException if the {@link Directory} is already closed           
-   * @lucene.experimental
-   */
-  public void setRateLimiter(RateLimiter mergeWriteRateLimiter,
-      Context context) {
-    ensureOpen();
-    if (context == null) {
-      throw new IllegalArgumentException("Context must not be null");
-    }
-    contextRateLimiters[context.ordinal()] = mergeWriteRateLimiter;
-  }
-  
-  /**
-   * See {@link #setMaxWriteMBPerSec}.
-   * 
-   * @throws IllegalArgumentException
-   *           if context is <code>null</code>
-   * @throws AlreadyClosedException if the {@link Directory} is already closed
-   * @lucene.experimental
-   */
-  public Double getMaxWriteMBPerSec(IOContext.Context context) {
-    ensureOpen();
-    if (context == null) {
-      throw new IllegalArgumentException("Context must not be null");
-    }
-    RateLimiter limiter = getRateLimiter(context);
-    return limiter == null ? null : limiter.getMbPerSec();
-  }
-  
-}
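
With this wrapper removed (and RateLimitedIndexOutput made public in the hunks
below), per-context write throttling can still be assembled by hand. A sketch,
assuming a FilterDirectory subclass is acceptable; the class name and the
5 MB/sec figure are illustrative:

    import java.io.IOException;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.FilterDirectory;
    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.IndexOutput;
    import org.apache.lucene.store.IndexOutput;
    import org.apache.lucene.store.RateLimitedIndexOutput;
    import org.apache.lucene.store.RateLimiter;

    // Hypothetical replacement: throttles merge writes to ~5 MB/sec.
    final class ThrottledDirectory extends FilterDirectory {
      private final RateLimiter limiter = new RateLimiter.SimpleRateLimiter(5.0);

      ThrottledDirectory(Directory in) {
        super(in);
      }

      @Override
      public IndexOutput createOutput(String name, IOContext context) throws IOException {
        IndexOutput out = in.createOutput(name, context);
        if (context.context == IOContext.Context.MERGE) {
          // Wrap only merge outputs, like the deleted wrapper did per context.
          return new RateLimitedIndexOutput(limiter, out);
        }
        return out;
      }
    }

The shared limiter can be retuned at runtime via setMBPerSec (the renamed
setter below), with the same caveat the old wrapper documented for
already-open outputs.
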
diff --git a/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java b/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java
index f8535f2..5fdc6d7 100644
--- a/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java
+++ b/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java
@@ -24,7 +24,7 @@
  * 
  * @lucene.internal
  */
-final class RateLimitedIndexOutput extends IndexOutput {
+
   
   private final IndexOutput delegate;
   private final RateLimiter rateLimiter;
@@ -36,7 +37,7 @@
    * which does volatile read. */
   private long currentMinPauseCheckBytes;
 
-  RateLimitedIndexOutput(final RateLimiter rateLimiter, final IndexOutput delegate) {
+  public RateLimitedIndexOutput(final RateLimiter rateLimiter, final IndexOutput delegate) {
     super("RateLimitedIndexOutput(" + delegate + ")");
     this.delegate = delegate;
     this.rateLimiter = rateLimiter;
@@ -72,7 +73,7 @@
     delegate.writeBytes(b, offset, length);
   }
   
-  private void checkRate() {
+  private void checkRate() throws IOException {
     if (bytesSinceLastPause > currentMinPauseCheckBytes) {
       rateLimiter.pause(bytesSinceLastPause);
       bytesSinceLastPause = 0;
diff --git a/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java b/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java
index b5759f6..99ed3c7 100644
--- a/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java
+++ b/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java
@@ -17,6 +17,8 @@
  * limitations under the License.
  */
 
+import java.io.IOException;
+
 import org.apache.lucene.util.ThreadInterruptedException;
 
 /** Abstract base class to rate limit IO.  Typically implementations are
@@ -27,14 +29,14 @@
 public abstract class RateLimiter {
 
   /**
-   * Sets an updated mb per second rate limit.
+   * Sets an updated MB per second rate limit.
    */
-  public abstract void setMbPerSec(double mbPerSec);
+  public abstract void setMBPerSec(double mbPerSec);
 
   /**
-   * The current mb per second rate limit.
+   * The current MB per second rate limit.
    */
-  public abstract double getMbPerSec();
+  public abstract double getMBPerSec();
   
   /** Pauses, if necessary, to keep the instantaneous IO
    *  rate at or below the target. 
@@ -43,7 +45,7 @@
    *  </p>
    *  @return the pause time in nanoseconds
    */
-  public abstract long pause(long bytes);
+  public abstract long pause(long bytes) throws IOException;
   
   /** How many bytes caller should add up itself before invoking {@link #pause}. */
   public abstract long getMinPauseCheckBytes();
@@ -65,7 +67,7 @@
 
     /** mbPerSec is the MB/sec max IO rate */
     public SimpleRateLimiter(double mbPerSec) {
-      setMbPerSec(mbPerSec);
+      setMBPerSec(mbPerSec);
       lastNS = System.nanoTime();
     }
 
@@ -73,7 +75,7 @@
      * Sets an updated mb per second rate limit.
      */
     @Override
-    public void setMbPerSec(double mbPerSec) {
+    public void setMBPerSec(double mbPerSec) {
       this.mbPerSec = mbPerSec;
       minPauseCheckBytes = (long) ((MIN_PAUSE_CHECK_MSEC / 1000.0) * mbPerSec * 1024 * 1024);
     }
@@ -87,7 +89,7 @@
      * The current mb per second rate limit.
      */
     @Override
-    public double getMbPerSec() {
+    public double getMBPerSec() {
       return this.mbPerSec;
     }
     
diff --git a/lucene/core/src/java/org/apache/lucene/util/IOUtils.java b/lucene/core/src/java/org/apache/lucene/util/IOUtils.java
index d6574ba..504b1f8 100644
--- a/lucene/core/src/java/org/apache/lucene/util/IOUtils.java
+++ b/lucene/core/src/java/org/apache/lucene/util/IOUtils.java
@@ -407,7 +407,8 @@
     }
     
     if (isDir) {
-      assert (Constants.LINUX || Constants.MAC_OS_X) == false :
+      // TODO: LUCENE-6169 - Fix this assert once Java 9 problems are solved!
+      assert (Constants.LINUX || Constants.MAC_OS_X) == false || Constants.JRE_IS_MINIMUM_JAVA9 :
         "On Linux and MacOSX fsyncing a directory should not throw IOException, "+
         "we just don't want to rely on that in production (undocumented). Got: " + exc;
       // Ignore exception if it is a directory
diff --git a/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java b/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java
index 33413be..190d0fa 100644
--- a/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java
+++ b/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java
@@ -19,7 +19,10 @@
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
 import java.util.concurrent.atomic.AtomicInteger;
 
 /**
@@ -32,6 +35,8 @@
   // Used for printing messages
   private static final AtomicInteger MESSAGE_ID = new AtomicInteger();
   protected final int messageID;
+
+  private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.ROOT);
   
   protected final PrintStream stream;
   
@@ -46,7 +51,7 @@
   
   @Override
   public void message(String component, String message) {
-    stream.println(component + " " + messageID + " [" + new Date() + "; " + Thread.currentThread().getName() + "]: " + message);    
+    stream.println(component + " " + messageID + " [" + dateFormat.format(new Date()) + "; " + Thread.currentThread().getName() + "]: " + message);    
   }
 
   @Override
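
One caveat worth noting about the new static field: java.text.SimpleDateFormat
is not thread-safe, and message() may be called from merge threads. If that is
a concern, a per-thread formatter is one option (a sketch, not what this patch
does):

    // Sketch: same pattern and root locale as the patch, one instance per
    // thread; message() would then use dateFormat.get().format(new Date()).
    private static final ThreadLocal<DateFormat> dateFormat = new ThreadLocal<DateFormat>() {
      @Override
      protected DateFormat initialValue() {
        return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.ROOT);
      }
    };
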
diff --git a/lucene/core/src/java/org/apache/lucene/util/StringHelper.java b/lucene/core/src/java/org/apache/lucene/util/StringHelper.java
index 0931e35..2668635 100644
--- a/lucene/core/src/java/org/apache/lucene/util/StringHelper.java
+++ b/lucene/core/src/java/org/apache/lucene/util/StringHelper.java
@@ -253,7 +253,7 @@
       x0 = Long.parseLong(prop, 16);
       x1 = x0;
     } else {
-      // "Rough randomess" from 3 different sources:
+      // Randomness from 3 different sources:
       x0 = System.nanoTime();
       x1 = StringHelper.class.hashCode() << 32;
       StringBuilder sb = new StringBuilder();
diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java
index 7601fc1..ef2d331 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Version.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Version.java
@@ -39,6 +39,13 @@
   @Deprecated
   public static final Version LUCENE_5_0_0 = new Version(5, 0, 0);
 
+  /**
+   * Match settings and bugs in Lucene's 5.1.0 release.
+   * @deprecated Use latest
+   */
+  @Deprecated
+  public static final Version LUCENE_5_1_0 = new Version(5, 1, 0);
+
   /** Match settings and bugs in Lucene's 6.0 release.
    *  <p>
    *  Use this to get the latest &amp; greatest settings, bug
diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java b/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java
index d907211..3c6a8c5 100644
--- a/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java
+++ b/lucene/core/src/java/org/apache/lucene/util/automaton/Automaton.java
@@ -19,16 +19,19 @@
 
 //import java.io.IOException;
 //import java.io.PrintWriter;
+
 import java.util.Arrays;
 import java.util.BitSet;
 import java.util.HashSet;
 import java.util.Set;
 
+import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.InPlaceMergeSorter;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util.Sorter;
 
 
 // TODO
 //   - could use packed int arrays instead
 //   - could encode dest w/ delta from to?
@@ -47,7 +52,8 @@
  *
  * @lucene.experimental */
 
-public class Automaton {
+public class Automaton implements Accountable {
+
   /** Where we next write to the int[] states; this increments by 2 for
    *  each added state because we pack a pointer to the transitions
    *  array and a count of how many transitions leave the state.  */
@@ -879,4 +885,14 @@
       }
     }
   }
+
+  @Override
+  public long ramBytesUsed() {
+    // TODO: BitSet RAM usage (isAccept.size()/8) isn't fully accurate...
+    return RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + RamUsageEstimator.sizeOf(states) + RamUsageEstimator.sizeOf(transitions) +
+      RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (isAccept.size() / 8) + RamUsageEstimator.NUM_BYTES_OBJECT_REF +
+      2 * RamUsageEstimator.NUM_BYTES_OBJECT_REF +
+      3 * RamUsageEstimator.NUM_BYTES_INT +
+      RamUsageEstimator.NUM_BYTES_BOOLEAN;
+  }
 }
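
Since Automaton now implements Accountable, its heap footprint can be read
directly. A small sketch; the Automata.makeString factory is just a convenient
way to build an automaton for the example:

    import org.apache.lucene.util.automaton.Automata;
    import org.apache.lucene.util.automaton.Automaton;

    // Sketch: Accountable exposes an estimate of bytes used on the Java heap.
    static long automatonHeapBytes() {
      Automaton a = Automata.makeString("lucene");
      return a.ramBytesUsed();
    }
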
diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java b/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java
index 185cd04..a6e5c88 100644
--- a/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java
+++ b/lucene/core/src/java/org/apache/lucene/util/automaton/CompiledAutomaton.java
@@ -321,6 +321,7 @@
     if (this.finite) {
       commonSuffixRef = null;
     } else {
+      // NOTE: this is a very costly operation!  We should test if it's really warranted in practice...
       commonSuffixRef = Operations.getCommonSuffixBytesRef(binary, maxDeterminizedStates);
     }
     runAutomaton = new ByteRunAutomaton(binary, true, maxDeterminizedStates);
diff --git a/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java b/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
index cc53d89..02e7c0d 100644
--- a/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
+++ b/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
@@ -16,6 +16,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 import java.io.IOException;
 
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -55,21 +56,20 @@
     @Override
     protected MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
       MergeThread thread = new MyMergeThread(writer, merge);
-      thread.setThreadPriority(getMergeThreadPriority());
       thread.setDaemon(true);
       thread.setName("MyMergeThread");
       return thread;
     }
 
     @Override
-    protected void handleMergeException(Throwable t) {
+    protected void handleMergeException(Directory dir, Throwable t) {
       excCalled = true;
     }
 
-    @Override
-    protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+    @Override
+    protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
       mergeCalled = true;
-      super.doMerge(merge);
+      super.doMerge(writer, merge);
     }
   }
 
@@ -118,7 +118,7 @@
       OneMerge merge = null;
       while ((merge = writer.getNextMerge()) != null) {
         if (VERBOSE) {
-          System.out.println("executing merge " + merge.segString(writer.getDirectory()));
+          System.out.println("executing merge " + merge.segString());
         }
         writer.merge(merge);
       }
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
index 5d6eedb..c626421 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java
@@ -25,15 +25,18 @@
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.LowSchemaField;
 import org.apache.lucene.index.BaseStoredFieldsFormatTestCase;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.ByteArrayDataOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.junit.Test;
-
 import com.carrotsearch.randomizedtesting.generators.RandomInts;
 
 public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTestCase {
@@ -260,4 +263,50 @@
       out.reset(buffer);
     }
   }
+  
+  /**
+   * writes some tiny segments with incomplete compressed blocks,
+   * and ensures merge recompresses them.
+   */
+  public void testChunkCleanup() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
+    iwConf.setMergePolicy(NoMergePolicy.INSTANCE);
+    
+    // we have to enforce certain things like maxDocsPerChunk to cause dirty chunks to be created
+    // by this test.
+    iwConf.setCodec(CompressingCodec.randomInstance(random(), 4*1024, 100, false, 8));
+    IndexWriter iw = new IndexWriter(dir, iwConf);
+    DirectoryReader ir = DirectoryReader.open(iw, true);
+    for (int i = 0; i < 5; i++) {
+      Document doc = iw.newDocument();
+      doc.addStoredString("text", "not very long at all");
+      iw.addDocument(doc);
+      // force flush
+      DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+      assertNotNull(ir2);
+      ir.close();
+      ir = ir2;
+      // examine dirty counts:
+      for (LeafReaderContext leaf : ir2.leaves()) {
+        CodecReader sr = (CodecReader) leaf.reader();
+        CompressingStoredFieldsReader reader = (CompressingStoredFieldsReader)sr.getFieldsReader();
+        assertEquals(1, reader.getNumChunks());
+        assertEquals(1, reader.getNumDirtyChunks());
+      }
+    }
+    iw.getConfig().setMergePolicy(newLogMergePolicy());
+    iw.forceMerge(1);
+    DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+    assertNotNull(ir2);
+    ir.close();
+    ir = ir2;
+    CodecReader sr = getOnlySegmentReader(ir);
+    CompressingStoredFieldsReader reader = (CompressingStoredFieldsReader)sr.getFieldsReader();
+    // we could get lucky, and have zero, but typically one.
+    assertTrue(reader.getNumDirtyChunks() <= 1);
+    ir.close();
+    iw.close();
+    dir.close();
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
index b0dade4..3532fb0 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingTermVectorsFormat.java
@@ -1,13 +1,23 @@
 package org.apache.lucene.codecs.compressing;
 
+import java.io.IOException;
+
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.document.Document;
+import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.index.BaseTermVectorsFormatTestCase;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.TermsEnum.SeekStatus;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 
@@ -65,4 +75,52 @@
     iw.close();
     dir.close();
   }
+  
+  /**
+   * writes some tiny segments with incomplete compressed blocks,
+   * and ensures merge recompresses them.
+   */
+  public void testChunkCleanup() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
+    iwConf.setMergePolicy(NoMergePolicy.INSTANCE);
+    
+    // we have to enforce certain things like maxDocsPerChunk to cause dirty chunks to be created
+    // by this test.
+    iwConf.setCodec(CompressingCodec.randomInstance(random(), 4*1024, 100, false, 8));
+    IndexWriter iw = new IndexWriter(dir, iwConf);
+    FieldTypes fieldTypes = iw.getFieldTypes();
+    fieldTypes.enableTermVectors("text");
+    DirectoryReader ir = DirectoryReader.open(iw, true);
+    for (int i = 0; i < 5; i++) {
+      Document doc = iw.newDocument();
+      doc.addShortText("text", "not very long at all");
+      iw.addDocument(doc);
+      // force flush
+      DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+      assertNotNull(ir2);
+      ir.close();
+      ir = ir2;
+      // examine dirty counts:
+      for (LeafReaderContext leaf : ir2.leaves()) {
+        CodecReader sr = (CodecReader) leaf.reader();
+        CompressingTermVectorsReader reader = (CompressingTermVectorsReader)sr.getTermVectorsReader();
+        assertEquals(1, reader.getNumChunks());
+        assertEquals(1, reader.getNumDirtyChunks());
+      }
+    }
+    iw.getConfig().setMergePolicy(newLogMergePolicy());
+    iw.forceMerge(1);
+    DirectoryReader ir2 = DirectoryReader.openIfChanged(ir);
+    assertNotNull(ir2);
+    ir.close();
+    ir = ir2;
+    CodecReader sr = getOnlySegmentReader(ir);
+    CompressingTermVectorsReader reader = (CompressingTermVectorsReader)sr.getTermVectorsReader();
+    // we could get lucky, and have zero, but typically one.
+    assertTrue(reader.getNumDirtyChunks() <= 1);
+    ir.close();
+    iw.close();
+    dir.close();
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java
index d279ccd..75eaadd 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestBlockPostingsFormat.java
@@ -67,4 +67,21 @@
     w.close();
     d.close();
   }
+
+  private void shouldFail(int minItemsInBlock, int maxItemsInBlock) {
+    try {
+      new Lucene50PostingsFormat(minItemsInBlock, maxItemsInBlock);
+      fail("did not hit exception");
+    } catch (IllegalArgumentException iae) {
+      // expected
+    }
+  }
+
+  public void testInvalidBlockSizes() throws Exception {
+    shouldFail(0, 0);
+    shouldFail(10, 8);
+    shouldFail(-1, 10);
+    shouldFail(10, -1);
+    shouldFail(10, 12);
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
index 23684e5..0806679 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50DocValuesFormat.java
@@ -32,12 +32,14 @@
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.TermsEnum.SeekStatus;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.TestUtil;
@@ -177,10 +179,27 @@
       writer.deleteDocuments(new Term("id", Integer.toString(id)));
     }
     
+    // compare per-segment
+    DirectoryReader ir = writer.getReader();
+    for (LeafReaderContext context : ir.leaves()) {
+      LeafReader r = context.reader();
+      Terms terms = r.terms("indexed");
+      if (terms != null) {
+        SortedSetDocValues ssdv = r.getSortedSetDocValues("dv");
+        assertEquals(terms.size(), ssdv.getValueCount());
+        TermsEnum expected = terms.iterator(null);
+        TermsEnum actual = r.getSortedSetDocValues("dv").termsEnum();
+        assertEquals(terms.size(), expected, actual);
+
+        doTestSortedSetEnumAdvanceIndependently(ssdv);
+      }
+    }
+    ir.close();
+    
     writer.forceMerge(1);
     
     // now compare again after the merge
-    DirectoryReader ir = writer.getReader();
+    ir = writer.getReader();
     LeafReader ar = getOnlySegmentReader(ir);
     Terms terms = ar.terms("indexed");
     if (terms != null) {
diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java b/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java
index 0795a22..fe10ab0 100644
--- a/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java
+++ b/lucene/core/src/test/org/apache/lucene/document/TestFieldTypes.java
@@ -22,8 +22,10 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.SlowCodecReaderWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
 
 public class TestFieldTypes extends LuceneTestCase {
 
@@ -102,7 +104,7 @@
     doc.addInt("field", 5);
     w.addDocument(doc);
     w.close();
-    IndexReader sub = DirectoryReader.open(dir);
+    DirectoryReader sub = DirectoryReader.open(dir);
 
     w = newIndexWriter(newIndexWriterConfig().setOpenMode(IndexWriterConfig.OpenMode.CREATE));
     doc = w.newDocument();
@@ -110,7 +112,7 @@
     w.addDocument(doc);
 
     try {
-      w.addIndexes(sub);
+      TestUtil.addIndexesSlowly(w, sub);
       fail("did not hit exception");
     } catch (IllegalStateException ise) {
       assertEquals("field \"field\": cannot change value type from SHORT_TEXT to INT", ise.getMessage());
diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java
index ae93b5a..ef71b0a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java
@@ -82,31 +82,29 @@
     w.close();
     
     DirectoryReader oneThousand = DirectoryReader.open(dir);
-    IndexReader subReaders[] = new IndexReader[1000];
+    DirectoryReader subReaders[] = new DirectoryReader[1000];
     Arrays.fill(subReaders, oneThousand);
-    MultiReader mr = new MultiReader(subReaders);
     BaseDirectoryWrapper dir2 = newFSDirectory(createTempDir("2BPostingsBytes2"));
     if (dir2 instanceof MockDirectoryWrapper) {
       ((MockDirectoryWrapper)dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
     }
     IndexWriter w2 = new IndexWriter(dir2,
         new IndexWriterConfig(null));
-    w2.addIndexes(mr);
+    TestUtil.addIndexesSlowly(w2, subReaders);
     w2.forceMerge(1);
     w2.close();
     oneThousand.close();
     
     DirectoryReader oneMillion = DirectoryReader.open(dir2);
-    subReaders = new IndexReader[2000];
+    subReaders = new DirectoryReader[2000];
     Arrays.fill(subReaders, oneMillion);
-    mr = new MultiReader(subReaders);
     BaseDirectoryWrapper dir3 = newFSDirectory(createTempDir("2BPostingsBytes3"));
     if (dir3 instanceof MockDirectoryWrapper) {
       ((MockDirectoryWrapper)dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
     }
     IndexWriter w3 = new IndexWriter(dir3,
         new IndexWriterConfig(null));
-    w3.addIndexes(mr);
+    TestUtil.addIndexesSlowly(w3, subReaders);
     w3.forceMerge(1);
     w3.close();
     oneMillion.close();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java b/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java
index 4197534..c6ad09d 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAbuseSchema.java
@@ -789,9 +789,9 @@
     Directory dir2 = newDirectory();
     conf = newIndexWriterConfig(new MockAnalyzer(random()));
     writer = new IndexWriter(dir2, conf);
-    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
-    writer.addIndexes(readers);
-    readers[0].close();
+    DirectoryReader reader = DirectoryReader.open(dir);
+    TestUtil.addIndexesSlowly(writer, reader);
+    reader.close();
     field = new LowSchemaField(a, "dv", new BytesRef("foo"), IndexOptions.NONE, false);
     field.setDocValuesType(DocValuesType.BINARY);
     doc = new ArrayList<>();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
index cb39eb7..7e4c542 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java
@@ -649,7 +649,7 @@
     IndexWriter writer2;
     final List<Throwable> failures = new ArrayList<>();
     volatile boolean didClose;
-    final IndexReader[] readers;
+    final DirectoryReader[] readers;
     final int NUM_COPY;
     final static int NUM_THREADS = 5;
     final Thread[] threads = new Thread[NUM_THREADS];
@@ -668,7 +668,7 @@
       writer2.commit();
       
 
-      readers = new IndexReader[NUM_COPY];
+      readers = new DirectoryReader[NUM_COPY];
       for(int i=0;i<NUM_COPY;i++)
         readers[i] = DirectoryReader.open(dir);
     }
@@ -767,9 +767,9 @@
         break;
       case 2:
         if (VERBOSE) {
-          System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(IndexReader[])");
+          System.out.println(Thread.currentThread().getName() + ": TEST: addIndexes(LeafReader[])");
         }
-        writer2.addIndexes(readers);
+        TestUtil.addIndexesSlowly(writer2, readers);
         break;
       case 3:
         if (VERBOSE) {
@@ -872,9 +872,9 @@
         break;
       case 2:
         if (VERBOSE) {
-          System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(IR[])");
+          System.out.println("TEST: " + Thread.currentThread().getName() + ": addIndexes(LR[])");
         }
-        writer2.addIndexes(readers);
+        TestUtil.addIndexesSlowly(writer2, readers);
         break;
       case 3:
         if (VERBOSE) {
@@ -952,10 +952,16 @@
       System.out.println("TEST: now force rollback");
     }
     c.didClose = true;
+    MergeScheduler ms = c.writer2.getConfig().getMergeScheduler();
+
     c.writer2.rollback();
 
     c.joinThreads();
 
+    if (ms instanceof ConcurrentMergeScheduler) {
+      assertEquals(0, ((ConcurrentMergeScheduler) ms).mergeThreadCount());
+    }
+
     c.closeDir();
 
     assertTrue(c.failures.size() == 0);
@@ -979,11 +985,8 @@
 
     // Now delete the document
     writer.deleteDocuments(new Term("id", "myid"));
-    IndexReader r = DirectoryReader.open(dirs[1]);
-    try {
-      writer.addIndexes(r);
-    } finally {
-      r.close();
+    try (DirectoryReader r = DirectoryReader.open(dirs[1])) {
+      TestUtil.addIndexesSlowly(writer, r);
     }
     writer.commit();
     assertEquals("Documents from the incoming index should not have been deleted", 1, writer.numDocs());
@@ -1098,7 +1101,7 @@
       w.close();
     }
     
-    IndexReader[] readers = new IndexReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
+    DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
     
     MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
     dir.setEnableVirusScanner(false); // we check for specific list of files
@@ -1108,7 +1111,7 @@
     lmp.setNoCFSRatio(1.0);
     lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
     IndexWriter w3 = new IndexWriter(dir, conf);
-    w3.addIndexes(readers);
+    TestUtil.addIndexesSlowly(w3, readers);
     w3.close();
     // we should now see segments_X,
     // _Y.cfs,_Y.cfe, _Z.si
@@ -1177,7 +1180,7 @@
     doc.addAtom("f1", "doc1 field1");
     doc.addAtom("id", "1");
     w.addDocument(doc);
-    IndexReader r1 = w.getReader();
+    DirectoryReader r1 = w.getReader();
     w.close();
 
     Directory d2 = newDirectory();
@@ -1186,12 +1189,12 @@
     doc.addAtom("f2", "doc2 field2");
     doc.addAtom("id", "2");
     w.addDocument(doc);
-    IndexReader r2 = w.getReader();
+    DirectoryReader r2 = w.getReader();
     w.close();
 
     Directory d3 = newDirectory();
     w = new RandomIndexWriter(random(), d3);
-    w.addIndexes(r1, r2);
+    TestUtil.addIndexesSlowly(w.w, r1, r2);
     r1.close();
     d1.close();
     r2.close();
@@ -1215,8 +1218,7 @@
   public void testAddEmpty() throws Exception {
     Directory d1 = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), d1);
-    MultiReader empty = new MultiReader();
-    w.addIndexes(empty);
+    w.addIndexes(new CodecReader[0]);
     w.close();
     DirectoryReader dr = DirectoryReader.open(d1);
     for (LeafReaderContext ctx : dr.leaves()) {
@@ -1234,11 +1236,11 @@
     Directory src = newDirectory(), dest = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), src);
     w.addDocument(w.newDocument());
-    IndexReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
+    LeafReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
     w.close();
     
     w = new RandomIndexWriter(random(), dest);
-    w.addIndexes(allDeletedReader);
+    w.addIndexes(SlowCodecReaderWrapper.wrap(allDeletedReader));
     w.close();
     DirectoryReader dr = DirectoryReader.open(src);
     for (LeafReaderContext ctx : dr.leaves()) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
index 371b549..199bc66 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java
@@ -311,42 +311,6 @@
     dir.close();
   }
   
-  public void testUpdateAndDeleteSameDocument() throws Exception {
-    // update and delete same document in same commit session
-    Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
-    conf.setMaxBufferedDocs(10); // control segment flushing
-    IndexWriter writer = new IndexWriter(dir, conf);
-    FieldTypes fieldTypes = writer.getFieldTypes();
-    fieldTypes.disableSorting("val");
-    
-    writer.addDocument(doc(writer, 0));
-    writer.addDocument(doc(writer, 1));
-    
-    if (random().nextBoolean()) {
-      writer.commit();
-    }
-    
-    writer.deleteDocuments(new Term("id", "doc-0"));
-    writer.updateBinaryDocValue(new Term("id", "doc-0"), "val", toBytes(17L));
-    
-    final DirectoryReader reader;
-    if (random().nextBoolean()) { // not NRT
-      writer.close();
-      reader = DirectoryReader.open(dir);
-    } else { // NRT
-      reader = DirectoryReader.open(writer, true);
-      writer.close();
-    }
-    
-    LeafReader r = reader.leaves().get(0).reader();
-    assertFalse(r.getLiveDocs().get(0));
-    assertEquals(1, getValue(r.getBinaryDocValues("val"), 0)); // deletes are currently applied first
-    
-    reader.close();
-    dir.close();
-  }
-  
   public void testMultipleDocValuesTypes() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -669,7 +633,7 @@
     reader.close();
     dir.close();
   }
-  
+
   public void testManyReopensAndFields() throws Exception {
     Directory dir = newDirectory();
     final Random random = random();
@@ -688,6 +652,7 @@
       writer.commit();
       reader = DirectoryReader.open(dir);
     }
+    //System.out.println("TEST: isNRT=" + isNRT);
     
     final int numFields = random.nextInt(4) + 3; // 3-7
     final long[] fieldValues = new long[numFields];
@@ -700,7 +665,7 @@
     int docID = 0;
     for (int i = 0; i < numRounds; i++) {
       int numDocs = atLeast(5);
-//      System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
+      //System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
       for (int j = 0; j < numDocs; j++) {
         Document doc = writer.newDocument();
         doc.addAtom("id", "doc-" + docID);
@@ -1143,7 +1108,7 @@
       writer.addIndexes(dir1);
     } else {
       DirectoryReader reader = DirectoryReader.open(dir1);
-      writer.addIndexes(reader);
+      TestUtil.addIndexesSlowly(writer, reader);
       reader.close();
     }
     writer.close();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java b/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
index 9621799..942d70b 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
@@ -292,7 +292,7 @@
     ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
 
       @Override
-      protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+      protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
         try {
           // Stall all incoming merges until we see
           // maxMergeCount:
@@ -311,7 +311,7 @@
             // Then sleep a bit to give a chance for the bug
             // (too many pending merges) to appear:
             Thread.sleep(20);
-            super.doMerge(merge);
+            super.doMerge(writer, merge);
           } finally {
             runningMergeCount.decrementAndGet();
           }
@@ -357,10 +357,10 @@
     }
 
     @Override
-    public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+    public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
       totMergedBytes += merge.totalBytesSize();
       atLeastOneMerge.countDown();
-      super.doMerge(merge);
+      super.doMerge(writer, merge);
     }
   }
 
@@ -429,7 +429,7 @@
         final AtomicInteger runningMergeCount = new AtomicInteger();
 
         @Override
-        public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+        public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
           int count = runningMergeCount.incrementAndGet();
           // evil?
           synchronized (this) {
@@ -438,7 +438,7 @@
             }
           }
           try {
-            super.doMerge(merge);
+            super.doMerge(writer, merge);
           } finally {
             runningMergeCount.decrementAndGet();
           }
@@ -461,7 +461,6 @@
 
     // No merges should have run so far, because TMP has high segmentsPerTier:
     assertEquals(0, maxRunningMergeCount.get());
-
     w.forceMerge(1);
 
     // At most 5 merge threads should have launched at once:
@@ -490,8 +489,9 @@
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
         @Override
-        protected void maybeStall() {
+        protected boolean maybeStall(IndexWriter writer) {
           wasCalled.set(true);
+          return true;
         }
       });
     IndexWriter w = new IndexWriter(dir, iwc);
@@ -515,14 +515,14 @@
     final CountDownLatch mergeFinish = new CountDownLatch(1);
     ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
         @Override
-        protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+        protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
           mergeStart.countDown();
           try {
             mergeFinish.await();
           } catch (InterruptedException ie) {
             throw new RuntimeException(ie);
           }
-          super.doMerge(merge);
+          super.doMerge(writer, merge);
         }
       };
     cms.setMaxMergesAndThreads(1, 1);
@@ -630,7 +630,7 @@
     ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
     cms.setDefaultMaxMergesAndThreads(true);
     assertEquals(1, cms.getMaxThreadCount());
-    assertEquals(2, cms.getMaxMergeCount());
+    assertEquals(6, cms.getMaxMergeCount());
   }
 
   public void testNonSpinningDefaults() throws Exception {
@@ -638,7 +638,45 @@
     cms.setDefaultMaxMergesAndThreads(false);
     int threadCount = cms.getMaxThreadCount();
     assertTrue(threadCount >= 1);
-    assertTrue(threadCount <= 3);
-    assertEquals(cms.getMaxMergeCount(), 2+threadCount);
+    assertTrue(threadCount <= 4);
+    assertEquals(5+threadCount, cms.getMaxMergeCount());
+  }
+
+  // LUCENE-6197
+  public void testNoStallMergeThreads() throws Exception {
+    MockDirectoryWrapper dir = newMockDirectory();
+
+    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
+    iwc.setMergePolicy(NoMergePolicy.INSTANCE);
+    iwc.setMaxBufferedDocs(2);
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for(int i=0;i<1000;i++) {
+      Document doc = w.newDocument();
+      doc.addAtom("field", ""+i);
+      w.addDocument(doc);
+    }
+    w.close();
+
+    iwc = newIndexWriterConfig(new MockAnalyzer(random()));
+    AtomicBoolean failed = new AtomicBoolean();
+    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
+        @Override
+        protected void doStall() {
+          if (Thread.currentThread().getName().startsWith("Lucene Merge Thread")) {
+            failed.set(true);
+          }
+          super.doStall();
+        }
+      };
+    cms.setMaxMergesAndThreads(2, 1);
+    iwc.setMergeScheduler(cms);
+    iwc.setMaxBufferedDocs(2);
+
+    w = new IndexWriter(dir, iwc);
+    w.forceMerge(1);
+    w.close();
+    dir.close();
+
+    assertFalse(failed.get());
   }
 }
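
The hunks above migrate every ConcurrentMergeScheduler subclass to two revised extension points: doMerge and maybeStall now receive the IndexWriter explicitly (it is no longer scheduler state), and maybeStall returns a boolean. A condensed migration sketch built only from the signatures visible in this patch; the counter field is illustrative:

    import java.io.IOException;
    import java.util.concurrent.atomic.AtomicInteger;

    import org.apache.lucene.index.ConcurrentMergeScheduler;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.MergePolicy;

    public class CountingMergeScheduler extends ConcurrentMergeScheduler {
      private final AtomicInteger mergeCount = new AtomicInteger();

      @Override
      protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
        mergeCount.incrementAndGet(); // observe the merge...
        super.doMerge(writer, merge); // ...then delegate, passing the writer through
      }

      @Override
      protected boolean maybeStall(IndexWriter writer) {
        // The maybeStall hunk above just records the call and returns true;
        // here we simply delegate to the default behavior.
        return super.maybeStall(writer);
      }
    }
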
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
index b8a2583..b4238f2 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
@@ -40,6 +40,7 @@
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.MergeInfo;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.TrackingDirectoryWrapper;
 import org.apache.lucene.util.InfoStream;
@@ -47,235 +48,235 @@
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.Version;
 
-
 /** JUnit adaptation of an older test case DocTest. */
 public class TestDoc extends LuceneTestCase {
 
-    private Path workDir;
-    private Path indexDir;
-    private LinkedList<Path> files;
+  private Path workDir;
+  private Path indexDir;
+  private LinkedList<Path> files;
 
-    /** Set the test case. This test case needs
-     *  a few text files created in the current working directory.
-     */
-    @Override
-    public void setUp() throws Exception {
-        super.setUp();
-        if (VERBOSE) {
-          System.out.println("TEST: setUp");
-        }
-        workDir = createTempDir("TestDoc");
-        indexDir = createTempDir("testIndex");
-
-        Directory directory = newFSDirectory(indexDir);
-        directory.close();
-
-        files = new LinkedList<>();
-        files.add(createOutput("test.txt",
-            "This is the first test file"
-        ));
-
-        files.add(createOutput("test2.txt",
-            "This is the second test file"
-        ));
+  /** Sets up the test case. This test needs

+   *  a few text files created in the current working directory.
+   */
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    if (VERBOSE) {
+      System.out.println("TEST: setUp");
     }
+    workDir = createTempDir("TestDoc");
+    indexDir = createTempDir("testIndex");
 
-    private Path createOutput(String name, String text) throws IOException {
-        Writer fw = null;
-        PrintWriter pw = null;
+    Directory directory = newFSDirectory(indexDir);
+    directory.close();
 
-        try {
-            Path path = workDir.resolve(name);
-            Files.deleteIfExists(path);
+    files = new LinkedList<>();
+    files.add(createOutput("test.txt",
+                           "This is the first test file"
+                           ));
 
-            fw = new OutputStreamWriter(Files.newOutputStream(path), StandardCharsets.UTF_8);
-            pw = new PrintWriter(fw);
-            pw.println(text);
-            return path;
+    files.add(createOutput("test2.txt",
+                           "This is the second test file"
+                           ));
+  }
 
-        } finally {
-            if (pw != null) pw.close();
-            if (fw != null) fw.close();
-        }
+  private Path createOutput(String name, String text) throws IOException {
+    Writer fw = null;
+    PrintWriter pw = null;
+
+    try {
+      Path path = workDir.resolve(name);
+      Files.deleteIfExists(path);
+
+      fw = new OutputStreamWriter(Files.newOutputStream(path), StandardCharsets.UTF_8);
+      pw = new PrintWriter(fw);
+      pw.println(text);
+      return path;
+
+    } finally {
+      if (pw != null) pw.close();
+      if (fw != null) fw.close();
     }
+  }
 
 
-    /** This test executes a number of merges and compares the contents of
-     *  the segments created when using compound file or not using one.
-     *
-     *  TODO: the original test used to print the segment contents to System.out
-     *        for visual validation. To have the same effect, a new method
-     *        checkSegment(String name, ...) should be created that would
-     *        assert various things about the segment.
-     */
-    public void testIndexAndMerge() throws Exception {
-      StringWriter sw = new StringWriter();
-      PrintWriter out = new PrintWriter(sw, true);
+  /** This test executes a number of merges and compares the contents of
+   *  the segments created with and without compound files.
+   *
+   *  TODO: the original test used to print the segment contents to System.out
+   *        for visual validation. To have the same effect, a new method
+   *        checkSegment(String name, ...) should be created that would
+   *        assert various things about the segment.
+   */
+  public void testIndexAndMerge() throws Exception {
+    StringWriter sw = new StringWriter();
+    PrintWriter out = new PrintWriter(sw, true);
       
-      Directory directory = newFSDirectory(indexDir);
+    Directory directory = newFSDirectory(indexDir);
 
-      if (directory instanceof MockDirectoryWrapper) {
-        // We create unreferenced files (we don't even write
-        // a segments file):
-        ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
-        // this test itself deletes files (has no retry mechanism)
-        ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
-      }
+    if (directory instanceof MockDirectoryWrapper) {
+      // We create unreferenced files (we don't even write
+      // a segments file):
+      ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
+      // this test itself deletes files (has no retry mechanism)
+      ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
+    }
 
-      IndexWriter writer = new IndexWriter(
-          directory,
-          newIndexWriterConfig(new MockAnalyzer(random())).
-              setOpenMode(OpenMode.CREATE).
-              setMaxBufferedDocs(-1).
-              setMergePolicy(newLogMergePolicy(10))
-      );
+    IndexWriter writer = new IndexWriter(
+                                         directory,
+                                         newIndexWriterConfig(new MockAnalyzer(random())).
+                                         setOpenMode(OpenMode.CREATE).
+                                         setMaxBufferedDocs(-1).
+                                         setMergePolicy(newLogMergePolicy(10))
+                                         );
       FieldTypes fieldTypes = writer.getFieldTypes();
       fieldTypes.disableExistsFilters();
 
-      SegmentCommitInfo si1 = indexDoc(writer, "test.txt");
-      printSegment(out, si1);
+    SegmentCommitInfo si1 = indexDoc(writer, "test.txt");
+    printSegment(out, si1);
 
-      SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
-      printSegment(out, si2);
-      writer.close();
+    SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
+    printSegment(out, si2);
+    writer.close();
 
-      SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", false);
-      printSegment(out, siMerge);
+    SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", false);
+    printSegment(out, siMerge);
 
-      SegmentCommitInfo siMerge2 = merge(directory, si1, si2, "_merge2", false);
-      printSegment(out, siMerge2);
+    SegmentCommitInfo siMerge2 = merge(directory, si1, si2, "_merge2", false);
+    printSegment(out, siMerge2);
 
-      SegmentCommitInfo siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", false);
-      printSegment(out, siMerge3);
+    SegmentCommitInfo siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", false);
+    printSegment(out, siMerge3);
       
-      directory.close();
-      out.close();
-      sw.close();
+    directory.close();
+    out.close();
+    sw.close();
 
-      String multiFileOutput = sw.toString();
-      //System.out.println(multiFileOutput);
+    String multiFileOutput = sw.toString();
+    //System.out.println(multiFileOutput);
 
-      sw = new StringWriter();
-      out = new PrintWriter(sw, true);
+    sw = new StringWriter();
+    out = new PrintWriter(sw, true);
 
-      directory = newFSDirectory(indexDir);
+    directory = newFSDirectory(indexDir);
 
-      if (directory instanceof MockDirectoryWrapper) {
-        // We create unreferenced files (we don't even write
-        // a segments file):
-        ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
-        // this test itself deletes files (has no retry mechanism)
-        ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
+    if (directory instanceof MockDirectoryWrapper) {
+      // We create unreferenced files (we don't even write
+      // a segments file):
+      ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
+      // this test itself deletes files (has no retry mechanism)
+      ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
+    }
+
+    writer = new IndexWriter(
+                             directory,
+                             newIndexWriterConfig(new MockAnalyzer(random())).
+                             setOpenMode(OpenMode.CREATE).
+                             setMaxBufferedDocs(-1).
+                             setMergePolicy(newLogMergePolicy(10))
+                             );
+
+    fieldTypes = writer.getFieldTypes();
+    fieldTypes.disableExistsFilters();
+
+    si1 = indexDoc(writer, "test.txt");
+    printSegment(out, si1);
+
+    si2 = indexDoc(writer, "test2.txt");
+    printSegment(out, si2);
+    writer.close();
+
+    siMerge = merge(directory, si1, si2, "_merge", true);
+    printSegment(out, siMerge);
+
+    siMerge2 = merge(directory, si1, si2, "_merge2", true);
+    printSegment(out, siMerge2);
+
+    siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", true);
+    printSegment(out, siMerge3);
+      
+    directory.close();
+    out.close();
+    sw.close();
+    String singleFileOutput = sw.toString();
+
+    assertEquals(multiFileOutput, singleFileOutput);
+  }
+
+  private SegmentCommitInfo indexDoc(IndexWriter writer, String fileName)
+    throws Exception {
+    Path path = workDir.resolve(fileName);
+    Document doc = writer.newDocument();
+    InputStreamReader is = new InputStreamReader(Files.newInputStream(path), StandardCharsets.UTF_8);
+    doc.addLargeText("contents", is);
+    writer.addDocument(doc);
+    writer.commit();
+    is.close();
+    return writer.newestSegment();
+  }
+
+  private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
+    throws Exception {
+    FieldTypes fieldTypes = FieldTypes.getFieldTypes(dir, null);
+    IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
+    SegmentReader r1 = new SegmentReader(fieldTypes, si1, context);
+    SegmentReader r2 = new SegmentReader(fieldTypes, si2, context);
+
+    final Codec codec = Codec.getDefault();
+    TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
+    final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
+
+    SegmentMerger merger = new SegmentMerger(fieldTypes, Arrays.<CodecReader>asList(r1, r2),
+                                             si, InfoStream.getDefault(), trackingDir,
+                                             new FieldInfos.FieldNumbers(), context);
+
+    MergeState mergeState = merger.merge();
+    r1.close();
+    r2.close();
+    si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
+      
+    if (useCompoundFile) {
+      Collection<String> filesToDelete = si.files();
+      IndexWriter.createCompoundFile(InfoStream.getDefault(), new TrackingDirectoryWrapper(dir), si, newIOContext(random()));
+      si.setUseCompoundFile(true);
+      for (final String fileToDelete : filesToDelete) {
+        si1.info.dir.deleteFile(fileToDelete);
       }
+    }
 
-      writer = new IndexWriter(
-          directory,
-          newIndexWriterConfig(new MockAnalyzer(random())).
-              setOpenMode(OpenMode.CREATE).
-              setMaxBufferedDocs(-1).
-              setMergePolicy(newLogMergePolicy(10))
-      );
-
-      fieldTypes = writer.getFieldTypes();
-      fieldTypes.disableExistsFilters();
-
-      si1 = indexDoc(writer, "test.txt");
-      printSegment(out, si1);
-
-      si2 = indexDoc(writer, "test2.txt");
-      printSegment(out, si2);
-      writer.close();
-
-      siMerge = merge(directory, si1, si2, "_merge", true);
-      printSegment(out, siMerge);
-
-      siMerge2 = merge(directory, si1, si2, "_merge2", true);
-      printSegment(out, siMerge2);
-
-      siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", true);
-      printSegment(out, siMerge3);
-      
-      directory.close();
-      out.close();
-      sw.close();
-      String singleFileOutput = sw.toString();
-
-      assertEquals(multiFileOutput, singleFileOutput);
-   }
-
-   private SegmentCommitInfo indexDoc(IndexWriter writer, String fileName)
-     throws Exception {
-     Path path = workDir.resolve(fileName);
-     Document doc = writer.newDocument();
-     InputStreamReader is = new InputStreamReader(Files.newInputStream(path), StandardCharsets.UTF_8);
-     doc.addLargeText("contents", is);
-     writer.addDocument(doc);
-     writer.commit();
-     is.close();
-     return writer.newestSegment();
-   }
-
-   private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
-     throws Exception {
-     FieldTypes fieldTypes = FieldTypes.getFieldTypes(dir, null);
-     IOContext context = newIOContext(random());
-     SegmentReader r1 = new SegmentReader(fieldTypes, si1, context);
-     SegmentReader r2 = new SegmentReader(fieldTypes, si2, context);
-
-     final Codec codec = Codec.getDefault();
-     TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
-     final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
-
-     SegmentMerger merger = new SegmentMerger(fieldTypes, Arrays.<LeafReader>asList(r1, r2),
-                                              si, InfoStream.getDefault(), trackingDir,
-                                              MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), context);
-
-     MergeState mergeState = merger.merge();
-     r1.close();
-     r2.close();;
-     si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
-      
-     if (useCompoundFile) {
-       Collection<String> filesToDelete = IndexWriter.createCompoundFile(InfoStream.getDefault(), dir, MergeState.CheckAbort.NONE, si, newIOContext(random()));
-       si.setUseCompoundFile(true);
-       for (final String fileToDelete : filesToDelete) {
-         si1.info.dir.deleteFile(fileToDelete);
-       }
-     }
-
-     return new SegmentCommitInfo(si, 0, -1L, -1L, -1L);
-   }
+    return new SegmentCommitInfo(si, 0, -1L, -1L, -1L);
+  }
 
 
-   private void printSegment(PrintWriter out, SegmentCommitInfo si)
-   throws Exception {
-      SegmentReader reader = new SegmentReader(null, si, newIOContext(random()));
+  private void printSegment(PrintWriter out, SegmentCommitInfo si)
+    throws Exception {
+    SegmentReader reader = new SegmentReader(null, si, newIOContext(random()));
 
-      for (int i = 0; i < reader.numDocs(); i++)
-        out.println(reader.document(i));
+    for (int i = 0; i < reader.numDocs(); i++)
+      out.println(reader.document(i));
 
-      Fields fields = reader.fields();
-      for (String field : fields)  {
-        Terms terms = fields.terms(field);
-        assertNotNull(terms);
-        TermsEnum tis = terms.iterator(null);
-        while(tis.next() != null) {
+    Fields fields = reader.fields();
+    for (String field : fields)  {
+      Terms terms = fields.terms(field);
+      assertNotNull(terms);
+      TermsEnum tis = terms.iterator(null);
+      while(tis.next() != null) {
 
-          out.print("  term=" + field + ":" + tis.term());
-          out.println("    DF=" + tis.docFreq());
+        out.print("  term=" + field + ":" + tis.term());
+        out.println("    DF=" + tis.docFreq());
 
-          DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getLiveDocs(), null);
-          while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
-            out.print(" doc=" + positions.docID());
-            out.print(" TF=" + positions.freq());
-            out.print(" pos=");
-            out.print(positions.nextPosition());
-            for (int j = 1; j < positions.freq(); j++)
-              out.print("," + positions.nextPosition());
-            out.println("");
-          }
+        DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getLiveDocs(), null);
+        while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+          out.print(" doc=" + positions.docID());
+          out.print(" TF=" + positions.freq());
+          out.print(" pos=");
+          out.print(positions.nextPosition());
+          for (int j = 1; j < positions.freq(); j++)
+            out.print("," + positions.nextPosition());
+          out.println("");
         }
       }
-      reader.close();
     }
+    reader.close();
+  }
 }
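
One detail worth noting in the rewritten merge(...) helper above: the merge now runs under an IOContext carrying a MergeInfo, which lets the store layer recognize (and, per the new TestMergeRateLimiter below, throttle) merge I/O. A commented reading of the placeholder arguments, assuming MergeInfo's usual constructor order:

    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.MergeInfo;

    IOContext mergeContext = new IOContext(new MergeInfo(
        -1,     // totalMaxDoc: merged doc count unknown up front
        -1,     // estimatedMergeBytes: no size estimate available
        false,  // isExternal: not an addIndexes-style external merge
        -1));   // mergeMaxNumSegments: not a forceMerge
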
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java b/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
index 6423e34..00f0ed9 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocValuesIndexing.java
@@ -29,6 +29,7 @@
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
 
 /**
  * 
@@ -62,7 +63,7 @@
 
     Directory d3 = newDirectory();
     w = new RandomIndexWriter(random(), d3);
-    w.addIndexes(SlowCompositeReaderWrapper.wrap(r1), SlowCompositeReaderWrapper.wrap(r2));
+    w.addIndexes(SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r1)), SlowCodecReaderWrapper.wrap(SlowCompositeReaderWrapper.wrap(r2)));
     r1.close();
     d1.close();
     r2.close();
@@ -519,9 +520,9 @@
       // expected
     }
 
-    IndexReader r = DirectoryReader.open(dir2);
+    DirectoryReader r = DirectoryReader.open(dir2);
     try {
-      w.addIndexes(new IndexReader[] {r});
+      TestUtil.addIndexesSlowly(w, r);
       fail("didn't hit expected exception");
     } catch (IllegalStateException iae) {
       // expected
@@ -685,14 +686,14 @@
     doc = writer.newDocument();
     doc.addAtom("dv", new BytesRef("foo"));
     writer.addDocument(doc);
-    IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
+    DirectoryReader reader = DirectoryReader.open(dir);
     try {
-      writer.addIndexes(readers);
+      TestUtil.addIndexesSlowly(writer, reader);
       fail("did not hit exception");
     } catch (IllegalStateException ise) {
       // expected
     }
-    readers[0].close();
+    reader.close();
     writer.close();
 
     dir.close();
@@ -724,6 +725,33 @@
     dir.close();
   }
 
+  public void testTypeChangeViaAddIndexesIR2() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
+    IndexWriter writer = new IndexWriter(dir, conf);
+    Document doc = writer.newDocument();
+    doc.addLong("dv", 0L);
+    writer.addDocument(doc);
+    writer.close();
+
+    Directory dir2 = newDirectory();
+    conf = newIndexWriterConfig(new MockAnalyzer(random()));
+    writer = new IndexWriter(dir2, conf);
+    DirectoryReader reader = DirectoryReader.open(dir);
+    TestUtil.addIndexesSlowly(writer, reader);
+    reader.close();
+    doc = writer.newDocument();
+    try {
+      doc.addAtom("dv", new BytesRef("foo"));
+      fail("did not hit exception");
+    } catch (IllegalStateException ise) {
+      // expected
+    }
+    writer.close();
+    dir2.close();
+    dir.close();
+  }
+
   public void testDocsWithField() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java b/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java
index 1191bc0..6e0c55c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterDeleteQueue.java
@@ -16,6 +16,7 @@
  * License for the specific language governing permissions and limitations under
  * the License.
  */
+
 import java.lang.reflect.Field;
 import java.util.HashSet;
 import java.util.Set;
@@ -24,12 +25,14 @@
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice;
+import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.ThreadInterruptedException;
 
+
+
 /**
  * Unit test for {@link DocumentsWriterDeleteQueue}
  */
@@ -75,9 +78,18 @@
     assertEquals(uniqueValues, bd2.terms.keySet());
     HashSet<Term> frozenSet = new HashSet<>();
     BytesRefBuilder bytesRef = new BytesRefBuilder();
-    for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
-      bytesRef.copyBytes(t.bytes);
-      frozenSet.add(new Term(t.field, bytesRef.toBytesRef()));
+    TermIterator iter = queue.freezeGlobalBuffer(null).termIterator();
+    String field = null;
+    while (true) {
+      boolean newField = iter.next();
+      if (newField) {
+        field = iter.field;
+        if (field == null) {
+          break;
+        }
+      }
+      bytesRef.copyBytes(iter.bytes);
+      frozenSet.add(new Term(field, bytesRef.toBytesRef()));
     }
     assertEquals(uniqueValues, frozenSet);
     assertEquals("num deletes must be 0 after freeze", 0, queue
@@ -204,10 +216,21 @@
     queue.tryApplyGlobalSlice();
     Set<Term> frozenSet = new HashSet<>();
     BytesRefBuilder builder = new BytesRefBuilder();
-    for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
-      builder.copyBytes(t.bytes);
-      frozenSet.add(new Term(t.field, builder.toBytesRef()));
+
+    TermIterator iter = queue.freezeGlobalBuffer(null).termIterator();
+    String field = null;
+    while (true) {
+      boolean newField = iter.next();
+      if (newField) {
+        field = iter.field;
+        if (field == null) {
+          break;
+        }
+      }
+      builder.copyBytes(iter.bytes);
+      frozenSet.add(new Term(field, builder.toBytesRef()));
     }
+
     assertEquals("num deletes must be 0 after freeze", 0, queue
         .numGlobalTermDeletes());
     assertEquals(uniqueValues.size(), frozenSet.size());
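
Both loops above rely on the same traversal contract for the new PrefixCodedTerms.TermIterator: next() returns true whenever the iterator crosses into a new field, and a null field signals exhaustion. A sketch of that idiom factored into one helper; the method name is illustrative, and like the test it assumes org.apache.lucene.index package visibility:

    import java.util.HashSet;
    import java.util.Set;

    import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.util.BytesRef;

    static Set<Term> collectTerms(TermIterator iter) {
      Set<Term> terms = new HashSet<>();
      String field = null;
      while (true) {
        if (iter.next()) {      // true: the iterator moved to a new field
          field = iter.field;
          if (field == null) {  // null field: iteration is done
            break;
          }
        }
        // iter.bytes is reused across calls, so deep-copy before storing
        terms.add(new Term(field, BytesRef.deepCopyOf(iter.bytes)));
      }
      return terms;
    }
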
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java b/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java
index 319d7bc..c656965 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDocumentsWriterStallControl.java
@@ -33,7 +33,7 @@
 public class TestDocumentsWriterStallControl extends LuceneTestCase {
   
   public void testSimpleStall() throws InterruptedException {
-    DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+    DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(newIndexWriterConfig());
    
     ctrl.updateStalled(false);
     Thread[] waitThreads = waitThreads(atLeast(1), ctrl);
@@ -55,7 +55,7 @@
   }
   
   public void testRandom() throws InterruptedException {
-    final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+    final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(newIndexWriterConfig());
     ctrl.updateStalled(false);
     
     Thread[] stallThreads = new Thread[atLeast(3)];
@@ -96,7 +96,7 @@
   }
   
   public void testAccquireReleaseRace() throws InterruptedException {
-    final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
+    final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(newIndexWriterConfig());
     ctrl.updateStalled(false);
     final AtomicBoolean stop = new AtomicBoolean(false);
     final AtomicBoolean checkPoint = new AtomicBoolean(true);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
index 83f9ae8..2e89c3e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestDuelingCodecs.java
@@ -26,6 +26,7 @@
 import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
@@ -41,7 +42,9 @@
   private Directory rightDir;
   private IndexReader rightReader;
   private Codec rightCodec;
-  
+  private RandomIndexWriter leftWriter;
+  private RandomIndexWriter rightWriter;
+  private long seed;
   private String info;  // for debugging
 
   @Override
@@ -55,10 +58,10 @@
     leftCodec = Codec.forName("SimpleText");
     rightCodec = new RandomCodec(random());
 
-    leftDir = newDirectory();
-    rightDir = newDirectory();
+    leftDir = newFSDirectory(createTempDir("leftDir"));
+    rightDir = newFSDirectory(createTempDir("rightDir"));
 
-    long seed = random().nextLong();
+    seed = random().nextLong();
 
     // must use same seed because of random payloads, etc
     int maxTermLength = TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH);
@@ -80,44 +83,23 @@
     rightConfig.setMergePolicy(newLogMergePolicy());
 
     // must use same seed because of random docvalues fields, etc
-    RandomIndexWriter leftWriter = new RandomIndexWriter(new Random(seed), leftDir, leftConfig);
-    RandomIndexWriter rightWriter = new RandomIndexWriter(new Random(seed), rightDir, rightConfig);
-    
-    int numdocs = atLeast(100);
-    createRandomIndex(numdocs, leftWriter, seed);
-    createRandomIndex(numdocs, rightWriter, seed);
+    leftWriter = new RandomIndexWriter(new Random(seed), leftDir, leftConfig);
+    rightWriter = new RandomIndexWriter(new Random(seed), rightDir, rightConfig);
 
-    leftReader = maybeWrapReader(leftWriter.getReader());
-    leftWriter.close();
-    rightReader = maybeWrapReader(rightWriter.getReader());
-    rightWriter.close();
-    
-    // check that our readers are valid
-    TestUtil.checkReader(leftReader);
-    TestUtil.checkReader(rightReader);
-    
     info = "left: " + leftCodec.toString() + " / right: " + rightCodec.toString();
   }
   
   @Override
   public void tearDown() throws Exception {
-    if (leftReader != null) {
-      leftReader.close();
-    }
-    if (rightReader != null) {
-      rightReader.close();   
-    }
-
-    if (leftDir != null) {
-      leftDir.close();
-    }
-    if (rightDir != null) {
-      rightDir.close();
-    }
-    
+    IOUtils.close(leftWriter,
+                  rightWriter,
+                  leftReader,
+                  rightReader,
+                  leftDir,
+                  rightDir);
     super.tearDown();
   }
-  
+
   /**
   * populates a writer with random stuff. this must be fully reproducible with the seed!
    */
@@ -160,7 +142,28 @@
    * checks the two indexes are equivalent
    */
   public void testEquals() throws IOException {
+    int numdocs = TEST_NIGHTLY ? atLeast(2000) : atLeast(100);
+    createRandomIndex(numdocs, leftWriter, seed);
+    createRandomIndex(numdocs, rightWriter, seed);
+
+    leftReader = leftWriter.getReader();
+    rightReader = rightWriter.getReader();
+    
     assertReaderEquals(info, leftReader, rightReader);
   }
 
+  public void testCrazyReaderEquals() throws IOException {
+    int numdocs = atLeast(100);
+    createRandomIndex(numdocs, leftWriter, seed);
+    createRandomIndex(numdocs, rightWriter, seed);
+
+    leftReader = wrapReader(leftWriter.getReader());
+    rightReader = wrapReader(rightWriter.getReader());
+    
+    // check that our readers are valid
+    TestUtil.checkReader(leftReader);
+    TestUtil.checkReader(rightReader);
+    
+    assertReaderEquals(info, leftReader, rightReader);
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
index ed47293..15b03d0 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestFilterLeafReader.java
@@ -137,11 +137,11 @@
     ((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
 
     writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random())));
-    IndexReader reader = new TestReader(DirectoryReader.open(directory));
-    writer.addIndexes(reader);
+    try (LeafReader reader = new TestReader(DirectoryReader.open(directory))) {
+      writer.addIndexes(SlowCodecReaderWrapper.wrap(reader));
+    }
     writer.close();
-    reader.close();
-    reader = DirectoryReader.open(target);
+    IndexReader reader = DirectoryReader.open(target);
     
     TermsEnum terms = MultiFields.getTerms(reader, "default").iterator(null);
     while (terms.next() != null) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
index 8a29a30..0b8646a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
@@ -22,6 +22,7 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.AlreadyClosedException;
@@ -114,7 +115,7 @@
     // non-existent segment:
     copyFile(dir, "_0_1" + ext, "_188_1" + ext);
 
-    String cfsFiles0[] = si0.getCodec().compoundFormat().files(si0);
+    String cfsFiles0[] = si0.getCodec() instanceof SimpleTextCodec ? new String[] { "_0.scf" } : new String[] { "_0.cfs", "_0.cfe" };
     
     // Create a bogus segment file:
     copyFile(dir, cfsFiles0[0], "_188.cfs");
@@ -127,12 +128,12 @@
     // TODO: assert is bogus (relies upon codec-specific filenames)
     assertTrue(slowFileExists(dir, "_3.fdt") || slowFileExists(dir, "_3.fld"));
     
-    String cfsFiles3[] = si3.getCodec().compoundFormat().files(si3);
+    String cfsFiles3[] = si3.getCodec() instanceof SimpleTextCodec ? new String[] { "_3.scf" } : new String[] { "_3.cfs", "_3.cfe" };
     for (String f : cfsFiles3) {
       assertTrue(!slowFileExists(dir, f));
     }
     
-    String cfsFiles1[] = si1.getCodec().compoundFormat().files(si1);
+    String cfsFiles1[] = si1.getCodec() instanceof SimpleTextCodec ? new String[] { "_1.scf" } : new String[] { "_1.cfs", "_1.cfe" };
     copyFile(dir, cfsFiles1[0], "_3.cfs");
     
     String[] filesPre = dir.listAll();
@@ -430,7 +431,7 @@
     if (ms instanceof ConcurrentMergeScheduler) {
       final ConcurrentMergeScheduler suppressFakeFail = new ConcurrentMergeScheduler() {
           @Override
-          protected void handleMergeException(Throwable exc) {
+          protected void handleMergeException(Directory dir, Throwable exc) {
             // suppress only FakeIOException:
             if (exc instanceof RuntimeException && exc.getMessage().equals("fake fail")) {
               // ok to ignore
@@ -438,13 +439,12 @@
                         && exc.getCause() != null && "fake fail".equals(exc.getCause().getMessage())) {
               // also ok to ignore
             } else {
-              super.handleMergeException(exc);
+              super.handleMergeException(dir, exc);
             }
           }
         };
       final ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) ms;
       suppressFakeFail.setMaxMergesAndThreads(cms.getMaxMergeCount(), cms.getMaxThreadCount());
-      suppressFakeFail.setMergeThreadPriority(cms.getMergeThreadPriority());
       iwc.setMergeScheduler(suppressFakeFail);
     }
 
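
Two related scheduler changes appear here and again in TestIndexWriterExceptions below: handleMergeException now also receives the Directory the merge ran against, and the setMergeThreadPriority knob is removed (judging by the new TestMergeRateLimiter below, thread-priority tuning gives way to byte-rate limiting). A condensed sketch of the new exception-suppression override, using only what these hunks show:

    import org.apache.lucene.index.ConcurrentMergeScheduler;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.MockDirectoryWrapper;

    ConcurrentMergeScheduler suppressExpected = new ConcurrentMergeScheduler() {
      @Override
      protected void handleMergeException(Directory dir, Throwable exc) {
        // Swallow only the failure the test injects on purpose...
        if (!(exc instanceof MockDirectoryWrapper.FakeIOException)) {
          super.handleMergeException(dir, exc); // ...and rethrow anything unexpected
        }
      }
    };
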
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index ca323de..ae1e573 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -767,8 +767,8 @@
     writer2.commit();
     writer2.close();
 
-    IndexReader r1 = DirectoryReader.open(dir2);
-    writer.addIndexes(r1, r1);
+    DirectoryReader r1 = DirectoryReader.open(dir2);
+    TestUtil.addIndexesSlowly(writer, r1, r1);
     writer.close();
 
     IndexReader r3 = DirectoryReader.open(dir);
@@ -2277,7 +2277,7 @@
 
     iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
         @Override
-        public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+        public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
           mergeStarted.countDown();
           try {
             closeStarted.await();
@@ -2285,7 +2285,7 @@
             Thread.currentThread().interrupt();
             throw new RuntimeException(ie);
           }
-          super.doMerge(merge);
+          super.doMerge(writer, merge);
         }
 
         @Override
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
index 692755b..3b8336a8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
@@ -799,7 +799,7 @@
       doc.addLargeText("city", text[i]);
       modifier.addDocument(doc);
     }
-    // flush (and commit if ac)
+    // flush
 
     if (VERBOSE) {
       System.out.println("TEST: now full merge");
@@ -828,7 +828,7 @@
 
     modifier.deleteDocuments(term);
 
-    // add a doc (needed for the !ac case; see below)
+    // add a doc
     // doc remains buffered
 
     if (VERBOSE) {
@@ -1250,4 +1250,122 @@
     r.close();
     d.close();
   }
+
+  public void testOnlyDeletesTriggersMergeOnClose() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    iwc.setMaxBufferedDocs(2);
+    LogDocMergePolicy mp = new LogDocMergePolicy();
+    mp.setMinMergeDocs(1);
+    iwc.setMergePolicy(mp);
+    iwc.setMergeScheduler(new SerialMergeScheduler());
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for(int i=0;i<38;i++) {
+      Document doc = w.newDocument();
+      doc.addAtom("id", ""+i);
+      w.addDocument(doc);
+    }
+    w.commit();
+
+    for(int i=0;i<18;i++) {
+      w.deleteDocuments(new Term("id", ""+i));
+    }
+
+    w.close();
+    DirectoryReader r = DirectoryReader.open(dir);
+    assertEquals(1, r.leaves().size());
+    r.close();
+
+    dir.close();
+  }
+
+  public void testOnlyDeletesTriggersMergeOnGetReader() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    iwc.setMaxBufferedDocs(2);
+    LogDocMergePolicy mp = new LogDocMergePolicy();
+    mp.setMinMergeDocs(1);
+    iwc.setMergePolicy(mp);
+    iwc.setMergeScheduler(new SerialMergeScheduler());
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for(int i=0;i<38;i++) {
+      Document doc = w.newDocument();
+      doc.addAtom("id", ""+i);
+      w.addDocument(doc);
+    }
+    w.commit();
+
+    for(int i=0;i<18;i++) {
+      w.deleteDocuments(new Term("id", ""+i));
+    }
+
+    // First one triggers, but does not reflect, the merge:
+    DirectoryReader.open(w, true).close();
+    IndexReader r = DirectoryReader.open(w, true);
+    assertEquals(1, r.leaves().size());
+    r.close();
+
+    w.close();
+    dir.close();
+  }
+
+  public void testOnlyDeletesTriggersMergeOnFlush() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    iwc.setMaxBufferedDocs(2);
+    LogDocMergePolicy mp = new LogDocMergePolicy();
+    mp.setMinMergeDocs(1);
+    iwc.setMergePolicy(mp);
+    iwc.setMergeScheduler(new SerialMergeScheduler());
+    iwc.setMaxBufferedDeleteTerms(18);
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for(int i=0;i<38;i++) {
+      Document doc = w.newDocument();
+      doc.addAtom("id", ""+i);
+      w.addDocument(doc);
+    }
+    w.commit();
+
+    for(int i=0;i<18;i++) {
+      w.deleteDocuments(new Term("id", ""+i));
+    }
+    w.commit();
+
+    DirectoryReader r = DirectoryReader.open(dir);
+    assertEquals(1, r.leaves().size());
+    r.close();
+
+    w.close();
+    dir.close();
+  }
+
+  public void testOnlyDeletesDeleteAllDocs() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    iwc.setMaxBufferedDocs(2);
+    LogDocMergePolicy mp = new LogDocMergePolicy();
+    mp.setMinMergeDocs(1);
+    iwc.setMergePolicy(mp);
+    iwc.setMergeScheduler(new SerialMergeScheduler());
+    iwc.setMaxBufferedDeleteTerms(18);
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for(int i=0;i<38;i++) {
+      Document doc = w.newDocument();
+      doc.addAtom("id", ""+i);
+      w.addDocument(doc);
+    }
+    w.commit();
+
+    for(int i=0;i<38;i++) {
+      w.deleteDocuments(new Term("id", ""+i));
+    }
+
+    DirectoryReader r = DirectoryReader.open(w, true);
+    assertEquals(0, r.leaves().size());
+    assertEquals(0, r.maxDoc());
+    r.close();
+
+    w.close();
+    dir.close();
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
index dfe3749..c7c8f6c 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
@@ -23,6 +23,7 @@
 import java.nio.file.NoSuchFileException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Random;
@@ -48,15 +49,15 @@
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
 import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
+import org.apache.lucene.util.TestUtil;
 
 @SuppressCodecs("SimpleText") // too slow here
 public class TestIndexWriterExceptions extends LuceneTestCase {
@@ -360,12 +361,19 @@
   // LUCENE-1208
   public void testExceptionJustBeforeFlush() throws IOException {
     Directory dir = newDirectory();
+
+    final AtomicBoolean doCrash = new AtomicBoolean();
+
     Analyzer analyzer = new Analyzer(Analyzer.PER_FIELD_REUSE_STRATEGY) {
       @Override
       public TokenStreamComponents createComponents(String fieldName) {
         MockTokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
         tokenizer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
-        return new TokenStreamComponents(tokenizer, new CrashingFilter(fieldName, tokenizer));
+        TokenStream stream = tokenizer;
+        if (doCrash.get()) {
+          stream = new CrashingFilter(fieldName, stream);
+        }
+        return new TokenStreamComponents(tokenizer, stream);
       }
     };
 
@@ -379,6 +387,7 @@
 
     Document crashDoc = w.newDocument();
     crashDoc.addLargeText("crash", "do it on token 4");
+    doCrash.set(true);
     try {
       w.addDocument(crashDoc);
       fail("did not hit expected exception");
@@ -1184,7 +1193,7 @@
       dir.close();
   }
 
-  // Simulate a corrupt index by removing one of the cfs
+  // Simulate a corrupt index by removing one of the
   // files and make sure we get an IOException trying to
   // open the index:
   public void testSimulatedCorruptIndex2() throws IOException {
@@ -1222,8 +1231,9 @@
     SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
     for (SegmentCommitInfo si : sis) {
       assertTrue(si.info.getUseCompoundFile());
-      String cfsFiles[] = si.info.getCodec().compoundFormat().files(si.info);
-      dir.deleteFile(cfsFiles[0]);
+      List<String> victims = new ArrayList<String>(si.info.files());
+      Collections.shuffle(victims, random());
+      dir.deleteFile(victims.get(0));
       corrupted = true;
       break;
     }
@@ -1859,16 +1869,15 @@
         if (ms instanceof ConcurrentMergeScheduler) {
           final ConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeScheduler() {
               @Override
-              protected void handleMergeException(Throwable exc) {
+              protected void handleMergeException(Directory dir, Throwable exc) {
                 // suppress only FakeIOException:
                 if (!(exc instanceof FakeIOException)) {
-                  super.handleMergeException(exc);
+                  super.handleMergeException(dir, exc);
                 }
               }
             };
           final ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) ms;
           suppressFakeIOE.setMaxMergesAndThreads(cms.getMaxMergeCount(), cms.getMaxThreadCount());
-          suppressFakeIOE.setMergeThreadPriority(cms.getMergeThreadPriority());
           iwc.setMergeScheduler(suppressFakeIOE);
         }
         
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java
index 40e3b0b..58fa981 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java
@@ -30,7 +30,9 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.TimeUnits;
+
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 
 @SuppressCodecs({ "SimpleText", "Memory", "Direct" })
@@ -278,9 +280,9 @@
         // expected
       }
       assertEquals(1, w2.maxDoc());
-      IndexReader ir = DirectoryReader.open(dir);
+      DirectoryReader ir = DirectoryReader.open(dir);
       try {
-        w2.addIndexes(new IndexReader[] {ir});
+        TestUtil.addIndexesSlowly(w2, ir);
         fail("didn't hit exception");
       } catch (IllegalStateException ise) {
         // expected
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
index 302e000..b89f085 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
@@ -319,12 +319,12 @@
               }
               writer.forceMerge(1);
             } else if (1 == method) {
-              IndexReader readers[] = new IndexReader[dirs.length];
+              DirectoryReader readers[] = new DirectoryReader[dirs.length];
               for(int i=0;i<dirs.length;i++) {
                 readers[i] = DirectoryReader.open(dirs[i]);
               }
               try {
-                writer.addIndexes(readers);
+                TestUtil.addIndexesSlowly(writer, readers);
               } finally {
                 for(int i=0;i<dirs.length;i++) {
                   readers[i].close();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java
index 2b901b4..ad212ec 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOutOfFileDescriptors.java
@@ -39,7 +39,7 @@
     dir.setRandomIOExceptionRateOnOpen(rate);
     int iters = atLeast(20);
     LineFileDocs docs = null;
-    IndexReader r = null;
+    DirectoryReader r = null;
     DirectoryReader r2 = null;
     boolean any = false;
     MockDirectoryWrapper dirCopy = null;
@@ -69,9 +69,9 @@
         if (r != null && random().nextInt(5) == 3) {
           if (random().nextBoolean()) {
             if (VERBOSE) {
-              System.out.println("TEST: addIndexes IR[]");
+              System.out.println("TEST: addIndexes LR[]");
             }
-            w.addIndexes(new IndexReader[] {r});
+            TestUtil.addIndexesSlowly(w, r);
           } else {
             if (VERBOSE) {
               System.out.println("TEST: addIndexes Directory[]");
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
index 4ac6af7..d9160bc 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterReader.java
@@ -398,7 +398,7 @@
     final Thread[] threads = new Thread[numThreads];
     IndexWriter mainWriter;
     final List<Throwable> failures = new ArrayList<>();
-    IndexReader[] readers;
+    DirectoryReader[] readers;
     boolean didClose = false;
     AtomicInteger count = new AtomicInteger(0);
     AtomicInteger numaddIndexes = new AtomicInteger(0);
@@ -416,7 +416,7 @@
         
       writer.close();
       
-      readers = new IndexReader[numDirs];
+      readers = new DirectoryReader[numDirs];
       for (int i = 0; i < numDirs; i++) {
         readers[i] = DirectoryReader.open(addDir);
       }
@@ -499,7 +499,7 @@
           numaddIndexes.incrementAndGet();
           break;
         case 2:
-          mainWriter.addIndexes(readers);
+          TestUtil.addIndexesSlowly(mainWriter, readers);
           break;
         case 3:
           mainWriter.commit();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
index 3393273..766fb55 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterWithThreads.java
@@ -630,9 +630,7 @@
                       writerRef.get().prepareCommit();
                     }
                     writerRef.get().commit();
-                  } catch (AlreadyClosedException ace) {
-                    // ok
-                  } catch (NullPointerException npe) {
+                  } catch (AlreadyClosedException | NullPointerException ace) {
                     // ok
                   } finally {
                     commitLock.unlock();
@@ -644,11 +642,7 @@
                   }
                   try {
                     writerRef.get().addDocument(docs.nextDoc());
-                  } catch (AlreadyClosedException ace) {
-                    // ok
-                  } catch (NullPointerException npe) {
-                    // ok
-                  } catch (AssertionError ae) {
+                  } catch (AlreadyClosedException | NullPointerException | AssertionError ace) {
                     // ok
                   }
                   break;
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java b/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java
new file mode 100644
index 0000000..bd1e416
--- /dev/null
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMergeRateLimiter.java
@@ -0,0 +1,38 @@
+package org.apache.lucene.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.Collections;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestMergeRateLimiter extends LuceneTestCase {
+  public void testInitDefaults() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
+    w.addDocument(w.newDocument());
+    w.close();
+    MergePolicy.OneMerge merge = new MergePolicy.OneMerge(SegmentInfos.readLatestCommit(dir).asList());
+    MergeRateLimiter rateLimiter = new MergeRateLimiter(merge);
+    assertEquals(Double.POSITIVE_INFINITY, rateLimiter.getMBPerSec(), 0.0);
+    assertTrue(rateLimiter.getMinPauseCheckBytes() > 0);
+    dir.close();
+  }
+}
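
A possible follow-on to the defaults test above, assuming MergeRateLimiter exposes the standard RateLimiter setter (setMBPerSec is an assumption, not shown in this patch):

    // Assumed API: RateLimiter-style setter, not exercised by the test above.
    MergeRateLimiter rateLimiter = new MergeRateLimiter(merge);
    rateLimiter.setMBPerSec(10.0);                      // throttle this merge to 10 MB/sec
    assertEquals(10.0, rateLimiter.getMBPerSec(), 0.0); // getter should mirror the setter
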
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestMixedDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestMixedDocValuesUpdates.java
index 554e41a..68e5b03 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestMixedDocValuesUpdates.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestMixedDocValuesUpdates.java
@@ -70,7 +70,7 @@
     int docID = 0;
     for (int i = 0; i < numRounds; i++) {
       int numDocs = atLeast(5);
-//      System.out.println("[" + Thread.currentThread().getName() + "]: round=" + i + ", numDocs=" + numDocs);
+      // System.out.println("TEST: round=" + i + ", numDocs=" + numDocs);
       for (int j = 0; j < numDocs; j++) {
         Document doc = writer.newDocument();
         doc.addAtom("id", "doc-" + docID);
@@ -94,8 +94,8 @@
       } else {
         writer.updateBinaryDocValue(new Term("key", "all"), updateField, TestBinaryDocValuesUpdates.toBytes(++fieldValues[fieldIdx]));
       }
-//      System.out.println("[" + Thread.currentThread().getName() + "]: updated field '" + updateField + "' to value " + fieldValues[fieldIdx]);
-      
+      //System.out.println("TEST: updated field '" + updateField + "' to value " + fieldValues[fieldIdx]);
+
       if (random.nextDouble() < 0.2) {
         int deleteDoc = random.nextInt(docID); // might also delete an already deleted document, ok!
         writer.deleteDocuments(new Term("id", "doc-" + deleteDoc));
@@ -136,9 +136,9 @@
 //              System.out.println("doc=" + (doc + context.docBase) + " f='" + f + "' value=" + getValue(bdv, doc, scratch));
               assertTrue(docsWithField.get(doc));
               if (field < numNDVFields) {
-                assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], ndv.get(doc));
+                assertEquals("invalid numeric value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], ndv.get(doc));
               } else {
-                assertEquals("invalid value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], TestBinaryDocValuesUpdates.getValue(bdv, doc));
+                assertEquals("invalid binary value for doc=" + doc + ", field=" + f + ", reader=" + r, fieldValues[field], TestBinaryDocValuesUpdates.getValue(bdv, doc));
               }
             }
           }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
index a13eff7..4d6a99f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestNumericDocValuesUpdates.java
@@ -284,42 +284,7 @@
     reader.close();
     dir.close();
   }
-  
-  @Test
-  public void testUpdateAndDeleteSameDocument() throws Exception {
-    // update and delete same document in same commit session
-    Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
-    conf.setMaxBufferedDocs(10); // control segment flushing
-    IndexWriter writer = new IndexWriter(dir, conf);
-    
-    writer.addDocument(doc(writer, 0));
-    writer.addDocument(doc(writer, 1));
-    
-    if (random().nextBoolean()) {
-      writer.commit();
-    }
-    
-    writer.deleteDocuments(new Term("id", "doc-0"));
-    writer.updateNumericDocValue(new Term("id", "doc-0"), "val", 17L);
-    
-    final DirectoryReader reader;
-    if (random().nextBoolean()) { // not NRT
-      writer.close();
-      reader = DirectoryReader.open(dir);
-    } else { // NRT
-      reader = DirectoryReader.open(writer, true);
-      writer.close();
-    }
-    
-    LeafReader r = reader.leaves().get(0).reader();
-    assertFalse(r.getLiveDocs().get(0));
-    assertEquals(1, r.getNumericDocValues("val").get(0)); // deletes are currently applied first
-    
-    reader.close();
-    dir.close();
-  }
-  
+
   @Test
   public void testMultipleDocValuesTypes() throws Exception {
     Directory dir = newDirectory();
@@ -1186,7 +1151,7 @@
       writer.addIndexes(dir1);
     } else {
       DirectoryReader reader = DirectoryReader.open(dir1);
-      writer.addIndexes(reader);
+      TestUtil.addIndexesSlowly(writer, reader);
       reader.close();
     }
     writer.close();
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
index e68122a..9a66fe4 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java
@@ -436,10 +436,6 @@
     protected void doSetNextReader(LeafReaderContext context) throws IOException {
       docBase = context.docBase;
     }
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
   }
   
   /** test that when freqs are omitted, that totalTermFreq and sumTotalTermFreq are -1 */
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
index a3c4579..d58ee28 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
@@ -18,6 +18,8 @@
  */
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -51,11 +53,11 @@
         SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd2)));
     
     // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
-    iwOut.addIndexes(apr);
+    iwOut.addIndexes(SlowCodecReaderWrapper.wrap(apr));
     iwOut.forceMerge(1);
     
     // 2nd try with a readerless parallel reader
-    iwOut.addIndexes(new ParallelLeafReader());
+    iwOut.addIndexes(SlowCodecReaderWrapper.wrap(new ParallelLeafReader()));
     iwOut.forceMerge(1);
 
     ParallelCompositeReader cpr = new ParallelCompositeReader(
@@ -63,11 +65,11 @@
         DirectoryReader.open(rd2));
     
     // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
-    iwOut.addIndexes(cpr);
-    iwOut.forceMerge(1);
-    
-    // 2nd try with a readerless parallel reader
-    iwOut.addIndexes(new ParallelCompositeReader());
+    List<CodecReader> leaves = new ArrayList<>();
+    for (LeafReaderContext leaf : cpr.leaves()) {
+      leaves.add(SlowCodecReaderWrapper.wrap(leaf.reader()));
+    }
+    iwOut.addIndexes(leaves.toArray(new CodecReader[0]));
     iwOut.forceMerge(1);
     
     iwOut.close();
@@ -140,7 +142,7 @@
         SlowCompositeReaderWrapper.wrap(reader2 = DirectoryReader.open(rd2)));
 
     // When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter)
-    iwOut.addIndexes(pr);
+    iwOut.addIndexes(SlowCodecReaderWrapper.wrap(pr));
 
     // ParallelReader closes any IndexReader you added to it:
     pr.close();
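
The same wrapping pattern applies to any reader whose leaves are not already CodecReaders; a self-contained sketch of the migration (addAllLeaves is an illustrative helper name):

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.CodecReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SlowCodecReaderWrapper;

class AddIndexesMigrationSketch {
  // IndexWriter.addIndexes no longer accepts arbitrary IndexReaders; each
  // leaf must be wrapped as a CodecReader first.
  static void addAllLeaves(IndexWriter writer, IndexReader reader) throws IOException {
    List<CodecReader> leaves = new ArrayList<>();
    for (LeafReaderContext ctx : reader.leaves()) {
      leaves.add(SlowCodecReaderWrapper.wrap(ctx.reader()));
    }
    writer.addIndexes(leaves.toArray(new CodecReader[0]));
  }
}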
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
index 1c61c24..3c91b5d 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPayloads.java
@@ -173,7 +173,6 @@
     // flush
     writer.close();
         
-        
     /*
      * Verify the index
      * first we test if all payloads are stored correctly
@@ -272,7 +271,6 @@
     payloadData = generateRandomData(2000);
     analyzer.setPayloadData(fieldName, payloadData, 100, 1500);
     writer.addDocument(d);
-
         
     writer.forceMerge(1);
     // flush
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java b/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java
index 9303115..4fe4a06 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPrefixCodedTerms.java
@@ -17,14 +17,14 @@
  * limitations under the License.
  */
 
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Iterator;
-import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.MergedIterator;
 import org.apache.lucene.util.TestUtil;
 
 public class TestPrefixCodedTerms extends LuceneTestCase {
@@ -32,7 +32,9 @@
   public void testEmpty() {
     PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
     PrefixCodedTerms pb = b.finish();
-    assertFalse(pb.iterator().hasNext());
+    TermIterator iter = pb.iterator();
+    assertTrue(iter.next());
+    assertNull(iter.field);
   }
   
   public void testOne() {
@@ -40,9 +42,12 @@
     PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
     b.add(term);
     PrefixCodedTerms pb = b.finish();
-    Iterator<Term> iterator = pb.iterator();
-    assertTrue(iterator.hasNext());
-    assertEquals(term, iterator.next());
+    TermIterator iter = pb.iterator();
+    assertTrue(iter.next());
+    assertEquals("foo", iter.field);
+    assertEquals("bogus", iter.bytes.utf8ToString());
+    assertTrue(iter.next());
+    assertNull(iter.field);
   }
   
   public void testRandom() {
@@ -59,11 +64,23 @@
     }
     PrefixCodedTerms pb = b.finish();
     
+    TermIterator iter = pb.iterator();
     Iterator<Term> expected = terms.iterator();
-    for (Term t : pb) {
+    String field = "";
+    //System.out.println("TEST: now iter");
+    while (true) {
+      boolean newField = iter.next();
+      //System.out.println("  newField=" + newField);
+      if (newField) {
+        field = iter.field;
+        if (field == null) {
+          break;
+        }
+      }
       assertTrue(expected.hasNext());
-      assertEquals(expected.next(), t);
+      assertEquals(expected.next(), new Term(field, iter.bytes));
     }
+
     assertFalse(expected.hasNext());
   }
 
@@ -78,12 +95,15 @@
     PrefixCodedTerms.Builder b2 = new PrefixCodedTerms.Builder();
     b2.add(t2);
     PrefixCodedTerms pb2 = b2.finish();
-    
-    Iterator<Term> merged = new MergedIterator<>(pb1.iterator(), pb2.iterator());
-    assertTrue(merged.hasNext());
-    assertEquals(t1, merged.next());
-    assertTrue(merged.hasNext());
-    assertEquals(t2, merged.next());
+
+    MergedPrefixCodedTermsIterator merged = new MergedPrefixCodedTermsIterator(Arrays.asList(new PrefixCodedTerms[] {pb1, pb2}));
+    assertTrue(merged.next());
+    assertEquals("foo", merged.field());
+    assertEquals("a", merged.term().utf8ToString());
+    assertFalse(merged.next());
+    assertEquals("b", merged.term().utf8ToString());
+    assertTrue(merged.next());
+    assertNull(merged.field());
   }
 
   @SuppressWarnings({"unchecked","rawtypes"})
@@ -95,31 +115,49 @@
       Set<Term> terms = new TreeSet<>();
       int nterms = TestUtil.nextInt(random(), 0, 10000);
       for (int j = 0; j < nterms; j++) {
-        Term term = new Term(TestUtil.randomUnicodeString(random(), 2), TestUtil.randomUnicodeString(random(), 4));
+        String field = TestUtil.randomUnicodeString(random(), 2);
+        //String field = TestUtil.randomSimpleString(random(), 2);
+        Term term = new Term(field, TestUtil.randomUnicodeString(random(), 4));
         terms.add(term);
       }
       superSet.addAll(terms);
     
       PrefixCodedTerms.Builder b = new PrefixCodedTerms.Builder();
+      //System.out.println("TEST: sub " + i + " has " + terms.size() + " terms");
       for (Term ref: terms) {
+        //System.out.println("  add " + ref.field() + " " + ref.bytes());
         b.add(ref);
       }
       pb[i] = b.finish();
     }
     
-    List<Iterator<Term>> subs = new ArrayList<>();
-    for (int i = 0; i < pb.length; i++) {
-      subs.add(pb[i].iterator());
-    }
-    
     Iterator<Term> expected = superSet.iterator();
-    // NOTE: currenlty using diamond operator on MergedIterator (without explicit Term class) causes
-    // errors on Eclipse Compiler (ecj) used for javadoc lint
-    Iterator<Term> actual = new MergedIterator<Term>(subs.toArray(new Iterator[0]));
-    while (actual.hasNext()) {
+
+    MergedPrefixCodedTermsIterator actual = new MergedPrefixCodedTermsIterator(Arrays.asList(pb));
+    String field = "";
+
+    BytesRef lastTerm = null;
+
+    while (true) {
+      if (actual.next()) {
+        field = actual.field();
+        if (field == null) {
+          break;
+        }
+        lastTerm = null;
+        //System.out.println("\nTEST: new field: " + field);
+      }
+      if (lastTerm != null && lastTerm.equals(actual.term())) {
+        continue;
+      }
+      //System.out.println("TEST: iter: field=" + field + " term=" + actual.term());
+      lastTerm = BytesRef.deepCopyOf(actual.term());
       assertTrue(expected.hasNext());
-      assertEquals(expected.next(), actual.next());
+
+      Term expectedTerm = expected.next();
+      assertEquals(expectedTerm, new Term(field, actual.term()));
     }
+
     assertFalse(expected.hasNext());
   }
 }
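
A compact sketch of the iteration idiom these rewritten tests exercise: TermIterator.next() returns true whenever the field changes, and a null field marks exhaustion (visitAll is an illustrative name):

import org.apache.lucene.index.PrefixCodedTerms;
import org.apache.lucene.index.PrefixCodedTerms.TermIterator;
import org.apache.lucene.index.Term;

class TermIteratorSketch {
  static void visitAll(PrefixCodedTerms terms) {
    TermIterator iter = terms.iterator();
    String field = "";
    while (true) {
      if (iter.next()) {      // true whenever the field changes
        field = iter.field;
        if (field == null) {  // null field: no more terms
          break;
        }
      }
      System.out.println(new Term(field, iter.bytes));
    }
  }
}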
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestReindexingReader.java b/lucene/core/src/test/org/apache/lucene/index/TestReindexingReader.java
index 0d007c7..91492d7 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestReindexingReader.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestReindexingReader.java
@@ -232,6 +232,10 @@
         // We write tiny docs, so we need tiny floor to avoid O(N^2) merging:
         tmp.setFloorSegmentMB(.01);
         iwc.setMergePolicy(tmp);
+        if (TEST_NIGHTLY) {
+          // during nightly tests, we might use too many files if we aren't careful
+          iwc.setUseCompoundFile(true);
+        }
         return iwc;
       }
 
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestRollingUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestRollingUpdates.java
index 89617c2..3656f2f 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestRollingUpdates.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestRollingUpdates.java
@@ -100,7 +100,13 @@
       updateCount++;
 
       if (doUpdate) {
-        w.updateDocument(idTerm, doc);
+        if (random().nextBoolean()) {
+          w.updateDocument(idTerm, doc);
+        } else {
+          // It's OK to not be atomic for this test (no separate thread reopening readers):
+          w.deleteDocuments(new TermQuery(idTerm));
+          w.addDocument(doc);
+        }
       } else {
         w.addDocument(doc);
       }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
index b3df2cf..1378a64 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
@@ -29,6 +29,8 @@
 import org.apache.lucene.document.FieldTypes;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.MergeInfo;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.InfoStream;
@@ -83,9 +85,10 @@
     final SegmentInfo si = new SegmentInfo(mergedDir, Version.LATEST, mergedSegment, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
 
     FieldTypes fieldTypes = FieldTypes.getFieldTypes(merge1Dir, new MockAnalyzer(random()));
-    SegmentMerger merger = new SegmentMerger(fieldTypes, Arrays.<LeafReader>asList(reader1, reader2),
+    SegmentMerger merger = new SegmentMerger(fieldTypes, Arrays.<CodecReader>asList(reader1, reader2),
         si, InfoStream.getDefault(), mergedDir,
-        MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), newIOContext(random()));
+        new FieldInfos.FieldNumbers(),
+        newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1))));
     MergeState mergeState = merger.merge();
     int docsMerged = mergeState.segmentInfo.getDocCount();
     assertTrue(docsMerged == 2);
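
The merge-flavored IOContext built above follows MergeInfo's (totalDocCount, estimatedMergeBytes, isExternal, mergeMaxNumSegments) constructor, with -1 standing for "unknown"; a short sketch under that assumption:

import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MergeInfo;

class MergeIOContextSketch {
  static IOContext unknownMerge() {
    // Unknown doc count, unknown size, not an external merge, no segment cap.
    return new IOContext(new MergeInfo(-1, -1, false, -1));
  }
}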
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestStressDeletes.java
index cd2ab74..aff3779 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestStressDeletes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestStressDeletes.java
@@ -49,6 +49,7 @@
     final Map<Integer,Boolean> exists = new ConcurrentHashMap<>();
     Thread[] threads = new Thread[TestUtil.nextInt(random(), 2, 6)];
     final CountDownLatch startingGun = new CountDownLatch(1);
+    final int deleteMode = random().nextInt(3);
     for(int i=0;i<threads.length;i++) {
       threads[i] = new Thread() {
           @Override
@@ -61,11 +62,24 @@
                   Boolean v = exists.get(id);
                   if (v == null || v.booleanValue() == false) {
                     Document doc = w.newDocument();
-                    doc.addUniqueInt("id", id);
+                    doc.addInt("id", id);
                     w.addDocument(doc);
                     exists.put(id, true);
                   } else {
-                    w.deleteDocuments(fieldTypes.newIntTerm("id", id));
+                    if (deleteMode == 0) {
+                      // Always delete by term
+                      w.deleteDocuments(fieldTypes.newIntTerm("id", id));
+                    } else if (deleteMode == 1) {
+                      // Always delete by query
+                      w.deleteDocuments(fieldTypes.newExactIntQuery("id", id));
+                    } else {
+                      // Mixed
+                      if (random().nextBoolean()) {
+                        w.deleteDocuments(fieldTypes.newIntTerm("id", id));
+                      } else {
+                        w.deleteDocuments(fieldTypes.newExactIntQuery("id", id));
+                      }
+                    }
                     exists.put(id, false);
                   }
                 }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermVectors.java b/lucene/core/src/test/org/apache/lucene/index/TestTermVectors.java
similarity index 96%
rename from lucene/core/src/test/org/apache/lucene/search/TestTermVectors.java
rename to lucene/core/src/test/org/apache/lucene/index/TestTermVectors.java
index cf9a49d..59126d1 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTermVectors.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestTermVectors.java
@@ -1,4 +1,4 @@
-package org.apache.lucene.search;
+package org.apache.lucene.index;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -31,6 +31,7 @@
 import org.apache.lucene.util.English;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -154,8 +155,8 @@
     
     IndexWriter writer = createWriter(target);
     for (Directory dir : input) {
-      IndexReader r = DirectoryReader.open(dir);
-      writer.addIndexes(r);
+      DirectoryReader r = DirectoryReader.open(dir);
+      TestUtil.addIndexesSlowly(writer, r);
       r.close();
     }
     writer.forceMerge(1);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestUniqueFields.java b/lucene/core/src/test/org/apache/lucene/index/TestUniqueFields.java
index 56018a9..8432e0d 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestUniqueFields.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestUniqueFields.java
@@ -520,8 +520,8 @@
     } catch (NotUniqueException nue) {
       // expected
     }
-    IndexReader r = mgr.acquire();
-    w.addIndexes(new IndexReader[] {r});
+    DirectoryReader r = mgr.acquire();
+    TestUtil.addIndexesSlowly(w, r);
     r.close();
     w.close();
 
diff --git a/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java b/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
index cf11e26..8071eda 100644
--- a/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
+++ b/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java
@@ -52,11 +52,6 @@
     public void setScorer(Scorer scorer) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
 
   }
   
@@ -99,42 +94,22 @@
   static final class JustCompileFieldComparator extends FieldComparator<Object> {
 
     @Override
-    public int compare(int slot1, int slot2) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public int compareBottom(int doc) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public void copy(int slot, int doc) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public void setBottom(int slot) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
     public void setTopValue(Object value) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 
     @Override
-    public FieldComparator<Object> setNextReader(LeafReaderContext context) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
     public Object value(int slot) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 
     @Override
-    public int compareTop(int doc) {
+    public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
+      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
+    }
+
+    @Override
+    public int compare(int slot1, int slot2) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
   }
@@ -260,23 +235,8 @@
     }
 
     @Override
-    public void collect(int doc) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 
     @Override
diff --git a/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java b/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java
index d4f0d74..fbf6326 100644
--- a/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java
+++ b/lucene/core/src/test/org/apache/lucene/search/MultiCollectorTest.java
@@ -27,18 +27,11 @@
 
   private static class DummyCollector extends SimpleCollector {
 
-    boolean acceptsDocsOutOfOrderCalled = false;
     boolean collectCalled = false;
     boolean setNextReaderCalled = false;
     boolean setScorerCalled = false;
 
     @Override
-    public boolean acceptsDocsOutOfOrder() {
-      acceptsDocsOutOfOrderCalled = true;
-      return true;
-    }
-
-    @Override
     public void collect(int doc) throws IOException {
       collectCalled = true;
     }
@@ -70,7 +63,6 @@
     Collector c = MultiCollector.wrap(new DummyCollector(), null, new DummyCollector());
     assertTrue(c instanceof MultiCollector);
     final LeafCollector ac = c.getLeafCollector(null);
-    assertTrue(ac.acceptsDocsOutOfOrder());
     ac.collect(1);
     c.getLeafCollector(null);
     c.getLeafCollector(null).setScorer(null);
@@ -93,13 +85,11 @@
     DummyCollector[] dcs = new DummyCollector[] { new DummyCollector(), new DummyCollector() };
     Collector c = MultiCollector.wrap(dcs);
     LeafCollector ac = c.getLeafCollector(null);
-    assertTrue(ac.acceptsDocsOutOfOrder());
     ac.collect(1);
     ac = c.getLeafCollector(null);
     ac.setScorer(null);
 
     for (DummyCollector dc : dcs) {
-      assertTrue(dc.acceptsDocsOutOfOrderCalled);
       assertTrue(dc.collectCalled);
       assertTrue(dc.setNextReaderCalled);
       assertTrue(dc.setScorerCalled);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
index 82f403f..fb4f134 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
@@ -129,11 +129,14 @@
   };
 
   public void queriesTest(Query query, int[] expDocNrs) throws Exception {
-    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, false);
+    // The asserting searcher will sometimes return the bulk scorer and
+    // sometimes return a default impl around the scorer so that we can
+    // compare BS1 and BS2
+    TopScoreDocCollector collector = TopScoreDocCollector.create(1000);
     searcher.search(query, null, collector);
     ScoreDoc[] hits1 = collector.topDocs().scoreDocs;
 
-    collector = TopScoreDocCollector.create(1000, true);
+    collector = TopScoreDocCollector.create(1000);
     searcher.search(query, null, collector);
     ScoreDoc[] hits2 = collector.topDocs().scoreDocs; 
 
@@ -281,13 +284,13 @@
         }
 
         TopFieldCollector collector = TopFieldCollector.create(sort, 1000,
-            false, true, true, true);
+            false, true, true);
 
         searcher.search(q1, null, collector);
         ScoreDoc[] hits1 = collector.topDocs().scoreDocs;
 
         collector = TopFieldCollector.create(sort, 1000,
-            false, true, true, false);
+            false, true, true);
         
         searcher.search(q1, null, collector);
         ScoreDoc[] hits2 = collector.topDocs().scoreDocs;
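
With out-of-order collection removed, the collector factories lose their trailing boolean; a sketch of the updated calls (the parameter-name comments follow the previous signatures and are assumptions):

import java.io.IOException;

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;

class CollectorFactoriesSketch {
  static void create(Sort sort) throws IOException {
    TopScoreDocCollector byScore = TopScoreDocCollector.create(1000);
    TopFieldCollector bySort = TopFieldCollector.create(sort, 1000,
        /* fillFields */ false, /* trackDocScores */ true, /* trackMaxScore */ true);
  }
}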
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java
index af08cb2..d4ff9b5 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java
@@ -713,34 +713,27 @@
     assertEquals(0, scorer.nextDoc());
     assertEquals(expected, scorer.score(), 0.0001f);
     
-    // test out-of-order (if supported)
-    if (weight.scoresDocsOutOfOrder()) {
-      final AtomicBoolean seen = new AtomicBoolean(false);
-      BulkScorer bulkScorer = weight.bulkScorer(reader.leaves().get(0), false, null);
-      assertNotNull(bulkScorer);
-      bulkScorer.score(new LeafCollector() {
-        Scorer scorer;
-        
-        @Override
-        public void setScorer(Scorer scorer) throws IOException {
-          this.scorer = scorer;
-        }
-        
-        @Override
-        public void collect(int doc) throws IOException {
-          assertFalse(seen.get());
-          assertEquals(0, doc);
-          assertEquals(expected, scorer.score(), 0.0001f);
-          seen.set(true);
-        }
-
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return true;
-        }
-      }, 1);
-      assertTrue(seen.get());
-    }
+    // test bulk scorer
+    final AtomicBoolean seen = new AtomicBoolean(false);
+    BulkScorer bulkScorer = weight.bulkScorer(reader.leaves().get(0), null);
+    assertNotNull(bulkScorer);
+    bulkScorer.score(new LeafCollector() {
+      Scorer scorer;
+      
+      @Override
+      public void setScorer(Scorer scorer) throws IOException {
+        this.scorer = scorer;
+      }
+      
+      @Override
+      public void collect(int doc) throws IOException {
+        assertFalse(seen.get());
+        assertEquals(0, doc);
+        assertEquals(expected, scorer.score(), 0.0001f);
+        seen.set(true);
+      }
+    }, 0, 1);
+    assertTrue(seen.get());
     
     // test the explanation
     Explanation expl = weight.explain(reader.leaves().get(0), 0);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java
index 5f799af..a13b2dd 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java
@@ -82,7 +82,6 @@
     index = null;
   }
 
-
   public void verifyNrHits(Query q, int expected) throws Exception {
     // bs1
     ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@@ -92,7 +91,7 @@
     assertEquals("result count", expected, h.length);
     //System.out.println("TEST: now check");
     // bs2
-    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
+    TopScoreDocCollector collector = TopScoreDocCollector.create(1000);
     s.search(q, collector);
     ScoreDoc[] h2 = collector.topDocs().scoreDocs;
     if (expected != h2.length) {
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java
index a3fbb90..2e19829 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java
@@ -16,6 +16,11 @@
  * limitations under the License.
  */
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -28,6 +33,8 @@
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
+import com.carrotsearch.randomizedtesting.generators.RandomInts;
+
 public class TestBooleanOr extends LuceneTestCase {
 
   private static String FIELD_T = "T";
@@ -179,7 +186,7 @@
     Weight w = s.createNormalizedWeight(bq);
 
     assertEquals(1, s.getIndexReader().leaves().size());
-    BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0), false, null);
+    BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0), null);
 
     final FixedBitSet hits = new FixedBitSet(docCount);
     final AtomicInteger end = new AtomicInteger();
@@ -190,21 +197,65 @@
           assertTrue("collected doc=" + doc + " beyond max=" + end, doc < end.intValue());
           hits.set(doc);
         }
-
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return true;
-        }
       };
 
     while (end.intValue() < docCount) {
+      final int min = end.intValue();
       final int inc = TestUtil.nextInt(random(), 1, 1000);
-      end.getAndAdd(inc);
-      scorer.score(c, end.intValue());
+      final int max = end.addAndGet(inc);
+      scorer.score(c, min, max);
     }
 
     assertEquals(docCount, hits.cardinality());
     r.close();
     dir.close();
   }
+
+  private static BulkScorer scorer(int... matches) {
+    return new BulkScorer() {
+      final FakeScorer scorer = new FakeScorer();
+      int i = 0;
+      @Override
+      public int score(LeafCollector collector, int min, int max) throws IOException {
+        collector.setScorer(scorer);
+        while (i < matches.length && matches[i] < min) {
+          i += 1;
+        }
+        while (i < matches.length && matches[i] < max) {
+          scorer.doc = matches[i];
+          collector.collect(scorer.doc);
+          i += 1;
+        }
+        if (i == matches.length) {
+          return DocIdSetIterator.NO_MORE_DOCS;
+        }
+        return RandomInts.randomIntBetween(random(), max, matches[i]);
+      }
+    };
+  }
+
+  // Make sure that BooleanScorer keeps working even if the sub clauses'
+  // score() returns a next candidate doc that is less than the actual next match
+  public void testSubScorerNextIsNotMatch() throws IOException {
+    final List<BulkScorer> optionalScorers = Arrays.asList(
+        scorer(100000, 1000001, 9999999),
+        scorer(4000, 1000051),
+        scorer(5000, 100000, 9999998, 9999999)
+    );
+    Collections.shuffle(optionalScorers, random());
+    BooleanScorer scorer = new BooleanScorer(null, true, 0, optionalScorers);
+    final List<Integer> matches = new ArrayList<>();
+    scorer.score(new LeafCollector() {
+
+      @Override
+      public void setScorer(Scorer scorer) throws IOException {}
+
+      @Override
+      public void collect(int doc) throws IOException {
+        matches.add(doc);
+      }
+      
+    });
+    assertEquals(Arrays.asList(4000, 5000, 100000, 1000001, 1000051, 9999998, 9999999), matches);
+  }
 }
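
The fake scorers above encode the new BulkScorer contract: score(collector, min, max) collects hits in [min, max) and returns an id at or beyond max for the next candidate document (possibly an under-estimate of the real next match), or NO_MORE_DOCS. A driver sketch under that reading, mirroring the windowed loop in the test above:

import java.io.IOException;

import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LeafCollector;

class WindowedScoringSketch {
  static void scoreAll(BulkScorer scorer, LeafCollector collector, int maxDoc, int window) throws IOException {
    int min = 0;
    while (min < maxDoc) {
      int max = Math.min(min + window, maxDoc);
      int next = scorer.score(collector, min, max);  // collects hits in [min, max)
      if (next == DocIdSetIterator.NO_MORE_DOCS) {
        break;
      }
      min = Math.max(max, next);
    }
  }
}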
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java
index f21b29d..07fcb18 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java
@@ -312,7 +312,7 @@
     SpanQuery sq2 = new SpanTermQuery(new Term(FIELD, "clckwork"));
     query.add(sq1, BooleanClause.Occur.SHOULD);
     query.add(sq2, BooleanClause.Occur.SHOULD);
-    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
+    TopScoreDocCollector collector = TopScoreDocCollector.create(1000);
     searcher.search(query, collector);
     hits = collector.topDocs().scoreDocs.length;
     for (ScoreDoc scoreDoc : collector.topDocs().scoreDocs){
@@ -324,32 +324,6 @@
     directory.close();
   }
 
-  // LUCENE-5487
-  public void testInOrderWithMinShouldMatch() throws Exception {
-    Directory dir = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
-    Document doc = w.newDocument();
-    doc.addLargeText("field", "some text here");
-    w.addDocument(doc);
-    IndexReader r = w.getReader();
-    w.close();
-    IndexSearcher s = new IndexSearcher(r) {
-        @Override
-        protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-          assertEquals(-1, collector.getClass().getSimpleName().indexOf("OutOfOrder"));
-          super.search(leaves, weight, collector);
-        }
-      };
-    BooleanQuery bq = new BooleanQuery();
-    bq.add(new TermQuery(new Term("field", "some")), BooleanClause.Occur.SHOULD);
-    bq.add(new TermQuery(new Term("field", "text")), BooleanClause.Occur.SHOULD);
-    bq.add(new TermQuery(new Term("field", "here")), BooleanClause.Occur.SHOULD);
-    bq.setMinimumNumberShouldMatch(2);
-    s.search(bq, 10);
-    r.close();
-    dir.close();
-  }
-
   public void testOneClauseRewriteOptimization() throws Exception {
     final float BOOST = 3.5F;
     final String FIELD = "content";
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java
index f99b1e0..dd73c5b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java
@@ -34,13 +34,17 @@
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery.BooleanWeight;
 import org.apache.lucene.search.Scorer.ChildScorer;
+import org.apache.lucene.search.Weight.DefaultBulkScorer;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
 
 // TODO: refactor to a base class, that collects freqs from the scorer tree
 // and test all queries with it
 public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
+
   Analyzer analyzer;
   IndexReader reader;
   IndexSearcher searcher;
@@ -62,7 +66,9 @@
     writer.addDocument(doc(writer, "nutch", "nutch is an internet search engine with web crawler and is using lucene and hadoop"));
     reader = writer.getReader();
     writer.close();
-    searcher = newSearcher(reader);
+    // we do not use newSearcher because the assertingXXX layers break
+    // the toString representations we are relying on
+    searcher = new IndexSearcher(reader);
   }
   
   @Override
@@ -73,7 +79,7 @@
   }
 
   public void testDisjunctions() throws IOException {
-    BooleanQuery bq = new BooleanQuery();
+    BooleanQuery2 bq = new BooleanQuery2();
     bq.add(new TermQuery(new Term(F1, "lucene")), BooleanClause.Occur.SHOULD);
     bq.add(new TermQuery(new Term(F2, "lucene")), BooleanClause.Occur.SHOULD);
     bq.add(new TermQuery(new Term(F2, "search")), BooleanClause.Occur.SHOULD);
@@ -85,9 +91,9 @@
   }
   
   public void testNestedDisjunctions() throws IOException {
-    BooleanQuery bq = new BooleanQuery();
+    BooleanQuery2 bq = new BooleanQuery2();
     bq.add(new TermQuery(new Term(F1, "lucene")), BooleanClause.Occur.SHOULD);
-    BooleanQuery bq2 = new BooleanQuery();
+    BooleanQuery2 bq2 = new BooleanQuery2();
     bq2.add(new TermQuery(new Term(F2, "lucene")), BooleanClause.Occur.SHOULD);
     bq2.add(new TermQuery(new Term(F2, "search")), BooleanClause.Occur.SHOULD);
     bq.add(bq2, BooleanClause.Occur.SHOULD);
@@ -128,7 +134,7 @@
     private final Set<Scorer> tqsSet = new HashSet<>();
     
     MyCollector() {
-      super(TopScoreDocCollector.create(10, true));
+      super(TopScoreDocCollector.create(10));
     }
 
     public LeafCollector getLeafCollector(LeafReaderContext context)
@@ -137,11 +143,6 @@
       return new FilterLeafCollector(super.getLeafCollector(context)) {
         
         @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return false;
-        }
-        
-        @Override
         public void setScorer(Scorer scorer) throws IOException {
           super.setScorer(scorer);
           tqsSet.clear();
@@ -203,7 +204,7 @@
   }
 
   public void testGetChildrenBoosterScorer() throws IOException {
-    final BooleanQuery query = new BooleanQuery();
+    final BooleanQuery2 query = new BooleanQuery2();
     query.add(new TermQuery(new Term(F2, "nutch")), Occur.SHOULD);
     query.add(new TermQuery(new Term(F2, "miss")), Occur.SHOULD);
     ScorerSummarizingCollector collector = new ScorerSummarizingCollector();
@@ -244,11 +245,6 @@
         public void collect(int doc) throws IOException {
           numHits[0]++;
         }
-
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return false;
-        }
       };
     }
 
@@ -274,4 +270,22 @@
       return builder;
     }
   }
+
+  static class BooleanQuery2 extends BooleanQuery {
+
+    @Override
+    public Weight createWeight(IndexSearcher searcher) throws IOException {
+      return new BooleanWeight(searcher, false) {
+        @Override
+        public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
+          Scorer scorer = scorer(context, acceptDocs);
+          if (scorer == null) {
+            return null;
+          }
+          return new DefaultBulkScorer(scorer);
+        }
+      };
+    }
+
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
index 3ec0ae5..4a10afa 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java
@@ -18,17 +18,12 @@
  */
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanQuery.BooleanWeight;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
@@ -64,115 +59,6 @@
     ir.close();
     directory.close();
   }
-  
-  public void testEmptyBucketWithMoreDocs() throws Exception {
-    // This test checks the logic of nextDoc() when all sub scorers have docs
-    // beyond the first bucket (for example). Currently, the code relies on the
-    // 'more' variable to work properly, and this test ensures that if the logic
-    // changes, we have a test to back it up.
-    
-    Directory directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), directory);
-    writer.commit();
-    IndexReader ir = writer.getReader();
-    writer.close();
-    IndexSearcher searcher = newSearcher(ir);
-    BooleanWeight weight = (BooleanWeight) new BooleanQuery().createWeight(searcher);
-    BulkScorer[] scorers = new BulkScorer[] {new BulkScorer() {
-      private int doc = -1;
-
-      @Override
-      public boolean score(LeafCollector c, int maxDoc) throws IOException {
-        assert doc == -1;
-        doc = 3000;
-        FakeScorer fs = new FakeScorer();
-        fs.doc = doc;
-        fs.score = 1.0f;
-        c.setScorer(fs);
-        c.collect(3000);
-        return false;
-      }
-    }};
-    
-    BooleanScorer bs = new BooleanScorer(weight, false, 1, Arrays.asList(scorers), Collections.<BulkScorer>emptyList(), scorers.length);
-
-    final List<Integer> hits = new ArrayList<>();
-    bs.score(new SimpleCollector() {
-      int docBase;
-      @Override
-      public void setScorer(Scorer scorer) {
-      }
-      
-      @Override
-      public void collect(int doc) {
-        hits.add(docBase+doc);
-      }
-      
-      @Override
-      protected void doSetNextReader(LeafReaderContext context) throws IOException {
-        docBase = context.docBase;
-      }
-      
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return true;
-      }
-      });
-
-    assertEquals("should have only 1 hit", 1, hits.size());
-    assertEquals("hit should have been docID=3000", 3000, hits.get(0).intValue());
-    ir.close();
-    directory.close();
-  }
-
-  public void testMoreThan32ProhibitedClauses() throws Exception {
-    final Directory d = newDirectory();
-    final RandomIndexWriter w = new RandomIndexWriter(random(), d);
-    Document doc = w.newDocument();
-    doc.addLargeText("field", "0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33");
-    w.addDocument(doc);
-    doc = w.newDocument();
-    doc.addLargeText("field", "33");
-    w.addDocument(doc);
-    final IndexReader r = w.getReader();
-    w.close();
-    // we don't wrap with AssertingIndexSearcher in order to have the original scorer in setScorer.
-    final IndexSearcher s = newSearcher(r, true, false);
-
-    final BooleanQuery q = new BooleanQuery();
-    for(int term=0;term<33;term++) {
-      q.add(new BooleanClause(new TermQuery(new Term("field", ""+term)),
-                              BooleanClause.Occur.MUST_NOT));
-    }
-    q.add(new BooleanClause(new TermQuery(new Term("field", "33")),
-                            BooleanClause.Occur.SHOULD));
-                            
-    final int[] count = new int[1];
-    s.search(q, new SimpleCollector() {
-    
-      @Override
-      public void setScorer(Scorer scorer) {
-        // Make sure we got BooleanScorer:
-        final Class<?> clazz = scorer.getClass();
-        assertEquals("Scorer is implemented by wrong class", FakeScorer.class.getName(), clazz.getName());
-      }
-      
-      @Override
-      public void collect(int doc) {
-        count[0]++;
-      }
-      
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return true;
-      }
-    });
-
-    assertEquals(1, count[0]);
-    
-    r.close();
-    d.close();
-  }
 
   /** Throws UOE if Weight.scorer is called */
   private static class CrazyMustUseBulkScorerQuery extends Query {
@@ -210,14 +96,15 @@
         }
 
         @Override
-        public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs) {
+        public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) {
           return new BulkScorer() {
 
             @Override
-            public boolean score(LeafCollector collector, int max) throws IOException {
+            public int score(LeafCollector collector, int min, int max) throws IOException {
+              assert min == 0;
               collector.setScorer(new FakeScorer());
               collector.collect(0);
-              return false;
+              return DocIdSetIterator.NO_MORE_DOCS;
             }
           };
         }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanUnevenly.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanUnevenly.java
deleted file mode 100644
index d0ecaa0..0000000
--- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanUnevenly.java
+++ /dev/null
@@ -1,132 +0,0 @@
-package org.apache.lucene.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- * BooleanQuery.scorer should be tested, when hit documents
- * are very unevenly distributed.
- */
-public class TestBooleanUnevenly extends LuceneTestCase {
-  private static IndexSearcher searcher;
-  private static IndexReader reader;
-
-  public static final String field = "field";
-  private static Directory directory;
-
-  private static int count1;
-
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    directory = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), directory, new MockAnalyzer(random()));
-    Document doc;
-    count1 = 0;
-    for (int i=0;i<2;i++) {
-      for (int j=0;j<2048;j++) {
-        doc = w.newDocument();
-        doc.addLargeText(field, "1");
-        count1 ++;
-        w.addDocument(doc);
-      }
-      for (int j=0;j<2048;j++) {
-        doc = w.newDocument();
-        doc.addLargeText(field, "2");
-        w.addDocument(doc);
-      }
-      doc = w.newDocument();
-      doc.addLargeText(field, "1");
-      count1 ++;
-      w.addDocument(doc);
-      for (int j=0;j<2048;j++) {
-        doc = w.newDocument();
-        doc.addLargeText(field, "2");
-        w.addDocument(doc);
-      }
-    }
-    reader = w.getReader();
-    searcher = newSearcher(reader);
-    w.close();
-  }
-
-  @AfterClass
-  public static void afterClass() throws Exception {
-    reader.close();
-    directory.close();
-    searcher = null;
-    reader = null;
-    directory = null;
-  }
-
-  @Test
-  public void testQueries01() throws Exception {
-    BooleanQuery query = new BooleanQuery();
-    query.add(new TermQuery(new Term(field, "1")), BooleanClause.Occur.MUST);
-    query.add(new TermQuery(new Term(field, "1")), BooleanClause.Occur.SHOULD);
-    query.add(new TermQuery(new Term(field, "2")), BooleanClause.Occur.SHOULD);
-
-    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, false);
-    searcher.search(query, null, collector);
-    TopDocs tops1 = collector.topDocs();
-    ScoreDoc[] hits1 = tops1.scoreDocs;
-    int hitsNum1 = tops1.totalHits;
-
-    collector = TopScoreDocCollector.create(1000, true);
-    searcher.search(query, null, collector);
-    TopDocs tops2 = collector.topDocs();
-    ScoreDoc[] hits2 = tops2.scoreDocs;
-    int hitsNum2 = tops2.totalHits;
-
-    assertEquals(count1, hitsNum1);
-    assertEquals(count1, hitsNum2);
-    CheckHits.checkEqual(query, hits1, hits2);
-  }
-
-  @Test
-  public void testQueries02() throws Exception {
-    BooleanQuery query = new BooleanQuery();
-    query.add(new TermQuery(new Term(field, "1")), BooleanClause.Occur.SHOULD);
-    query.add(new TermQuery(new Term(field, "1")), BooleanClause.Occur.SHOULD);
-
-    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, false);
-    searcher.search(query, null, collector);
-    TopDocs tops1 = collector.topDocs();
-    ScoreDoc[] hits1 = tops1.scoreDocs;
-    int hitsNum1 = tops1.totalHits;
-
-    collector = TopScoreDocCollector.create(1000, true);
-    searcher.search(query, null, collector);
-    TopDocs tops2 = collector.topDocs();
-    ScoreDoc[] hits2 = tops2.scoreDocs;
-    int hitsNum2 = tops2.totalHits;
-
-    assertEquals(count1, hitsNum1);
-    assertEquals(count1, hitsNum2);
-    CheckHits.checkEqual(query, hits1, hits2);
-  }
-}
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestCachingCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestCachingCollector.java
index e842909..0b6e02a 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestCachingCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestCachingCollector.java
@@ -54,25 +54,14 @@
   
   private static class NoOpCollector extends SimpleCollector {
 
-    private final boolean acceptDocsOutOfOrder;
-    
-    public NoOpCollector(boolean acceptDocsOutOfOrder) {
-      this.acceptDocsOutOfOrder = acceptDocsOutOfOrder;
-    }
-
     @Override
     public void collect(int doc) throws IOException {}
 
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return acceptDocsOutOfOrder;
-    }
-    
   }
 
   public void testBasic() throws Exception {
     for (boolean cacheScores : new boolean[] { false, true }) {
-      CachingCollector cc = CachingCollector.create(new NoOpCollector(false), cacheScores, 1.0);
+      CachingCollector cc = CachingCollector.create(new NoOpCollector(), cacheScores, 1.0);
       LeafCollector acc = cc.getLeafCollector(null);
       acc.setScorer(new MockScorer());
 
@@ -90,17 +79,12 @@
           assertEquals(prevDocID + 1, doc);
           prevDocID = doc;
         }
-
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return false;
-        }
       });
     }
   }
   
   public void testIllegalStateOnReplay() throws Exception {
-    CachingCollector cc = CachingCollector.create(new NoOpCollector(false), true, 50 * ONE_BYTE);
+    CachingCollector cc = CachingCollector.create(new NoOpCollector(), true, 50 * ONE_BYTE);
     LeafCollector acc = cc.getLeafCollector(null);
     acc.setScorer(new MockScorer());
     
@@ -112,40 +96,13 @@
     assertFalse("CachingCollector should not be cached due to low memory limit", cc.isCached());
     
     try {
-      cc.replay(new NoOpCollector(false));
+      cc.replay(new NoOpCollector());
       fail("replay should fail if CachingCollector is not cached");
     } catch (IllegalStateException e) {
       // expected
     }
   }
   
-  public void testIllegalCollectorOnReplay() throws Exception {
-    // tests that the Collector passed to replay() has an out-of-order mode that
-    // is valid with the Collector passed to the ctor
-    
-    // 'src' Collector does not support out-of-order
-    CachingCollector cc = CachingCollector.create(new NoOpCollector(false), true, 100 * ONE_BYTE);
-    LeafCollector acc = cc.getLeafCollector(null);
-    acc.setScorer(new MockScorer());
-    for (int i = 0; i < 10; i++) acc.collect(i);
-    cc.replay(new NoOpCollector(true)); // this call should not fail
-    cc.replay(new NoOpCollector(false)); // this call should not fail
-
-    // 'src' Collector supports out-of-order
-    cc = CachingCollector.create(new NoOpCollector(true), true, 100 * ONE_BYTE);
-    acc = cc.getLeafCollector(null);
-    acc.setScorer(new MockScorer());
-    for (int i = 0; i < 10; i++) acc.collect(i);
-    cc.replay(new NoOpCollector(true)); // this call should not fail
-    try {
-      cc.replay(new NoOpCollector(false)); // this call should fail
-      fail("should have failed if an in-order Collector was given to replay(), " +
-           "while CachingCollector was initialized with out-of-order collection");
-    } catch (IllegalArgumentException e) {
-      // ok
-    }
-  }
-  
   public void testCachedArraysAllocation() throws Exception {
     // tests the cached arrays allocation -- if the 'nextLength' was too high,
     // caching would terminate even if a smaller length would suffice.
@@ -154,7 +111,7 @@
     int numDocs = random().nextInt(10000) + 150;
     for (boolean cacheScores : new boolean[] { false, true }) {
       int bytesPerDoc = cacheScores ? 8 : 4;
-      CachingCollector cc = CachingCollector.create(new NoOpCollector(false),
+      CachingCollector cc = CachingCollector.create(new NoOpCollector(),
           cacheScores, bytesPerDoc * ONE_BYTE * numDocs);
       LeafCollector acc = cc.getLeafCollector(null);
       acc.setScorer(new MockScorer());
@@ -170,13 +127,13 @@
   public void testNoWrappedCollector() throws Exception {
     for (boolean cacheScores : new boolean[] { false, true }) {
       // create w/ null wrapped collector, and test that the methods work
-      CachingCollector cc = CachingCollector.create(true, cacheScores, 50 * ONE_BYTE);
+      CachingCollector cc = CachingCollector.create(cacheScores, 50 * ONE_BYTE);
       LeafCollector acc = cc.getLeafCollector(null);
       acc.setScorer(new MockScorer());
       acc.collect(0);
       
       assertTrue(cc.isCached());
-      cc.replay(new NoOpCollector(true));
+      cc.replay(new NoOpCollector());
     }
   }
   
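
After the out-of-order flag is removed, CachingCollector keeps two factories, with and without a wrapped collector; a sketch of both (ONE_MB is an illustrative RAM budget):

import org.apache.lucene.search.CachingCollector;
import org.apache.lucene.search.Collector;

class CachingCollectorFactoriesSketch {
  static final double ONE_MB = 1.0; // RAM budget in MB, illustrative

  static CachingCollector wrapping(Collector wrapped, boolean cacheScores) {
    return CachingCollector.create(wrapped, cacheScores, ONE_MB);
  }

  static CachingCollector standalone(boolean cacheScores) {
    // No wrapped collector: the cache is only useful for a later replay().
    return CachingCollector.create(cacheScores, ONE_MB);
  }
}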
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java
index 82330e5..b3d1498 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java
@@ -66,11 +66,6 @@
         assertEquals("Score differs from expected", expectedScore, this.scorer.score(), 0);
         count[0]++;
       }
-      
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return true;
-      }
     });
     assertEquals("invalid number of results", 1, count[0]);
   }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
index ae98266..8833be9 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java
@@ -480,7 +480,7 @@
     SpanQuery sq2 = new SpanTermQuery(new Term(FIELD, "clckwork"));
     query.add(sq1);
     query.add(sq2);
-    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
+    TopScoreDocCollector collector = TopScoreDocCollector.create(1000);
     searcher.search(query, collector);
     hits = collector.topDocs().scoreDocs.length;
     for (ScoreDoc scoreDoc : collector.topDocs().scoreDocs){
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
index 297e1b4..3a516d8 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java
@@ -69,10 +69,6 @@
          protected void doSetNextReader(LeafReaderContext context) throws IOException {
            base = context.docBase;
          }
-         @Override
-         public boolean acceptsDocsOutOfOrder() {
-           return true;
-         }
        });
 
     float lastScore = 0.0f;
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestEarlyTermination.java b/lucene/core/src/test/org/apache/lucene/search/TestEarlyTermination.java
index d2b4587..81831a3 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestEarlyTermination.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestEarlyTermination.java
@@ -59,7 +59,6 @@
       final IndexSearcher searcher = newSearcher(reader);
       final Collector collector = new SimpleCollector() {
 
-        final boolean outOfOrder = random().nextBoolean();
         boolean collectionTerminated = true;
 
         @Override
@@ -81,11 +80,6 @@
           }
         }
 
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return outOfOrder;
-        }
-
       };
 
       searcher.search(new MatchAllDocsQuery(), collector);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java b/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java
index 787cdd4..f818f34 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java
@@ -77,7 +77,7 @@
                          new SortField(null, SortField.Type.SCORE, reversed)
                          );
 
-    TopDocsCollector<Entry> topCollector = TopFieldCollector.create(sort, 50, false, true, true, true);
+    TopDocsCollector<Entry> topCollector = TopFieldCollector.create(sort, 50, false, true, true);
     searcher.search(newq, null, topCollector);
 
     TopDocs topDocs = topCollector.topDocs(0, 10);
@@ -146,61 +146,67 @@
   public FieldComparator<Integer> newComparator(final String fieldname, final int numHits, int sortPos, boolean reversed) throws IOException {
    return new FieldComparator<Integer>() {
 
-     SortedDocValues idIndex;
      private final int[] values = new int[numHits];
      int bottomVal;
 
      @Override
+    public LeafFieldComparator getLeafComparator(LeafReaderContext context)
+        throws IOException {
+      final SortedDocValues idIndex = DocValues.getSorted(context.reader(), fieldname);
+      return new LeafFieldComparator() {
+
+        @Override
+        public void setBottom(int slot) {
+          bottomVal = values[slot];
+        }
+
+        @Override
+        public int compareTop(int doc) {
+          throw new UnsupportedOperationException();
+        }
+
+        private int docVal(int doc) {
+          int ord = idIndex.getOrd(doc);
+          if (ord == -1) {
+            return 0;
+          } else {
+            final BytesRef term = idIndex.lookupOrd(ord);
+            Integer prio = priority.get(term);
+            return prio == null ? 0 : prio.intValue();
+          }
+        }
+
+        @Override
+        public int compareBottom(int doc) {
+          return docVal(doc) - bottomVal;
+        }
+
+        @Override
+        public void copy(int slot, int doc) {
+          values[slot] = docVal(doc);
+        }
+
+        @Override
+        public void setScorer(Scorer scorer) {}
+      };
+    }
+
+     @Override
      public int compare(int slot1, int slot2) {
        return values[slot2] - values[slot1];  // values will be small enough that there is no overflow concern
      }
 
      @Override
-     public void setBottom(int slot) {
-       bottomVal = values[slot];
-     }
-
-     @Override
      public void setTopValue(Integer value) {
        throw new UnsupportedOperationException();
      }
 
-     private int docVal(int doc) {
-       int ord = idIndex.getOrd(doc);
-       if (ord == -1) {
-         return 0;
-       } else {
-         final BytesRef term = idIndex.lookupOrd(ord);
-         Integer prio = priority.get(term);
-         return prio == null ? 0 : prio.intValue();
-       }
-     }
-
-     @Override
-     public int compareBottom(int doc) {
-       return docVal(doc) - bottomVal;
-     }
-
-     @Override
-     public void copy(int slot, int doc) {
-       values[slot] = docVal(doc);
-     }
-
-     @Override
-     public FieldComparator<Integer> setNextReader(LeafReaderContext context) throws IOException {
-       idIndex = DocValues.getSorted(context.reader(), fieldname);
-       return this;
-     }
-
      @Override
      public Integer value(int slot) {
        return Integer.valueOf(values[slot]);
      }
 
-     @Override
-     public int compareTop(int doc) {
-       throw new UnsupportedOperationException();
-     }
+
    };
  }
 }
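This rewrite tracks the trunk split of `FieldComparator`: slot-to-slot work (`compare`, `value`, `setTopValue`) stays on the top-level comparator, while per-document work moves onto a `LeafFieldComparator` obtained from `getLeafComparator(LeafReaderContext)`, which can capture per-segment state (here the `SortedDocValues`) in a final local instead of a mutable field. A stripped-down sketch of the shape, with the doc-values lookup stubbed out:

```java
// Hypothetical integer comparator showing the two-level API.
FieldComparator<Integer> comparator = new FieldComparator<Integer>() {
  final int[] values = new int[10]; // one slot per queue entry
  int bottom;

  @Override
  public int compare(int slot1, int slot2) {
    return Integer.compare(values[slot1], values[slot2]);
  }

  @Override
  public void setTopValue(Integer value) {
    throw new UnsupportedOperationException(); // no searchAfter support here
  }

  @Override
  public Integer value(int slot) {
    return values[slot];
  }

  @Override
  public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
    // resolve per-segment doc values here, then close over them below
    return new LeafFieldComparator() {
      @Override public void setBottom(int slot) { bottom = values[slot]; }
      @Override public int compareBottom(int doc) { return Integer.compare(docVal(doc), bottom); }
      @Override public int compareTop(int doc) { throw new UnsupportedOperationException(); }
      @Override public void copy(int slot, int doc) { values[slot] = docVal(doc); }
      @Override public void setScorer(Scorer scorer) {}
    };
  }

  private int docVal(int doc) {
    return 0; // stand-in for a per-segment doc values lookup
  }
};
```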
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUFilterCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUFilterCache.java
index d1b3483..228c31f 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUFilterCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUFilterCache.java
@@ -27,6 +27,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.lucene.document.Document;
@@ -532,4 +533,167 @@
     dir.close();
   }
 
+  public void testFineGrainedStats() throws IOException {
+    Directory dir1 = newDirectory();
+    final RandomIndexWriter w1 = new RandomIndexWriter(random(), dir1);
+    Directory dir2 = newDirectory();
+    final RandomIndexWriter w2 = new RandomIndexWriter(random(), dir2);
+
+    final List<String> colors = Arrays.asList("blue", "red", "green", "yellow");
+
+    for (RandomIndexWriter w : Arrays.asList(w1, w2)) {
+      for (int i = 0; i < 10; ++i) {
+        Document doc = w.newDocument();
+        doc.addAtom("color", RandomPicks.randomFrom(random(), colors));
+        w.addDocument(doc);
+        if (random().nextBoolean()) {
+          w.getReader().close();
+        }
+      }
+    }
+
+    final DirectoryReader reader1 = w1.getReader();
+    final int segmentCount1 = reader1.leaves().size();
+    final IndexSearcher searcher1 = new IndexSearcher(reader1);
+
+    final DirectoryReader reader2 = w2.getReader();
+    final int segmentCount2 = reader2.leaves().size();
+    final IndexSearcher searcher2 = new IndexSearcher(reader2);
+
+    final Map<Object, Integer> indexId = new HashMap<>();
+    for (LeafReaderContext ctx : reader1.leaves()) {
+      indexId.put(ctx.reader().getCoreCacheKey(), 1);
+    }
+    for (LeafReaderContext ctx : reader2.leaves()) {
+      indexId.put(ctx.reader().getCoreCacheKey(), 2);
+    }
+
+    final AtomicLong hitCount1 = new AtomicLong();
+    final AtomicLong hitCount2 = new AtomicLong();
+    final AtomicLong missCount1 = new AtomicLong();
+    final AtomicLong missCount2 = new AtomicLong();
+
+    final AtomicLong ramBytesUsage = new AtomicLong();
+    final AtomicLong cacheSize = new AtomicLong();
+
+    final LRUFilterCache filterCache = new LRUFilterCache(2, 10000000) {
+      @Override
+      protected void onHit(Object readerCoreKey, Filter filter) {
+        super.onHit(readerCoreKey, filter);
+        switch(indexId.get(readerCoreKey).intValue()) {
+          case 1:
+            hitCount1.incrementAndGet();
+            break;
+          case 2:
+            hitCount2.incrementAndGet();
+            break;
+          default:
+            throw new AssertionError();
+        }
+      }
+
+      @Override
+      protected void onMiss(Object readerCoreKey, Filter filter) {
+        super.onMiss(readerCoreKey, filter);
+        switch(indexId.get(readerCoreKey).intValue()) {
+          case 1:
+            missCount1.incrementAndGet();
+            break;
+          case 2:
+            missCount2.incrementAndGet();
+            break;
+          default:
+            throw new AssertionError();
+        }
+      }
+
+      @Override
+      protected void onFilterCache(Filter filter, long ramBytesUsed) {
+        super.onFilterCache(filter, ramBytesUsed);
+        ramBytesUsage.addAndGet(ramBytesUsed);
+      }
+
+      @Override
+      protected void onFilterEviction(Filter filter, long ramBytesUsed) {
+        super.onFilterEviction(filter, ramBytesUsed);
+        ramBytesUsage.addAndGet(-ramBytesUsed);
+      }
+
+      @Override
+      protected void onDocIdSetCache(Object readerCoreKey, long ramBytesUsed) {
+        super.onDocIdSetCache(readerCoreKey, ramBytesUsed);
+        ramBytesUsage.addAndGet(ramBytesUsed);
+        cacheSize.incrementAndGet();
+      }
+
+      @Override
+      protected void onDocIdSetEviction(Object readerCoreKey, int numEntries, long sumRamBytesUsed) {
+        super.onDocIdSetEviction(readerCoreKey, numEntries, sumRamBytesUsed);
+        ramBytesUsage.addAndGet(-sumRamBytesUsed);
+        cacheSize.addAndGet(-numEntries);
+      }
+
+      @Override
+      protected void onClear() {
+        super.onClear();
+        ramBytesUsage.set(0);
+        cacheSize.set(0);
+      }
+    };
+
+    final Filter filter = new QueryWrapperFilter(new TermQuery(new Term("color", "red")));
+    final Filter filter2 = new QueryWrapperFilter(new TermQuery(new Term("color", "blue")));
+    final Filter filter3 = new QueryWrapperFilter(new TermQuery(new Term("color", "green")));
+
+    // search on searcher1
+    Filter cached = filterCache.doCache(filter, FilterCachingPolicy.ALWAYS_CACHE);
+    for (int i = 0; i < 10; ++i) {
+      searcher1.search(new ConstantScoreQuery(cached), 1);
+    }
+    assertEquals(9 * segmentCount1, hitCount1.longValue());
+    assertEquals(0, hitCount2.longValue());
+    assertEquals(segmentCount1, missCount1.longValue());
+    assertEquals(0, missCount2.longValue());
+
+    // then on searcher2
+    cached = filterCache.doCache(filter2, FilterCachingPolicy.ALWAYS_CACHE);
+    for (int i = 0; i < 20; ++i) {
+      searcher2.search(new ConstantScoreQuery(cached), 1);
+    }
+    assertEquals(9 * segmentCount1, hitCount1.longValue());
+    assertEquals(19 * segmentCount2, hitCount2.longValue());
+    assertEquals(segmentCount1, missCount1.longValue());
+    assertEquals(segmentCount2, missCount2.longValue());
+
+    // now on searcher1 again to trigger evictions
+    cached = filterCache.doCache(filter3, FilterCachingPolicy.ALWAYS_CACHE);
+    for (int i = 0; i < 30; ++i) {
+      searcher1.search(new ConstantScoreQuery(cached), 1);
+    }
+    assertEquals(segmentCount1, filterCache.getEvictionCount());
+    assertEquals(38 * segmentCount1, hitCount1.longValue());
+    assertEquals(19 * segmentCount2, hitCount2.longValue());
+    assertEquals(2 * segmentCount1, missCount1.longValue());
+    assertEquals(segmentCount2, missCount2.longValue());
+
+    // check that the recomputed stats are the same as those reported by the cache
+    assertEquals(filterCache.ramBytesUsed(), (segmentCount1 + segmentCount2) * LRUFilterCache.HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsage.longValue());
+    assertEquals(filterCache.getCacheSize(), cacheSize.longValue());
+
+    reader1.close();
+    reader2.close();
+    w1.close();
+    w2.close();
+
+    assertEquals(filterCache.ramBytesUsed(), ramBytesUsage.longValue());
+    assertEquals(0, cacheSize.longValue());
+
+    filterCache.clear();
+    assertEquals(0, ramBytesUsage.longValue());
+    assertEquals(0, cacheSize.longValue());
+
+    dir1.close();
+    dir2.close();
+  }
+
 }
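The new test drives the trunk `LRUFilterCache` instrumentation hooks: `onHit`/`onMiss` fire on each per-segment lookup, `onFilterCache`/`onFilterEviction` track the filters themselves, `onDocIdSetCache`/`onDocIdSetEviction` track the cached per-segment sets, and `onClear` resets everything. A minimal sketch of wiring the hooks to external counters (sizes and counter names are illustrative; `filter` is assumed in scope):

```java
final AtomicLong hits = new AtomicLong();
final AtomicLong misses = new AtomicLong();

// Cache at most 256 filters / 32 MB; count every per-segment lookup.
LRUFilterCache cache = new LRUFilterCache(256, 32L * 1024 * 1024) {
  @Override
  protected void onHit(Object readerCoreKey, Filter filter) {
    super.onHit(readerCoreKey, filter); // keep the built-in stats in sync
    hits.incrementAndGet();
  }

  @Override
  protected void onMiss(Object readerCoreKey, Filter filter) {
    super.onMiss(readerCoreKey, filter);
    misses.incrementAndGet();
  }
};
Filter cached = cache.doCache(filter, FilterCachingPolicy.ALWAYS_CACHE);
```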
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java
index 156c477..1de9057 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java
@@ -236,10 +236,6 @@
       protected void doSetNextReader(LeafReaderContext context) throws IOException {
         base = context.docBase;
       }
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return true;
-      }
     });
 
     //
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java
index 1768d9c..a97da89 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java
@@ -85,7 +85,7 @@
     IndexSearcher searcher = newSearcher(ir);
     Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher);
     Scorer s = new SimpleScorer(fake);
-    TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.length, true);
+    TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.length);
     Collector c = new PositiveScoresOnlyCollector(tdc);
     LeafCollector ac = c.getLeafCollector(ir.leaves().get(0));
     ac.setScorer(s);
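`TopScoreDocCollector.create` loses its in-order boolean in the same sweep; only the hit count (and optionally a `ScoreDoc` to page after) remains:

```java
// Top 10 hits by score; no docsScoredInOrder argument anymore.
TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(10);
searcher.search(query, tdc);
TopDocs page1 = tdc.topDocs();

// Paging: resume after the last hit of the previous page.
TopDocsCollector<ScoreDoc> tdc2 =
    TopScoreDocCollector.create(10, page1.scoreDocs[page1.scoreDocs.length - 1]);
```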
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java
index b082da9..3fd3c2b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java
@@ -90,10 +90,6 @@
     @Override public void setScorer(Scorer scorer) {
       this.scorer = new ScoreCachingWrappingScorer(scorer);
     }
-    
-    @Override public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
 
   }
 
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
index 0ea422b..fd4a868 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java
@@ -114,10 +114,6 @@
     protected void doSetNextReader(LeafReaderContext context) throws IOException {
       docBase = context.docBase;
     }
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
   }
 
 
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java b/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
index 229b7b2..0daa070 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSimilarity.java
@@ -109,10 +109,6 @@
          protected void doSetNextReader(LeafReaderContext context) throws IOException {
            base = context.docBase;
          }
-         @Override
-        public boolean acceptsDocsOutOfOrder() {
-           return true;
-         }
        });
 
     PhraseQuery pq = new PhraseQuery();
@@ -131,10 +127,6 @@
            //System.out.println("Doc=" + doc + " score=" + score);
            assertEquals(1.0f, scorer.score(), 0);
          }
-         @Override
-         public boolean acceptsDocsOutOfOrder() {
-           return true;
-         }
        });
 
     pq.setSlop(2);
@@ -150,10 +142,6 @@
         //System.out.println("Doc=" + doc + " score=" + score);
         assertEquals(2.0f, scorer.score(), 0);
       }
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return true;
-      }
     });
 
     reader.close();
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSloppyPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestSloppyPhraseQuery.java
index 2fb3c37..4ab76c5 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSloppyPhraseQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSloppyPhraseQuery.java
@@ -190,11 +190,6 @@
       totalHits++;
       max = Math.max(max, scorer.freq());
     }
-
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
-    }
   }
   
   /** checks that no scores or freqs are infinite */
@@ -212,11 +207,6 @@
         assertFalse(Float.isInfinite(scorer.freq()));
         assertFalse(Float.isInfinite(scorer.score()));
       }
-      
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return false;
-      }
     });
     QueryUtils.check(random(), pq, searcher);
   }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSort.java b/lucene/core/src/test/org/apache/lucene/search/TestSort.java
index 38d8476..d6def0b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSort.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSort.java
@@ -28,6 +28,7 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 
 /*
@@ -706,4 +707,59 @@
     ir.close();
     dir.close();
   }
+
+  /** Tests sorting on multiple sort fields */
+  public void testMultiSort() throws IOException {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    Document doc = writer.newDocument();
+    doc.addAtom("value1", new BytesRef("foo"));
+    doc.addInt("value2", 0);
+    writer.addDocument(doc);
+
+    doc = writer.newDocument();
+    doc.addAtom("value1", new BytesRef("bar"));
+    doc.addInt("value2", 1);
+    writer.addDocument(doc);
+
+    doc = writer.newDocument();
+    doc.addAtom("value1", new BytesRef("bar"));
+    doc.addInt("value2", 0);
+    writer.addDocument(doc);
+
+    doc = writer.newDocument();
+    doc.addAtom("value1", new BytesRef("foo"));
+    doc.addInt("value2", 1);
+    writer.addDocument(doc);
+
+    IndexReader ir = writer.getReader();
+    writer.close();
+    
+    IndexSearcher searcher = newSearcher(ir);
+    Sort sort = new Sort(
+        new SortField("value1", SortField.Type.STRING),
+        new SortField("value2", SortField.Type.LONG));
+
+    TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
+    assertEquals(4, td.totalHits);
+    // 'bar' comes before 'foo'
+    assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).getBinary("value1").utf8ToString());
+    assertEquals("bar", searcher.doc(td.scoreDocs[1].doc).getBinary("value1").utf8ToString());
+    assertEquals("foo", searcher.doc(td.scoreDocs[2].doc).getBinary("value1").utf8ToString());
+    assertEquals("foo", searcher.doc(td.scoreDocs[3].doc).getBinary("value1").utf8ToString());
+    // 0 comes before 1
+    assertEquals(0, searcher.doc(td.scoreDocs[0].doc).getInt("value2").intValue());
+    assertEquals(1, searcher.doc(td.scoreDocs[1].doc).getInt("value2").intValue());
+    assertEquals(0, searcher.doc(td.scoreDocs[2].doc).getInt("value2").intValue());
+    assertEquals(1, searcher.doc(td.scoreDocs[3].doc).getInt("value2").intValue());
+
+    // Now with numHits=1, forcing priority-queue overflow
+    td = searcher.search(new MatchAllDocsQuery(), 1, sort);
+    assertEquals(4, td.totalHits);
+    assertEquals("bar", searcher.doc(td.scoreDocs[0].doc).getBinary("value1").utf8ToString());
+    assertEquals(0, searcher.doc(td.scoreDocs[0].doc).getInt("value2").intValue());
+
+    ir.close();
+    dir.close();
+  }
 }
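The added test pins down tie-breaking across sort fields: documents equal on the primary `SortField` are ordered by the secondary. Usage boils down to (field names as in the test):

```java
// Primary sort on value1 (string); ties broken by value2 (numeric).
Sort sort = new Sort(
    new SortField("value1", SortField.Type.STRING),
    new SortField("value2", SortField.Type.LONG));
TopDocs td = searcher.search(new MatchAllDocsQuery(), 10, sort);
```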
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSortLocale.java b/lucene/core/src/test/org/apache/lucene/search/TestSortLocale.java
index 3aea903..9f078cb 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSortLocale.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSortLocale.java
@@ -85,15 +85,18 @@
     IndexSearcher is = newSearcher(ir);
     
     int numChecks = atLeast(100);
-    for (int i = 0; i < numChecks; i++) {
-      String start = TestUtil.randomSimpleString(random());
-      String end = TestUtil.randomSimpleString(random());
-      Query query = new ConstantScoreQuery(fieldTypes.newStringDocValuesRangeFilter("collated", start, true, end, true));
-      doTestRanges(is, start, end, query, collator);
+
+    try {
+      for (int i = 0; i < numChecks; i++) {
+        String start = TestUtil.randomSimpleString(random());
+        String end = TestUtil.randomSimpleString(random());
+        Query query = new ConstantScoreQuery(fieldTypes.newStringDocValuesRangeFilter("collated", start, true, end, true));
+        doTestRanges(is, start, end, query, collator);
+      }
+    } finally {
+      ir.close();
+      dir.close();
     }
-    
-    ir.close();
-    dir.close();
   }
   
   private void doTestRanges(IndexSearcher is, String startPoint, String endPoint, Query query, Collator collator) throws Exception { 
@@ -103,8 +106,8 @@
     TopDocs docs = is.search(query, is.getIndexReader().maxDoc());
     for (ScoreDoc doc : docs.scoreDocs) {
       String value = is.doc(doc.doc).getString("field");
-      assertTrue(collator.compare(value, startPoint) >= 0);
-      assertTrue(collator.compare(value, endPoint) <= 0);
+      assertTrue(collate(collator, value, startPoint) >= 0);
+      assertTrue(collate(collator, value, endPoint) <= 0);
     }
     
     // negative test
@@ -114,7 +117,7 @@
     docs = is.search(bq, is.getIndexReader().maxDoc());
     for (ScoreDoc doc : docs.scoreDocs) {
       String value = is.doc(doc.doc).getString("field");
-      assertTrue(collator.compare(value, startPoint) < 0 || collator.compare(value, endPoint) > 0);
+      assertTrue(collate(collator, value, startPoint) < 0 || collate(collator, value, endPoint) > 0);
     }
   }
 }
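Moving the close calls into `finally` keeps the reader and directory from leaking when a randomized iteration throws. The same pattern, sketched with `IOUtils` (the `runChecks` helper is hypothetical):

```java
DirectoryReader ir = DirectoryReader.open(dir);
try {
  runChecks(ir); // the body that may throw
} finally {
  IOUtils.close(ir, dir); // released even on failure
}
```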
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java b/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java
index 2130ac6..d334e49 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java
@@ -123,8 +123,7 @@
   @Test
   public void testTermQuery() throws Exception {
     TermQuery q = new TermQuery(new Term("f", "d"));
-    CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10,
-        true));
+    CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10));
     s.search(q, null, c);
     final int maxDocs = s.getIndexReader().maxDoc();
     assertEquals(maxDocs, c.docCounts.size());
@@ -164,7 +163,7 @@
     
     for (final Set<String> occur : occurList) {
       CountingCollector c = new CountingCollector(TopScoreDocCollector.create(
-          10, true), occur);
+          10), occur);
       s.search(query, null, c);
       final int maxDocs = s.getIndexReader().maxDoc();
       assertEquals(maxDocs, c.docCounts.size());
@@ -196,8 +195,7 @@
     PhraseQuery q = new PhraseQuery();
     q.add(new Term("f", "b"));
     q.add(new Term("f", "c"));
-    CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10,
-        true));
+    CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10));
     s.search(q, null, c);
     final int maxDocs = s.getIndexReader().maxDoc();
     assertEquals(maxDocs, c.docCounts.size());
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java
index c5ddee3..ffb0933 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java
@@ -76,7 +76,7 @@
     Weight weight = indexSearcher.createNormalizedWeight(termQuery);
     assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
     LeafReaderContext context = (LeafReaderContext)indexSearcher.getTopReaderContext();
-    BulkScorer ts = weight.bulkScorer(context, true, context.reader().getLiveDocs());
+    BulkScorer ts = weight.bulkScorer(context, context.reader().getLiveDocs());
     // we have 2 documents with the term all in them, one document for all the
     // other values
     final List<TestHit> docs = new ArrayList<>();
@@ -105,11 +105,6 @@
       protected void doSetNextReader(LeafReaderContext context) throws IOException {
         base = context.docBase;
       }
-      
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return true;
-      }
     });
     assertTrue("docs Size: " + docs.size() + " is not: " + 2, docs.size() == 2);
     TestHit doc0 = docs.get(0);
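`Weight.bulkScorer` correspondingly drops its `scoreDocsInOrder` flag; callers now pass just the context and live docs. A sketch against the trunk signature (`searcher` and `query` assumed in scope):

```java
Weight weight = searcher.createNormalizedWeight(query);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
BulkScorer bs = weight.bulkScorer(context, context.reader().getLiveDocs());
```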
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java
index 33312c5..326b705 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java
@@ -355,11 +355,6 @@
     protected void doSetNextReader(LeafReaderContext context) throws IOException {
       docBase = context.docBase;
     }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
-    }
 
   }
 
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
index a6226d1..e721e04 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
@@ -30,7 +30,6 @@
   private static final class MyTopsDocCollector extends TopDocsCollector<ScoreDoc> {
 
     private int idx = 0;
-    private int base = 0;
     
     public MyTopsDocCollector(int size) {
       super(new HitQueue(size, false));
@@ -54,24 +53,21 @@
     }
     
     @Override
-    public void collect(int doc) {
-      ++totalHits;
-      pq.insertWithOverflow(new ScoreDoc(doc + base, scores[idx++]));
-    }
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      final int base = context.docBase;
+      return new LeafCollector() {
+        
+        @Override
+        public void collect(int doc) {
+          ++totalHits;
+          pq.insertWithOverflow(new ScoreDoc(doc + base, scores[idx++]));
+        }
 
-    @Override
-    protected void doSetNextReader(LeafReaderContext context) throws IOException {
-      base = context.docBase;
-    }
-
-    @Override
-    public void setScorer(Scorer scorer) {
-      // Don't do anything. Assign scores in random
-    }
-    
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
+        @Override
+        public void setScorer(Scorer scorer) {
+          // Don't do anything. Scores are assigned at random elsewhere.
+        }
+      };
     }
 
   }
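The rewrite makes the trunk collection flow explicit: `getLeafCollector(context)` runs once per segment and returns the object that actually sees hits, so per-segment state such as `docBase` becomes a final local captured by the returned `LeafCollector` rather than a mutable field. The skeleton of the pattern (the per-hit callback is hypothetical):

```java
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
  final int base = context.docBase; // fixed for this segment
  return new LeafCollector() {
    @Override
    public void setScorer(Scorer scorer) throws IOException {
      // keep the scorer only if scores are needed
    }

    @Override
    public void collect(int doc) throws IOException {
      onHit(base + doc); // hypothetical per-hit callback
    }
  };
}
```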
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java
index 2d32299..48ab87c 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java
@@ -191,7 +191,7 @@
       final TopDocs topHits;
       if (sort == null) {
         if (useFrom) {
-          TopScoreDocCollector c = TopScoreDocCollector.create(numHits, random().nextBoolean());
+          TopScoreDocCollector c = TopScoreDocCollector.create(numHits);
           searcher.search(query, c);
           from = TestUtil.nextInt(random(), 0, numHits - 1);
           size = numHits - from;
@@ -210,7 +210,7 @@
           topHits = searcher.search(query, numHits);
         }
       } else {
-        final TopFieldCollector c = TopFieldCollector.create(sort, numHits, true, true, true, random().nextBoolean());
+        final TopFieldCollector c = TopFieldCollector.create(sort, numHits, true, true, true);
         searcher.search(query, c);
         if (useFrom) {
           from = TestUtil.nextInt(random(), 0, numHits - 1);
@@ -254,7 +254,7 @@
         if (sort == null) {
           subHits = subSearcher.search(w, numHits);
         } else {
-          final TopFieldCollector c = TopFieldCollector.create(sort, numHits, true, true, true, random().nextBoolean());
+          final TopFieldCollector c = TopFieldCollector.create(sort, numHits, true, true, true);
           subSearcher.search(w, c);
           subHits = c.topDocs(0, numHits);
         }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
index 1d30a3f..5dfd1b9 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopFieldCollector.java
@@ -61,7 +61,7 @@
     for(int i = 0; i < sort.length; i++) {
       Query q = new MatchAllDocsQuery();
       TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, false,
-          false, false, true);
+          false, false);
       
       is.search(q, tdc);
       
@@ -80,7 +80,7 @@
     for(int i = 0; i < sort.length; i++) {
       Query q = new MatchAllDocsQuery();
       TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, false,
-          false, true);
+          false);
       
       is.search(q, tdc);
       
@@ -100,7 +100,7 @@
     for(int i = 0; i < sort.length; i++) {
       Query q = new MatchAllDocsQuery();
       TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
-          false, true);
+          false);
       
       is.search(q, tdc);
       
@@ -121,7 +121,7 @@
     for(int i = 0; i < sort.length; i++) {
       Query q = new MatchAllDocsQuery();
       TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
-          false, true);
+          false);
 
       is.search(q, tdc);
       
@@ -141,7 +141,7 @@
     for(int i = 0; i < sort.length; i++) {
       Query q = new MatchAllDocsQuery();
       TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true,
-          true, true);
+          true);
       
       is.search(q, tdc);
       
@@ -153,110 +153,13 @@
       assertTrue(!Float.isNaN(td.getMaxScore()));
     }
   }
-  
-  public void testOutOfOrderDocsScoringSort() throws Exception {
 
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
-    boolean[][] tfcOptions = new boolean[][] {
-        new boolean[] { false, false, false },
-        new boolean[] { false, false, true },
-        new boolean[] { false, true, false },
-        new boolean[] { false, true, true },
-        new boolean[] { true, false, false },
-        new boolean[] { true, false, true },
-        new boolean[] { true, true, false },
-        new boolean[] { true, true, true },
-    };
-    String[] actualTFCClasses = new String[] {
-        "OutOfOrderOneComparatorNonScoringCollector", 
-        "OutOfOrderOneComparatorScoringMaxScoreCollector", 
-        "OutOfOrderOneComparatorScoringNoMaxScoreCollector", 
-        "OutOfOrderOneComparatorScoringMaxScoreCollector", 
-        "OutOfOrderOneComparatorNonScoringCollector", 
-        "OutOfOrderOneComparatorScoringMaxScoreCollector", 
-        "OutOfOrderOneComparatorScoringNoMaxScoreCollector", 
-        "OutOfOrderOneComparatorScoringMaxScoreCollector" 
-    };
-    
-    BooleanQuery bq = new BooleanQuery();
-    // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
-    // which delegates to BS if there are no mandatory clauses.
-    bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
-    // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
-    // the clause instead of BQ.
-    bq.setMinimumNumberShouldMatch(1);
-    for(int i = 0; i < sort.length; i++) {
-      for(int j = 0; j < tfcOptions.length; j++) {
-        TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10,
-            tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);
-
-        assertTrue(tdc.getClass().getName().endsWith("$"+actualTFCClasses[j]));
-        
-        is.search(bq, tdc);
-        
-        TopDocs td = tdc.topDocs();
-        ScoreDoc[] sd = td.scoreDocs;
-        assertEquals(10, sd.length);
-      }
-    }
-  }
-  
-  // OutOfOrderMulti*Collector
-  public void testOutOfOrderDocsScoringSortMulti() throws Exception {
-
-    // Two Sort criteria to instantiate the multi/single comparators.
-    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC, SortField.FIELD_SCORE) };
-    boolean[][] tfcOptions = new boolean[][] {
-        new boolean[] { false, false, false },
-        new boolean[] { false, false, true },
-        new boolean[] { false, true, false },
-        new boolean[] { false, true, true },
-        new boolean[] { true, false, false },
-        new boolean[] { true, false, true },
-        new boolean[] { true, true, false },
-        new boolean[] { true, true, true },
-    };
-    String[] actualTFCClasses = new String[] {
-        "OutOfOrderMultiComparatorNonScoringCollector", 
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector", 
-        "OutOfOrderMultiComparatorScoringNoMaxScoreCollector", 
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector", 
-        "OutOfOrderMultiComparatorNonScoringCollector", 
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector", 
-        "OutOfOrderMultiComparatorScoringNoMaxScoreCollector", 
-        "OutOfOrderMultiComparatorScoringMaxScoreCollector" 
-    };
-    
-    BooleanQuery bq = new BooleanQuery();
-    // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
-    // which delegates to BS if there are no mandatory clauses.
-    bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
-    // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
-    // the clause instead of BQ.
-    bq.setMinimumNumberShouldMatch(1);
-    for(int i = 0; i < sort.length; i++) {
-      for(int j = 0; j < tfcOptions.length; j++) {
-        TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10,
-            tfcOptions[j][0], tfcOptions[j][1], tfcOptions[j][2], false);
-
-        assertTrue(tdc.getClass().getName().endsWith("$"+actualTFCClasses[j]));
-        
-        is.search(bq, tdc);
-        
-        TopDocs td = tdc.topDocs();
-        ScoreDoc[] sd = td.scoreDocs;
-        assertEquals(10, sd.length);
-      }
-    }
-  }
-  
   public void testSortWithScoreAndMaxScoreTrackingNoResults() throws Exception {
     
     // Two Sort criteria to instantiate the multi/single comparators.
     Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
     for(int i = 0; i < sort.length; i++) {
-      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true, true, true);
+      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, true, true, true);
       TopDocs td = tdc.topDocs();
       assertEquals(0, td.totalHits);
       assertTrue(Float.isNaN(td.getMaxScore()));
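`TopFieldCollector.create` follows suit: the trailing in-order boolean is gone, and the remaining flags keep their meaning:

```java
// sort, numHits, fillFields, trackDocScores, trackMaxScore
TopDocsCollector<Entry> tdc = TopFieldCollector.create(
    new Sort(SortField.FIELD_DOC), 10, true, true, true);
is.search(new MatchAllDocsQuery(), tdc);
TopDocs td = tdc.topDocs();
```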
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopScoreDocCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopScoreDocCollector.java
deleted file mode 100644
index 5cceb19..0000000
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopScoreDocCollector.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package org.apache.lucene.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.LuceneTestCase;
-
-public class TestTopScoreDocCollector extends LuceneTestCase {
-
-  public void testOutOfOrderCollection() throws Exception {
-    Directory dir = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
-    for (int i = 0; i < 10; i++) {
-      writer.addDocument(writer.newDocument());
-    }
-    
-    boolean[] inOrder = new boolean[] { false, true };
-    String[] actualTSDCClass = new String[] {
-        "OutOfOrderTopScoreDocCollector", 
-        "InOrderTopScoreDocCollector" 
-    };
-    
-    BooleanQuery bq = new BooleanQuery();
-    // Add a Query with SHOULD, since bw.scorer() returns BooleanScorer2
-    // which delegates to BS if there are no mandatory clauses.
-    bq.add(new MatchAllDocsQuery(), Occur.SHOULD);
-    // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
-    // the clause instead of BQ.
-    bq.setMinimumNumberShouldMatch(1);
-    IndexReader reader = writer.getReader();
-    IndexSearcher searcher = newSearcher(reader);
-    for (int i = 0; i < inOrder.length; i++) {
-      TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(3, inOrder[i]);
-      assertEquals("org.apache.lucene.search.TopScoreDocCollector$" + actualTSDCClass[i], tdc.getClass().getName());
-      
-      searcher.search(new MatchAllDocsQuery(), tdc);
-      
-      ScoreDoc[] sd = tdc.topDocs().scoreDocs;
-      assertEquals(3, sd.length);
-      for (int j = 0; j < sd.length; j++) {
-        assertEquals("expected doc Id " + j + " found " + sd[j].doc, j, sd[j].doc);
-      }
-    }
-    writer.close();
-    reader.close();
-    dir.close();
-  }
-  
-}
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestRateLimitedDirectoryWrapper.java b/lucene/core/src/test/org/apache/lucene/store/TestRateLimitedDirectoryWrapper.java
deleted file mode 100644
index c63119b..0000000
--- a/lucene/core/src/test/org/apache/lucene/store/TestRateLimitedDirectoryWrapper.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package org.apache.lucene.store;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.nio.file.Path;
-
-import org.apache.lucene.util.TestUtil;
-
-public class TestRateLimitedDirectoryWrapper extends BaseDirectoryTestCase {
-
-  @Override
-  protected Directory getDirectory(Path path) {
-    Directory in = newFSDirectory(path);
-    if (in instanceof MockDirectoryWrapper) {
-      // test manipulates directory directly
-      ((MockDirectoryWrapper)in).setEnableVirusScanner(false);
-    }
-    RateLimitedDirectoryWrapper dir = new RateLimitedDirectoryWrapper(in);
-    RateLimiter limiter = new RateLimiter.SimpleRateLimiter(TestUtil.nextInt(random(), 10, 40));
-    dir.setRateLimiter(limiter, IOContext.Context.MERGE);
-    return dir;
-  }
-
-  // since we are rate-limiting, this test gets pretty slow
-  @Override @Nightly
-  public void testThreadSafety() throws Exception {
-    super.testThreadSafety();
-  }
-}
diff --git a/lucene/core/src/test/org/apache/lucene/util/TestUnicodeUtil.java b/lucene/core/src/test/org/apache/lucene/util/TestUnicodeUtil.java
index c6f5d8b..7879e7c 100644
--- a/lucene/core/src/test/org/apache/lucene/util/TestUnicodeUtil.java
+++ b/lucene/core/src/test/org/apache/lucene/util/TestUnicodeUtil.java
@@ -197,9 +197,7 @@
         assertFalse(rc == -1);
         assertEquals(cpString.substring(rs, rs + rc), str);
         continue;
-      } catch (IndexOutOfBoundsException e1) {
-        // Ignored.
-      } catch (IllegalArgumentException e2) {
+      } catch (IndexOutOfBoundsException | IllegalArgumentException e1) {
         // Ignored.
       }
       assertTrue(rc == -1);
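The two identical handlers fold into one Java 7 multi-catch clause; the construct in isolation (the called method is hypothetical):

```java
try {
  parse(input); // may throw either exception type
} catch (IndexOutOfBoundsException | IllegalArgumentException e) {
  // Ignored: both signal an invalid random offset here.
}
```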
diff --git a/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionComparator.java b/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionComparator.java
index ebeeca9..bf386bc 100644
--- a/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionComparator.java
+++ b/lucene/expressions/src/java/org/apache/lucene/expressions/ExpressionComparator.java
@@ -24,10 +24,11 @@
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Scorer;
 
 /** A custom comparator for sorting documents by an expression */
-class ExpressionComparator extends FieldComparator<Double> {
+class ExpressionComparator extends FieldComparator<Double> implements LeafFieldComparator {
   private final double[] values;
   private double bottom;
   private double topValue;
@@ -44,7 +45,6 @@
   // TODO: change FieldComparator.setScorer to throw IOException and remove this try-catch
   @Override
   public void setScorer(Scorer scorer) {
-    super.setScorer(scorer);
     // TODO: might be cleaner to lazy-init 'source' and set scorer after?
     assert readerContext != null;
     try {
@@ -83,7 +83,7 @@
   }
   
   @Override
-  public FieldComparator<Double> setNextReader(LeafReaderContext context) throws IOException {
+  public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
     this.readerContext = context;
     return this;
   }
diff --git a/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java b/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java
index d00df91f..ef1639c 100644
--- a/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java
+++ b/lucene/expressions/src/java/org/apache/lucene/expressions/js/JavascriptCompiler.java
@@ -565,7 +565,7 @@
         checkFunction(method, JavascriptCompiler.class.getClassLoader());
         map.put(call, method);
       }
-    } catch (NoSuchMethodException | ClassNotFoundException | IOException e) {
+    } catch (ReflectiveOperationException | IOException e) {
       throw new Error("Cannot resolve function", e);
     }
     DEFAULT_FUNCTIONS = Collections.unmodifiableMap(map);
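`NoSuchMethodException` and `ClassNotFoundException` both extend `ReflectiveOperationException` (since Java 7), so the broader catch covers exactly the same reflective failures with one fewer type to name:

```java
try {
  Class<?> clazz = Class.forName(className); // may throw ClassNotFoundException
  clazz.getMethod(methodName);               // may throw NoSuchMethodException
} catch (ReflectiveOperationException e) {
  throw new Error("Cannot resolve function", e);
}
```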
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java b/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
index c855de6..16df217 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/DrillSideways.java
@@ -199,8 +199,7 @@
                                                                       after,
                                                                       true,
                                                                       doDocScores,
-                                                                      doMaxScore,
-                                                                      true);
+                                                                      doMaxScore);
       DrillSidewaysResult r = search(query, hitCollector);
       return new DrillSidewaysResult(r.facets, hitCollector.topDocs());
     } else {
@@ -227,7 +226,7 @@
       limit = 1; // the collector does not allow numHits = 0
     }
     topN = Math.min(topN, limit);
-    TopScoreDocCollector hitCollector = TopScoreDocCollector.create(topN, after, true);
+    TopScoreDocCollector hitCollector = TopScoreDocCollector.create(topN, after);
     DrillSidewaysResult r = search(query, hitCollector);
     return new DrillSidewaysResult(r.facets, hitCollector.topDocs());
   }
@@ -236,12 +235,7 @@
    *  (e.g., {@code ToParentBlockJoinCollector}) expects all
    *  sub-scorers to be positioned on the document being
    *  collected.  This will cause some performance loss;
-   *  default is false.  Note that if you return true from
-   *  this method (in a subclass) be sure your collector
-   *  also returns false from {@link
-   *  LeafCollector#acceptsDocsOutOfOrder}: this will trick
-   *  {@code BooleanQuery} into also scoring all subDocs at
-   *  once. */
+   *  default is false. */
   protected boolean scoreSubDocsAtOnce() {
     return false;
   }
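With out-of-order scoring gone, the javadoc no longer needs the caveat about coordinating with `acceptsDocsOutOfOrder`. A hypothetical subclass opting in, e.g. when collecting into a `ToParentBlockJoinCollector`:

```java
DrillSideways ds = new DrillSideways(searcher, config, taxoReader) {
  @Override
  protected boolean scoreSubDocsAtOnce() {
    return true; // keep all sub-scorers positioned on the collected doc
  }
};
```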
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java b/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
index 45f647e..6fde4ae 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysQuery.java
@@ -111,20 +111,13 @@
       }
 
       @Override
-      public boolean scoresDocsOutOfOrder() {
-        // TODO: would be nice if AssertingIndexSearcher
-        // confirmed this for us
-        return false;
-      }
-
-      @Override
       public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
         // We can only run as a top scorer:
         throw new UnsupportedOperationException();
       }
 
       @Override
-      public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs) throws IOException {
+      public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
 
         // TODO: it could be better if we take acceptDocs
         // into account instead of baseScorer?
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysScorer.java b/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysScorer.java
index 3b5b175..7988cb2 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysScorer.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/DrillSidewaysScorer.java
@@ -64,7 +64,10 @@
   }
 
   @Override
-  public boolean score(LeafCollector collector, int maxDoc) throws IOException {
+  public int score(LeafCollector collector, int min, int maxDoc) throws IOException {
+    if (min != 0) {
+      throw new IllegalArgumentException("min must be 0, got " + min);
+    }
     if (maxDoc != Integer.MAX_VALUE) {
       throw new IllegalArgumentException("maxDoc must be Integer.MAX_VALUE");
     }
@@ -150,7 +153,7 @@
       doUnionScoring(collector, disis, sidewaysCollectors);
     }
 
-    return false;
+    return Integer.MAX_VALUE;
   }
 
   /** Used when base query is highly constraining vs the
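`BulkScorer.score` now takes a `[min, max)` window and returns the next candidate doc id instead of a boolean, letting callers drive collection in chunks; `DrillSidewaysScorer` only supports exhaustive scoring, hence the two guards above. A sketch of the general driving loop (window size is arbitrary; `bulkScorer` and `leafCollector` assumed in scope):

```java
int next = 0;
while (next != DocIdSetIterator.NO_MORE_DOCS) {
  // Score docs in [next, end); the return value says where to resume.
  int end = (int) Math.min((long) next + 4096, DocIdSetIterator.NO_MORE_DOCS);
  next = bulkScorer.score(leafCollector, next, end);
}
```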
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java b/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java
index f1aa08f..762cf90 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/FacetsCollector.java
@@ -155,14 +155,6 @@
   }
 
   @Override
-  public final boolean acceptsDocsOutOfOrder() {
-    // If we are keeping scores then we require in-order
-    // because we append each score to the float[] and
-    // expect that they correlate in order to the hits:
-    return keepScores == false;
-  }
-
-  @Override
   public final void collect(int doc) throws IOException {
     docs.addDoc(doc);
     if (keepScores) {
@@ -284,14 +276,9 @@
                                                (FieldDoc) after,
                                                fillFields,
                                                doDocScores,
-                                               doMaxScore,
-                                               false);
+                                               doMaxScore);
     } else {
-      // TODO: can we pass the right boolean for
-      // in-order instead of hardwired to false...?  we'd
-      // need access to the protected IS.search methods
-      // taking Weight... could use reflection...
-      hitsCollector = TopScoreDocCollector.create(n, after, false);
+      hitsCollector = TopScoreDocCollector.create(n, after);
     }
     searcher.search(q, MultiCollector.wrap(hitsCollector, fc));
     return hitsCollector.topDocs();
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyMergeUtils.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyMergeUtils.java
index bdd11e7..7be7294 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyMergeUtils.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/TaxonomyMergeUtils.java
@@ -10,7 +10,9 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.MultiReader;
+import org.apache.lucene.index.SlowCodecReaderWrapper;
 import org.apache.lucene.store.Directory;
 
 /*
@@ -52,11 +54,11 @@
     try {
       List<LeafReaderContext> leaves = reader.leaves();
       int numReaders = leaves.size();
-      LeafReader wrappedLeaves[] = new LeafReader[numReaders];
+      CodecReader wrappedLeaves[] = new CodecReader[numReaders];
       for (int i = 0; i < numReaders; i++) {
-        wrappedLeaves[i] = new OrdinalMappingLeafReader(leaves.get(i).reader(), ordinalMap, srcConfig);
+        wrappedLeaves[i] = SlowCodecReaderWrapper.wrap(new OrdinalMappingLeafReader(leaves.get(i).reader(), ordinalMap, srcConfig));
       }
-      destIndexWriter.addIndexes(new MultiReader(wrappedLeaves));
+      destIndexWriter.addIndexes(wrappedLeaves);
       
       // commit changes to taxonomy and index respectively.
       destTaxoWriter.commit();
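On trunk, `IndexWriter.addIndexes` takes `CodecReader`s rather than arbitrary readers, so filter readers such as `OrdinalMappingLeafReader` must go through `SlowCodecReaderWrapper.wrap` first. The general shape (reader and writer variables assumed in scope):

```java
List<LeafReaderContext> leaves = reader.leaves();
CodecReader[] wrapped = new CodecReader[leaves.size()];
for (int i = 0; i < leaves.size(); i++) {
  // wrap() adapts any LeafReader to the CodecReader API
  wrapped[i] = SlowCodecReaderWrapper.wrap(leaves.get(i).reader());
}
destIndexWriter.addIndexes(wrapped);
```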
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/AssertingSubDocsAtOnceCollector.java b/lucene/facet/src/test/org/apache/lucene/facet/AssertingSubDocsAtOnceCollector.java
index df104b0..1b42f89 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/AssertingSubDocsAtOnceCollector.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/AssertingSubDocsAtOnceCollector.java
@@ -54,9 +54,4 @@
       }
     }
   }
-
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
-  }
 }
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
index ac67472..1b3a486 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java
@@ -687,11 +687,6 @@
                              protected void doSetNextReader(LeafReaderContext context) throws IOException {
                                lastDocID = -1;
                              }
-
-                             @Override
-                             public boolean acceptsDocsOutOfOrder() {
-                               return false;
-                             }
                            });
 
       // Also separately verify that DS respects the
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupHeadsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupHeadsCollector.java
index be6d8f8..2d35145 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupHeadsCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupHeadsCollector.java
@@ -126,11 +126,6 @@
     groupHead.updateDocHead(doc);
   }
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
-  }
-
   /**
    * Contains the result of group head retrieval.
    * To prevent new object creations of this class for every collect.
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupsCollector.java
index 1677eca..be9ed38 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupsCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractAllGroupsCollector.java
@@ -62,8 +62,4 @@
   @Override
   public void setScorer(Scorer scorer) throws IOException {}
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return true;
-  }
 }
\ No newline at end of file
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java
index a735caf..ffdbcce 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractDistinctValuesCollector.java
@@ -37,11 +37,6 @@
    */
   public abstract List<GC> getGroups();
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return true;
-  }
-
   /**
    * Returned by {@link AbstractDistinctValuesCollector#getGroups()},
    * representing the value and set of distinct values for the group.
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java
index c383042..1667bd0 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractFirstPassGroupingCollector.java
@@ -37,6 +37,7 @@
 
   private final Sort groupSort;
   private final FieldComparator<?>[] comparators;
+  private final LeafFieldComparator[] leafComparators;
   private final int[] reversed;
   private final int topNGroups;
   private final HashMap<GROUP_VALUE_TYPE, CollectedSearchGroup<GROUP_VALUE_TYPE>> groupMap;
@@ -60,7 +61,6 @@
    *  @param topNGroups How many top groups to keep.
    *  @throws IOException If I/O related errors occur
    */
-  @SuppressWarnings({"unchecked","rawtypes"})
   public AbstractFirstPassGroupingCollector(Sort groupSort, int topNGroups) throws IOException {
     if (topNGroups < 1) {
       throw new IllegalArgumentException("topNGroups must be >= 1 (got " + topNGroups + ")");
@@ -74,6 +74,7 @@
 
     final SortField[] sortFields = groupSort.getSort();
     comparators = new FieldComparator[sortFields.length];
+    leafComparators = new LeafFieldComparator[sortFields.length];
     compIDXEnd = comparators.length - 1;
     reversed = new int[sortFields.length];
     for (int i = 0; i < sortFields.length; i++) {
@@ -137,7 +138,7 @@
 
   @Override
   public void setScorer(Scorer scorer) throws IOException {
-    for (FieldComparator<?> comparator : comparators) {
+    for (LeafFieldComparator comparator : leafComparators) {
       comparator.setScorer(scorer);
     }
   }
@@ -157,7 +158,7 @@
     // wasted effort as we will most likely be updating an existing group.
     if (orderedGroups != null) {
       for (int compIDX = 0;; compIDX++) {
-        final int c = reversed[compIDX] * comparators[compIDX].compareBottom(doc);
+        final int c = reversed[compIDX] * leafComparators[compIDX].compareBottom(doc);
         if (c < 0) {
           // Definitely not competitive. So don't even bother to continue
           return;
@@ -197,7 +198,7 @@
         sg.groupValue = copyDocGroupValue(groupValue, null);
         sg.comparatorSlot = groupMap.size();
         sg.topDoc = docBase + doc;
-        for (FieldComparator<?> fc : comparators) {
+        for (LeafFieldComparator fc : leafComparators) {
           fc.copy(sg.comparatorSlot, doc);
         }
         groupMap.put(sg.groupValue, sg);
@@ -223,7 +224,7 @@
       bottomGroup.groupValue = copyDocGroupValue(groupValue, bottomGroup.groupValue);
       bottomGroup.topDoc = docBase + doc;
 
-      for (FieldComparator<?> fc : comparators) {
+      for (LeafFieldComparator fc : leafComparators) {
         fc.copy(bottomGroup.comparatorSlot, doc);
       }
 
@@ -232,7 +233,7 @@
       assert orderedGroups.size() == topNGroups;
 
       final int lastComparatorSlot = orderedGroups.last().comparatorSlot;
-      for (FieldComparator<?> fc : comparators) {
+      for (LeafFieldComparator fc : leafComparators) {
         fc.setBottom(lastComparatorSlot);
       }
 
@@ -241,17 +242,16 @@
 
     // Update existing group:
     for (int compIDX = 0;; compIDX++) {
-      final FieldComparator<?> fc = comparators[compIDX];
-      fc.copy(spareSlot, doc);
+      leafComparators[compIDX].copy(spareSlot, doc);
 
-      final int c = reversed[compIDX] * fc.compare(group.comparatorSlot, spareSlot);
+      final int c = reversed[compIDX] * comparators[compIDX].compare(group.comparatorSlot, spareSlot);
       if (c < 0) {
         // Definitely not competitive.
         return;
       } else if (c > 0) {
         // Definitely competitive; set remaining comparators:
         for (int compIDX2=compIDX+1; compIDX2<comparators.length; compIDX2++) {
-          comparators[compIDX2].copy(spareSlot, doc);
+          leafComparators[compIDX2].copy(spareSlot, doc);
         }
         break;
       } else if (compIDX == compIDXEnd) {
@@ -288,7 +288,7 @@
       final CollectedSearchGroup<?> newLast = orderedGroups.last();
       // If we changed the value of the last group, or changed which group was last, then update bottom:
       if (group == newLast || prevLast != newLast) {
-        for (FieldComparator<?> fc : comparators) {
+        for (LeafFieldComparator fc : leafComparators) {
           fc.setBottom(newLast.comparatorSlot);
         }
       }
@@ -315,21 +315,16 @@
     orderedGroups.addAll(groupMap.values());
     assert orderedGroups.size() > 0;
 
-    for (FieldComparator<?> fc : comparators) {
+    for (LeafFieldComparator fc : leafComparators) {
       fc.setBottom(orderedGroups.last().comparatorSlot);
     }
   }
 
   @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
-  }
-
-  @Override
   protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
     docBase = readerContext.docBase;
     for (int i=0; i<comparators.length; i++) {
-      comparators[i] = comparators[i].setNextReader(readerContext);
+      leafComparators[i] = comparators[i].getLeafComparator(readerContext);
     }
   }
 
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java
index 016f393..745a3d6 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractGroupFacetCollector.java
@@ -109,11 +109,6 @@
   public void setScorer(Scorer scorer) throws IOException {
   }
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return true;
-  }
-
   /**
    * The grouped facet result. Containing grouped facet entries, total count and total missing count.
    */
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java
index 2dba7a9..0634ee1 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/AbstractSecondPassGroupingCollector.java
@@ -69,10 +69,10 @@
       final TopDocsCollector<?> collector;
       if (withinGroupSort == null) {
         // Sort by score
-        collector = TopScoreDocCollector.create(maxDocsPerGroup, true);
+        collector = TopScoreDocCollector.create(maxDocsPerGroup);
       } else {
         // Sort by fields
-        collector = TopFieldCollector.create(withinGroupSort, maxDocsPerGroup, fillSortFields, getScores, getMaxScores, true);
+        collector = TopFieldCollector.create(withinGroupSort, maxDocsPerGroup, fillSortFields, getScores, getMaxScores);
       }
       groupMap.put(group.groupValue,
           new SearchGroupDocs<>(group.groupValue,
@@ -83,7 +83,7 @@
   @Override
   public void setScorer(Scorer scorer) throws IOException {
     for (SearchGroupDocs<GROUP_VALUE_TYPE> group : groupMap.values()) {
-      group.collector.setScorer(scorer);
+      group.leafCollector.setScorer(scorer);
     }
   }
 
@@ -93,7 +93,7 @@
     SearchGroupDocs<GROUP_VALUE_TYPE> group = retrieveGroup(doc);
     if (group != null) {
       totalGroupedHitCount++;
-      group.collector.collect(doc);
+      group.leafCollector.collect(doc);
     }
   }
 
@@ -110,15 +110,10 @@
   protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
     //System.out.println("SP.setNextReader");
     for (SearchGroupDocs<GROUP_VALUE_TYPE> group : groupMap.values()) {
-      group.collector.getLeafCollector(readerContext);
+      group.leafCollector = group.collector.getLeafCollector(readerContext);
     }
   }
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
-  }
-
   public TopGroups<GROUP_VALUE_TYPE> getTopGroups(int withinGroupOffset) {
     @SuppressWarnings({"unchecked","rawtypes"})
     final GroupDocs<GROUP_VALUE_TYPE>[] groupDocsResult = (GroupDocs<GROUP_VALUE_TYPE>[]) new GroupDocs[groups.size()];
@@ -151,6 +146,7 @@
 
     public final GROUP_VALUE_TYPE groupValue;
     public final TopDocsCollector<?> collector;
+    public LeafCollector leafCollector;
 
     public SearchGroupDocs(GROUP_VALUE_TYPE groupValue, TopDocsCollector<?> collector) {
       this.groupValue = groupValue;
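
The substantive fix in this file is storing the LeafCollector that getLeafCollector returns instead of discarding it; otherwise each group's collector keeps collecting against the previous segment. The corrected fan-out pattern, sketched with hypothetical names (Lucene's MultiCollector implements the general case):

    import java.io.IOException;
    import java.util.List;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.Collector;
    import org.apache.lucene.search.LeafCollector;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.SimpleCollector;

    public class FanOutCollector extends SimpleCollector {
      private final List<Collector> subs;
      private LeafCollector[] leaves;   // rebound on every segment

      public FanOutCollector(List<Collector> subs) {
        this.subs = subs;
      }

      @Override
      protected void doSetNextReader(LeafReaderContext context) throws IOException {
        leaves = new LeafCollector[subs.size()];
        for (int i = 0; i < subs.size(); i++) {
          // The bug fixed above: calling getLeafCollector and dropping the
          // result leaves the sub-collector bound to the previous segment.
          leaves[i] = subs.get(i).getLeafCollector(context);
        }
      }

      @Override
      public void setScorer(Scorer scorer) throws IOException {
        for (LeafCollector leaf : leaves) {
          leaf.setScorer(scorer);
        }
      }

      @Override
      public void collect(int doc) throws IOException {
        for (LeafCollector leaf : leaves) {
          leaf.collect(doc);
        }
      }
    }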
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
index 4c0c6b5..16cbf40 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/BlockGroupingCollector.java
@@ -69,6 +69,7 @@
   private final boolean needsScores;
 
   private final FieldComparator<?>[] comparators;
+  private final LeafFieldComparator[] leafComparators;
   private final int[] reversed;
   private final int compIDXEnd;
   private int bottomSlot;
@@ -202,7 +203,7 @@
           bottomSlot = bottomGroup.comparatorSlot;
           //System.out.println("    set bottom=" + bottomSlot);
           for (int i = 0; i < comparators.length; i++) {
-            comparators[i].setBottom(bottomSlot);
+            leafComparators[i].setBottom(bottomSlot);
           }
           //System.out.println("     QUEUE FULL");
         } else {
@@ -231,7 +232,7 @@
 
         //System.out.println("    set bottom=" + bottomSlot);
         for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottomSlot);
+          leafComparators[i].setBottom(bottomSlot);
         }
       }
     }
@@ -278,6 +279,7 @@
 
     final SortField[] sortFields = groupSort.getSort();
     comparators = new FieldComparator<?>[sortFields.length];
+    leafComparators = new LeafFieldComparator[sortFields.length];
     compIDXEnd = comparators.length - 1;
     reversed = new int[sortFields.length];
     for (int i = 0; i < sortFields.length; i++) {
@@ -343,21 +345,21 @@
         if (!needsScores) {
           throw new IllegalArgumentException("cannot sort by relevance within group: needsScores=false");
         }
-        collector = TopScoreDocCollector.create(maxDocsPerGroup, true);
+        collector = TopScoreDocCollector.create(maxDocsPerGroup);
       } else {
         // Sort by fields
-        collector = TopFieldCollector.create(withinGroupSort, maxDocsPerGroup, fillSortFields, needsScores, needsScores, true);
+        collector = TopFieldCollector.create(withinGroupSort, maxDocsPerGroup, fillSortFields, needsScores, needsScores);
       }
 
-      collector.setScorer(fakeScorer);
-      collector.getLeafCollector(og.readerContext);
+      LeafCollector leafCollector = collector.getLeafCollector(og.readerContext);
+      leafCollector.setScorer(fakeScorer);
       for(int docIDX=0;docIDX<og.count;docIDX++) {
         final int doc = og.docs[docIDX];
         fakeScorer.doc = doc;
         if (needsScores) {
           fakeScorer.score = og.scores[docIDX];
         }
-        collector.collect(doc);
+        leafCollector.collect(doc);
       }
       totalGroupedHitCount += og.count;
 
@@ -402,7 +404,7 @@
   @Override
   public void setScorer(Scorer scorer) throws IOException {
     this.scorer = scorer;
-    for (FieldComparator<?> comparator : comparators) {
+    for (LeafFieldComparator comparator : leafComparators) {
       comparator.setScorer(scorer);
     }
   }
@@ -443,7 +445,7 @@
         assert !queueFull;
 
         //System.out.println("    init copy to bottomSlot=" + bottomSlot);
-        for (FieldComparator<?> fc : comparators) {
+        for (LeafFieldComparator fc : leafComparators) {
           fc.copy(bottomSlot, doc);
           fc.setBottom(bottomSlot);
         }        
@@ -451,7 +453,7 @@
       } else {
         // Compare to bottomSlot
         for (int compIDX = 0;; compIDX++) {
-          final int c = reversed[compIDX] * comparators[compIDX].compareBottom(doc);
+          final int c = reversed[compIDX] * leafComparators[compIDX].compareBottom(doc);
           if (c < 0) {
             // Definitely not competitive -- done
             return;
@@ -468,7 +470,7 @@
 
         //System.out.println("       best w/in group!");
         
-        for (FieldComparator<?> fc : comparators) {
+        for (LeafFieldComparator fc : leafComparators) {
           fc.copy(bottomSlot, doc);
           // Necessary because some comparators cache
           // details of bottom slot; this forces them to
@@ -481,7 +483,7 @@
       // We're not sure this group will make it into the
       // queue yet
       for (int compIDX = 0;; compIDX++) {
-        final int c = reversed[compIDX] * comparators[compIDX].compareBottom(doc);
+        final int c = reversed[compIDX] * leafComparators[compIDX].compareBottom(doc);
         if (c < 0) {
           // Definitely not competitive -- done
           //System.out.println("    doc doesn't compete w/ top groups");
@@ -498,7 +500,7 @@
         }
       }
       groupCompetes = true;
-      for (FieldComparator<?> fc : comparators) {
+      for (LeafFieldComparator fc : leafComparators) {
         fc.copy(bottomSlot, doc);
         // Necessary because some comparators cache
         // details of bottom slot; this forces them to
@@ -511,11 +513,6 @@
   }
 
   @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
-  }
-
-  @Override
   protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
     if (subDocUpto != 0) {
       processGroup();
@@ -528,7 +525,7 @@
 
     currentReaderContext = readerContext;
     for (int i=0; i<comparators.length; i++) {
-      comparators[i] = comparators[i].setNextReader(readerContext);
+      leafComparators[i] = comparators[i].getLeafComparator(readerContext);
     }
   }
 }
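
The replay loop above also encodes an ordering subtlety: setScorer now lives on LeafCollector, so the per-segment leaf must be obtained before the scorer is installed. A condensed, self-contained sketch of the idiom (buffer names hypothetical; the stand-in scorer mirrors the FakeScorer used in this file):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.Collector;
    import org.apache.lucene.search.LeafCollector;
    import org.apache.lucene.search.Scorer;

    public class ReplayUtil {
      /** Minimal scorer that just reports a canned doc and score. */
      static final class CannedScorer extends Scorer {
        int doc;
        float score;
        CannedScorer() { super(null); }  // no Weight needed for replay
        @Override public int docID() { return doc; }
        @Override public float score() { return score; }
        @Override public int freq() { return 1; }
        @Override public int nextDoc() { throw new UnsupportedOperationException(); }
        @Override public int advance(int target) { throw new UnsupportedOperationException(); }
        @Override public long cost() { return 1; }
      }

      static void replay(Collector collector, LeafReaderContext context,
                         int[] docs, float[] scores, int count) throws IOException {
        LeafCollector leafCollector = collector.getLeafCollector(context); // leaf first,
        CannedScorer scorer = new CannedScorer();
        leafCollector.setScorer(scorer);                                   // then the scorer
        for (int i = 0; i < count; i++) {
          scorer.doc = docs[i];
          scorer.score = scores[i];
          leafCollector.collect(docs[i]);
        }
      }
    }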
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java
index 032abbb..4239da2 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/SearchGroup.java
@@ -18,6 +18,7 @@
  */
 
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 
@@ -158,7 +159,7 @@
 
     public GroupComparator(Sort groupSort) throws IOException {
       final SortField[] sortFields = groupSort.getSort();
-      comparators = new FieldComparator<?>[sortFields.length];
+      comparators = new FieldComparator[sortFields.length];
       reversed = new int[sortFields.length];
       for (int compIDX = 0; compIDX < sortFields.length; compIDX++) {
         final SortField sortField = sortFields[compIDX];
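
An aside on the array-creation change above: both the old and new forms are legal Java, because arrays of an unbounded wildcard type (unlike arrays of a concrete parameterization) may be created directly; the patch merely settles on the raw type:

    FieldComparator<?>[] a = new FieldComparator<?>[4]; // legal: unbounded wildcard
    FieldComparator<?>[] b = new FieldComparator[4];    // legal: raw type (unchecked)
    // FieldComparator<BytesRef>[] c = new FieldComparator<BytesRef>[4]; // does not compile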
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java
index 771660d..e30a551 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/function/FunctionAllGroupHeadsCollector.java
@@ -21,6 +21,7 @@
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
@@ -94,7 +95,7 @@
   public void setScorer(Scorer scorer) throws IOException {
     this.scorer = scorer;
     for (GroupHead groupHead : groups.values()) {
-      for (FieldComparator<?> comparator : groupHead.comparators) {
+      for (LeafFieldComparator comparator : groupHead.leafComparators) {
         comparator.setScorer(scorer);
       }
     }
@@ -109,7 +110,7 @@
 
     for (GroupHead groupHead : groups.values()) {
       for (int i = 0; i < groupHead.comparators.length; i++) {
-        groupHead.comparators[i] = groupHead.comparators[i].setNextReader(context);
+        groupHead.leafComparators[i] = groupHead.comparators[i].getLeafComparator(context);
       }
     }
   }
@@ -120,28 +121,31 @@
   public class GroupHead extends AbstractAllGroupHeadsCollector.GroupHead<MutableValue> {
 
     final FieldComparator<?>[] comparators;
+    final LeafFieldComparator[] leafComparators;
 
     @SuppressWarnings({"unchecked","rawtypes"})
     private GroupHead(MutableValue groupValue, Sort sort, int doc) throws IOException {
       super(groupValue, doc + readerContext.docBase);
       final SortField[] sortFields = sort.getSort();
       comparators = new FieldComparator[sortFields.length];
+      leafComparators = new LeafFieldComparator[sortFields.length];
       for (int i = 0; i < sortFields.length; i++) {
-        comparators[i] = sortFields[i].getComparator(1, i).setNextReader(readerContext);
-        comparators[i].setScorer(scorer);
-        comparators[i].copy(0, doc);
-        comparators[i].setBottom(0);
+        comparators[i] = sortFields[i].getComparator(1, i);
+        leafComparators[i] = comparators[i].getLeafComparator(readerContext);
+        leafComparators[i].setScorer(scorer);
+        leafComparators[i].copy(0, doc);
+        leafComparators[i].setBottom(0);
       }
     }
 
     @Override
     public int compare(int compIDX, int doc) throws IOException {
-      return comparators[compIDX].compareBottom(doc);
+      return leafComparators[compIDX].compareBottom(doc);
     }
 
     @Override
     public void updateDocHead(int doc) throws IOException {
-      for (FieldComparator<?> comparator : comparators) {
+      for (LeafFieldComparator comparator : leafComparators) {
         comparator.copy(0, doc);
         comparator.setBottom(0);
       }
diff --git a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java
index df0c0bb..61c981a 100644
--- a/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java
+++ b/lucene/grouping/src/java/org/apache/lucene/search/grouping/term/TermAllGroupHeadsCollector.java
@@ -21,6 +21,7 @@
 import org.apache.lucene.index.DocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
@@ -164,7 +165,7 @@
 
       for (GroupHead groupHead : groups.values()) {
         for (int i = 0; i < groupHead.comparators.length; i++) {
-          groupHead.comparators[i] = groupHead.comparators[i].setNextReader(context);
+          groupHead.leafComparators[i] = groupHead.comparators[i].getLeafComparator(context);
         }
       }
     }
@@ -173,7 +174,7 @@
     public void setScorer(Scorer scorer) throws IOException {
       this.scorer = scorer;
       for (GroupHead groupHead : groups.values()) {
-        for (FieldComparator<?> comparator : groupHead.comparators) {
+        for (LeafFieldComparator comparator : groupHead.leafComparators) {
           comparator.setScorer(scorer);
         }
       }
@@ -181,28 +182,31 @@
 
     class GroupHead extends AbstractAllGroupHeadsCollector.GroupHead<BytesRef> {
 
-      final FieldComparator<?>[] comparators;
+      final FieldComparator[] comparators;
+      final LeafFieldComparator[] leafComparators;
 
       private GroupHead(BytesRef groupValue, Sort sort, int doc) throws IOException {
         super(groupValue, doc + readerContext.docBase);
         final SortField[] sortFields = sort.getSort();
         comparators = new FieldComparator[sortFields.length];
+        leafComparators = new LeafFieldComparator[sortFields.length];
         for (int i = 0; i < sortFields.length; i++) {
-          comparators[i] = sortFields[i].getComparator(1, i).setNextReader(readerContext);
-          comparators[i].setScorer(scorer);
-          comparators[i].copy(0, doc);
-          comparators[i].setBottom(0);
+          comparators[i] = sortFields[i].getComparator(1, i);
+          leafComparators[i] = comparators[i].getLeafComparator(readerContext);
+          leafComparators[i].setScorer(scorer);
+          leafComparators[i].copy(0, doc);
+          leafComparators[i].setBottom(0);
         }
       }
 
       @Override
       public int compare(int compIDX, int doc) throws IOException {
-        return comparators[compIDX].compareBottom(doc);
+        return leafComparators[compIDX].compareBottom(doc);
       }
 
       @Override
       public void updateDocHead(int doc) throws IOException {
-        for (FieldComparator<?> comparator : comparators) {
+        for (LeafFieldComparator comparator : leafComparators) {
           comparator.copy(0, doc);
           comparator.setBottom(0);
         }
diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
index c88e9f5..0e22db6 100644
--- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java
@@ -835,7 +835,7 @@
             }
           } else {
             // Collect only into cache, then replay multiple times:
-            c = cCache = CachingCollector.create(false, true, maxCacheMB);
+            c = cCache = CachingCollector.create(true, maxCacheMB);
           }
         } else {
           cCache = null;
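
The test adapts to the narrowed factory: with out-of-order collection gone, CachingCollector.create drops its leading acceptDocsOutOfOrder flag and keeps only (cacheScores, maxRAMMB). A usage sketch under those assumptions (searcher, query, and downstream collectors hypothetical):

    import java.io.IOException;
    import org.apache.lucene.search.CachingCollector;
    import org.apache.lucene.search.Collector;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;

    public class ReplayDemo {
      static void searchOnceReplayTwice(IndexSearcher searcher, Query query,
                                        Collector first, Collector second) throws IOException {
        CachingCollector cache = CachingCollector.create(true, 16.0); // cache scores, 16MB cap
        searcher.search(query, cache);
        if (cache.isCached()) {     // false if the RAM budget was exceeded mid-search
          cache.replay(first);      // re-feed cached hits without re-running the query
          cache.replay(second);
        }
      }
    }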
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/Highlighter.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/Highlighter.java
index cf14b7f..5e16347 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/Highlighter.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/Highlighter.java
@@ -225,12 +225,12 @@
           throw new InvalidTokenOffsetsException("Token "+ termAtt.toString()
               +" exceeds length of provided text sized "+text.length());
         }
-        if((tokenGroup.numTokens>0)&&(tokenGroup.isDistinct()))
+        if ((tokenGroup.getNumTokens() > 0) && (tokenGroup.isDistinct()))
         {
           //the current token is distinct from previous tokens -
           // markup the cached token group info
-          startOffset = tokenGroup.matchStartOffset;
-          endOffset = tokenGroup.matchEndOffset;
+          startOffset = tokenGroup.getStartOffset();
+          endOffset = tokenGroup.getEndOffset();
           tokenText = text.substring(startOffset, endOffset);
           String markedUpText=formatter.highlightTerm(encoder.encodeText(tokenText), tokenGroup);
           //store any whitespace etc from between this and last group
@@ -261,11 +261,11 @@
       }
       currentFrag.setScore(fragmentScorer.getFragmentScore());
 
-      if(tokenGroup.numTokens>0)
+      if (tokenGroup.getNumTokens() > 0)
       {
         //flush the accumulated text (same code as in above loop)
-        startOffset = tokenGroup.matchStartOffset;
-        endOffset = tokenGroup.matchEndOffset;
+        startOffset = tokenGroup.getStartOffset();
+        endOffset = tokenGroup.getEndOffset();
         tokenText = text.substring(startOffset, endOffset);
         String markedUpText=formatter.highlightTerm(encoder.encodeText(tokenText), tokenGroup);
         //store any whitespace etc from between this and last group
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/QueryScorer.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/QueryScorer.java
index e855a17..7655e88 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/QueryScorer.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/QueryScorer.java
@@ -54,6 +54,7 @@
   private boolean skipInitExtractor;
   private boolean wrapToCaching = true;
   private int maxCharsToAnalyze;
+  private boolean usePayloads = false;
 
   /**
    * @param query Query to use for highlighting
@@ -213,6 +214,7 @@
     qse.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
     qse.setExpandMultiTermQuery(expandMultiTermQuery);
     qse.setWrapIfNotCachingTokenFilter(wrapToCaching);
+    qse.setUsePayloads(usePayloads);
     if (reader == null) {
       this.fieldWeightedSpanTerms = qse.getWeightedSpanTerms(query,
           tokenStream, field);
@@ -259,7 +261,19 @@
   public void setExpandMultiTermQuery(boolean expandMultiTermQuery) {
     this.expandMultiTermQuery = expandMultiTermQuery;
   }
-  
+
+  /**
+   * Whether or not we should capture payloads in {@link MemoryIndex} at each position so that queries can access them.
+   * This does not apply to term vector based TokenStreams, which support payloads only when the term vector has them.
+   */
+  public boolean isUsePayloads() {
+    return usePayloads;
+  }
+
+  public void setUsePayloads(boolean usePayloads) {
+    this.usePayloads = usePayloads;
+  }
+
   /**
    * By default, {@link TokenStream}s that are not of the type
    * {@link CachingTokenFilter} are wrapped in a {@link CachingTokenFilter} to
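
Taken together with the WeightedSpanTermExtractor change further down, the new flag lets payload-sensitive queries highlight correctly even without term vectors, since the analyzed text is re-indexed into a payload-aware MemoryIndex. A usage sketch (field name hypothetical):

    import java.io.IOException;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.highlight.Highlighter;
    import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
    import org.apache.lucene.search.highlight.QueryScorer;

    public class PayloadHighlightDemo {
      static String highlight(Query query, IndexReader reader, Analyzer analyzer, String text)
          throws IOException, InvalidTokenOffsetsException {
        QueryScorer scorer = new QueryScorer(query, reader, "body");
        scorer.setUsePayloads(true); // index payloads into the internal MemoryIndex
        Highlighter highlighter = new Highlighter(scorer);
        return highlighter.getBestFragment(analyzer, "body", text);
      }
    }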
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java
index fd0fa41..cc0d517 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/TokenGroup.java
@@ -24,18 +24,20 @@
 
 /**
  * One, or several overlapping tokens, along with the score(s) and the scope of
- * the original text
+ * the original text.
  */
 public class TokenGroup {
 
   private static final int MAX_NUM_TOKENS_PER_GROUP = 50;
-  Token [] tokens=new Token[MAX_NUM_TOKENS_PER_GROUP];
-  float[] scores = new float[MAX_NUM_TOKENS_PER_GROUP];
-  int numTokens = 0;
-  int startOffset = 0;
-  int endOffset = 0;
-  float tot;
-  int matchStartOffset, matchEndOffset;
+
+  private Token[] tokens = new Token[MAX_NUM_TOKENS_PER_GROUP];
+  private float[] scores = new float[MAX_NUM_TOKENS_PER_GROUP];
+  private int numTokens = 0;
+  private int startOffset = 0;
+  private int endOffset = 0;
+  private float tot;
+  private int matchStartOffset;
+  private int matchEndOffset;
 
   private OffsetAttribute offsetAtt;
   private CharTermAttribute termAtt;
@@ -47,8 +49,8 @@
 
   void addToken(float score) {
     if (numTokens < MAX_NUM_TOKENS_PER_GROUP) {
-      int termStartOffset = offsetAtt.startOffset();
-      int termEndOffset = offsetAtt.endOffset();
+      final int termStartOffset = offsetAtt.startOffset();
+      final int termEndOffset = offsetAtt.endOffset();
       if (numTokens == 0) {
         startOffset = matchStartOffset = termStartOffset;
         endOffset = matchEndOffset = termEndOffset;
@@ -58,8 +60,8 @@
         endOffset = Math.max(endOffset, termEndOffset);
         if (score > 0) {
           if (tot == 0) {
-            matchStartOffset = offsetAtt.startOffset();
-            matchEndOffset = offsetAtt.endOffset();
+            matchStartOffset = termStartOffset;
+            matchEndOffset = termEndOffset;
           } else {
             matchStartOffset = Math.min(matchStartOffset, termStartOffset);
             matchEndOffset = Math.max(matchEndOffset, termEndOffset);
@@ -84,15 +86,14 @@
     numTokens = 0;
     tot = 0;
   }
-  
-  /* 
-  * @param index a value between 0 and numTokens -1
-  * @return the "n"th token
-  */
- public Token getToken(int index)
- {
-     return tokens[index];
- }
+
+  /**
+   * @param index a value between 0 and numTokens - 1
+   * @return the token at the given index
+   */
+  public Token getToken(int index) {
+    return tokens[index];
+  }
 
   /**
    * 
@@ -104,10 +105,19 @@
   }
 
   /**
-   * @return the end position in the original text
+   * @return the earliest start offset in the original text of a matching token in this group (score &gt; 0), or
+   * if there are none then the earliest offset of any token in the group.
+   */
+  public int getStartOffset() {
+    return matchStartOffset;
+  }
+
+  /**
+   * @return the latest end offset in the original text of a matching token in this group (score &gt; 0), or
+   * if there are none then the latest end offset of any token in the group.
    */
   public int getEndOffset() {
-    return endOffset;
+    return matchEndOffset;
   }
 
   /**
@@ -118,16 +128,10 @@
   }
 
   /**
-   * @return the start position in the original text
-   */
-  public int getStartOffset() {
-    return startOffset;
-  }
-
-  /**
    * @return all tokens' scores summed up
    */
   public float getTotalScore() {
     return tot;
   }
+
 }
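
For consumers, TokenGroup is what a Formatter sees for each highlighted region, and with the change above getStartOffset()/getEndOffset() now bound the matching tokens rather than the whole group. A minimal custom formatter, sketched:

    import org.apache.lucene.search.highlight.Formatter;
    import org.apache.lucene.search.highlight.TokenGroup;

    /** Wraps matched regions in <mark> tags instead of the default <B>. */
    public class MarkFormatter implements Formatter {
      @Override
      public String highlightTerm(String originalText, TokenGroup tokenGroup) {
        if (tokenGroup.getTotalScore() <= 0) {
          return originalText;     // no matching token in this group
        }
        return "<mark>" + originalText + "</mark>";
      }
    }

Install it via the Highlighter(Formatter, Scorer) constructor.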
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
index abbfd5f..04e794a 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/highlight/WeightedSpanTermExtractor.java
@@ -83,9 +83,9 @@
   private boolean cachedTokenStream;
   private boolean wrapToCaching = true;
   private int maxDocCharsToAnalyze;
+  private boolean usePayloads = false;
   private LeafReader internalReader = null;
 
-
   public WeightedSpanTermExtractor() {
   }
 
@@ -384,7 +384,7 @@
 
       // Use MemoryIndex (index/invert this tokenStream now)
       if (internalReader == null) {
-        final MemoryIndex indexer = new MemoryIndex(true);
+        final MemoryIndex indexer = new MemoryIndex(true, usePayloads); // store offsets, and payloads when enabled
         if (cacheIt) {
           assert !cachedTokenStream;
           tokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
@@ -652,7 +652,15 @@
   public void setExpandMultiTermQuery(boolean expandMultiTermQuery) {
     this.expandMultiTermQuery = expandMultiTermQuery;
   }
-  
+
+  public boolean isUsePayloads() {
+    return usePayloads;
+  }
+
+  public void setUsePayloads(boolean usePayloads) {
+    this.usePayloads = usePayloads;
+  }
+
   public boolean isCachedTokenStream() {
     return cachedTokenStream;
   }
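
The two-argument MemoryIndex constructor used above takes (storeOffsets, storePayloads). A standalone sketch of the class outside the highlighter (analyzer choice arbitrary):

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.index.memory.MemoryIndex;
    import org.apache.lucene.search.TermQuery;

    public class MemoryIndexDemo {
      public static void main(String[] args) {
        MemoryIndex index = new MemoryIndex(true, true); // store offsets and payloads
        index.addField("body", "random words and words", new StandardAnalyzer());
        float score = index.search(new TermQuery(new Term("body", "words")));
        System.out.println(score > 0 ? "match" : "no match");
      }
    }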
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java
index ec7493c..3f73c73 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java
@@ -116,11 +116,6 @@
         private int baseDoc;
 
         @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return true;
-        }
-
-        @Override
         public void collect(int i) {
           bitset.set(this.baseDoc + i);
         }
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
index a437a8e..79f7050 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
@@ -17,6 +17,8 @@
  * limitations under the License.
  */
 
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -28,10 +30,17 @@
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
 
-import org.apache.lucene.analysis.*;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.BaseTokenStreamTestCase;
+import org.apache.lucene.analysis.CachingTokenFilter;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockPayloadAnalyzer;
+import org.apache.lucene.analysis.MockTokenFilter;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.Token;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
@@ -40,19 +49,41 @@
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.CommonTermsQuery;
-import org.apache.lucene.search.*;
 import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.FilteredQuery;
+import org.apache.lucene.search.FuzzyQuery;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiPhraseQuery;
+import org.apache.lucene.search.MultiTermQuery;
+import org.apache.lucene.search.PhraseQuery;
+import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.RegexpQuery;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TermRangeFilter;
+import org.apache.lucene.search.TermRangeQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.search.highlight.SynonymTokenizer.TestHighlightRunner;
 import org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.search.join.ToChildBlockJoinQuery;
 import org.apache.lucene.search.join.ToParentBlockJoinQuery;
-import org.apache.lucene.search.spans.*;
+import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
+import org.apache.lucene.search.spans.SpanNearQuery;
+import org.apache.lucene.search.spans.SpanNotQuery;
+import org.apache.lucene.search.spans.SpanOrQuery;
+import org.apache.lucene.search.spans.SpanPayloadCheckQuery;
+import org.apache.lucene.search.spans.SpanQuery;
+import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
@@ -1881,13 +1912,14 @@
     reader.close();
   }
 
-  /** If we have term vectors, we can highlight based on payloads */
+  /** We can highlight based on payloads. As of Lucene 5 this is supported via both term vectors and MemoryIndex. */
   public void testPayloadQuery() throws IOException, InvalidTokenOffsetsException {
     final String text = "random words and words";//"words" at positions 1 & 4
 
     Analyzer analyzer = new MockPayloadAnalyzer();//sets payload to "pos: X" (where X is position #)
     Directory dir = newDirectory(); 
     try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer))) {
+      writer.deleteAll();
       Document doc = writer.newDocument();
       FieldTypes fieldTypes = writer.getFieldTypes();
       fieldTypes.enableTermVectors(FIELD_NAME);
@@ -1902,12 +1934,16 @@
       Query query = new SpanPayloadCheckQuery(new SpanTermQuery(new Term(FIELD_NAME, "words")),
           Collections.singleton("pos: 1".getBytes("UTF-8")));//just match the first "word" occurrence
       IndexSearcher searcher = newSearcher(reader);
-      Scorer scorer = new QueryScorer(query, searcher.getIndexReader(), FIELD_NAME);
+      QueryScorer scorer = new QueryScorer(query, searcher.getIndexReader(), FIELD_NAME);
+      scorer.setUsePayloads(true);
       Highlighter h = new Highlighter(scorer);
 
       TopDocs hits = searcher.search(query, null, 10);
       assertEquals(1, hits.scoreDocs.length);
       TokenStream stream = TokenSources.getAnyTokenStream(searcher.getIndexReader(), 0, FIELD_NAME, analyzer);
+      if (random().nextBoolean()) {
+        stream = new CachingTokenFilter(stream); // conceals detection of TokenStreamFromTermVector
+      }
       String result = h.getBestFragment(stream, text);
       assertEquals("random <B>words</B> and words", result);//only highlight first "word"
     }
diff --git a/lucene/ivy-settings.xml b/lucene/ivy-settings.xml
index 8518e0d..024db07 100644
--- a/lucene/ivy-settings.xml
+++ b/lucene/ivy-settings.xml
@@ -33,7 +33,6 @@
   <resolvers>
     <ibiblio name="sonatype-releases" root="https://oss.sonatype.org/content/repositories/releases" m2compatible="true" />
     <ibiblio name="maven.restlet.org" root="http://maven.restlet.org" m2compatible="true" />
-    <ibiblio name="cloudera" root="http://repository.cloudera.com/artifactory/repo" m2compatible="true" />
     <ibiblio name="releases.cloudera.com" root="http://repository.cloudera.com/content/repositories/releases" m2compatible="true" />
     
     <!-- needed only for newer svnkit releases, e.g. 1.8.x -->
@@ -55,10 +54,9 @@
       <resolver ref="local"/>
       <!-- <resolver ref="local-maven-2" /> -->
       <resolver ref="main"/>
-      <resolver ref="cloudera"/>
-      <resolver ref="releases.cloudera.com"/>
-      <resolver ref="sonatype-releases" />
       <resolver ref="maven.restlet.org" />
+      <resolver ref="sonatype-releases" />
+      <resolver ref="releases.cloudera.com"/>
       <!-- <resolver ref="svnkit-releases" /> -->
       <resolver ref="working-chinese-mirror" />
     </chain>
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index f94ece4..6a2d53c 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -70,7 +70,7 @@
 /jakarta-regexp/jakarta-regexp = 1.4
 /javax.activation/activation = 1.1.1
 /javax.inject/javax.inject= 1
-/javax.servlet/javax.servlet-api = 3.0.1
+/javax.servlet/javax.servlet-api = 3.1.0
 /javax.servlet/servlet-api = 2.4
 /jdom/jdom = 1.0
 /joda-time/joda-time = 2.2
@@ -88,8 +88,9 @@
 /org.apache.avro/avro = 1.7.5
 /org.apache.commons/commons-compress = 1.8.1
 /org.apache.derby/derby = 10.9.1.0
+/org.apache.directory.server/apacheds-all = 2.0.0-M15
 
-org.apache.hadoop.version = 2.2.0
+org.apache.hadoop.version = 2.3.0
 /org.apache.hadoop/hadoop-annotations = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-auth = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-common = ${org.apache.hadoop.version}
@@ -101,6 +102,7 @@
 /org.apache.hadoop/hadoop-mapreduce-client-hs = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-mapreduce-client-jobclient = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-mapreduce-client-shuffle = ${org.apache.hadoop.version}
+/org.apache.hadoop/hadoop-minikdc = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-yarn-api = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-yarn-client = ${org.apache.hadoop.version}
 /org.apache.hadoop/hadoop-yarn-common = ${org.apache.hadoop.version}
@@ -126,19 +128,20 @@
 /org.apache.mahout/mahout-math = 0.6
 /org.apache.mrunit/mrunit = 1.0.0
 
-org.apache.pdfbox.version = 1.8.6
+org.apache.pdfbox.version = 1.8.8
 /org.apache.pdfbox/fontbox = ${org.apache.pdfbox.version}
 /org.apache.pdfbox/jempbox = ${org.apache.pdfbox.version}
 /org.apache.pdfbox/pdfbox = ${org.apache.pdfbox.version}
 
-org.apache.poi.version = 3.11-beta2
+org.apache.poi.version = 3.11
 /org.apache.poi/poi = ${org.apache.poi.version}
 /org.apache.poi/poi-ooxml = ${org.apache.poi.version}
 /org.apache.poi/poi-ooxml-schemas = ${org.apache.poi.version}
 /org.apache.poi/poi-scratchpad = ${org.apache.poi.version}
 
-org.apache.tika.version = 1.6
+org.apache.tika.version = 1.7
 /org.apache.tika/tika-core = ${org.apache.tika.version}
+/org.apache.tika/tika-java7 = ${org.apache.tika.version}
 /org.apache.tika/tika-parsers = ${org.apache.tika.version}
 /org.apache.tika/tika-xmp = ${org.apache.tika.version}
 
@@ -174,11 +177,11 @@
 /org.codehaus.jackson/jackson-jaxrs = ${org.codehaus.jackson.version}
 /org.codehaus.jackson/jackson-mapper-asl = ${org.codehaus.jackson.version}
 
-/org.codehaus.woodstox/wstx-asl = 3.2.7
+/org.codehaus.woodstox/stax2-api = 3.1.4
+/org.codehaus.woodstox/woodstox-core-asl = 4.4.1
 /org.easymock/easymock = 3.0
-/org.eclipse.jetty.orbit/javax.servlet = 3.0.0.v201112011016
 
-org.eclipse.jetty.version = 8.1.10.v20130312
+org.eclipse.jetty.version = 9.2.6.v20141205
 /org.eclipse.jetty/jetty-continuation = ${org.eclipse.jetty.version}
 /org.eclipse.jetty/jetty-deploy = ${org.eclipse.jetty.version}
 /org.eclipse.jetty/jetty-http = ${org.eclipse.jetty.version}
@@ -187,6 +190,7 @@
 /org.eclipse.jetty/jetty-security = ${org.eclipse.jetty.version}
 /org.eclipse.jetty/jetty-server = ${org.eclipse.jetty.version}
 /org.eclipse.jetty/jetty-servlet = ${org.eclipse.jetty.version}
+/org.eclipse.jetty/jetty-servlets = ${org.eclipse.jetty.version}
 /org.eclipse.jetty/jetty-start = ${org.eclipse.jetty.version}
 /org.eclipse.jetty/jetty-util = ${org.eclipse.jetty.version}
 /org.eclipse.jetty/jetty-webapp = ${org.eclipse.jetty.version}
@@ -219,13 +223,13 @@
 /org.ow2.asm/asm = ${org.ow2.asm.version}
 /org.ow2.asm/asm-commons = ${org.ow2.asm.version}
 
-org.restlet.jee.version = 2.1.1
+org.restlet.jee.version = 2.3.0
 /org.restlet.jee/org.restlet = ${org.restlet.jee.version}
 /org.restlet.jee/org.restlet.ext.servlet = ${org.restlet.jee.version}
 
 /org.simpleframework/simple-xml = 2.7
 
-org.slf4j.version = 1.7.6
+org.slf4j.version = 1.7.7
 /org.slf4j/jcl-over-slf4j = ${org.slf4j.version}
 /org.slf4j/jul-to-slf4j = ${org.slf4j.version}
 /org.slf4j/slf4j-api = ${org.slf4j.version}
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java
index 3d85349..4becf34 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/TermsCollector.java
@@ -45,11 +45,6 @@
     return collectorTerms;
   }
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return true;
-  }
-
   /**
    * Chooses the right {@link TermsCollector} implementation.
    *
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
index 9b6eb26..ce7f445 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java
@@ -133,21 +133,25 @@
 
       @Override
       public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-        SVInnerScorer scorer = (SVInnerScorer) bulkScorer(context, false, null);
-        if (scorer != null) {
-          return scorer.explain(doc);
+        Terms terms = context.reader().terms(field);
+        if (terms != null) {
+          segmentTermsEnum = terms.iterator(segmentTermsEnum);
+          BytesRef spare = new BytesRef();
+          DocsEnum docsEnum = null;
+          for (int i = 0; i < TermsIncludingScoreQuery.this.terms.size(); i++) {
+            if (segmentTermsEnum.seekExact(TermsIncludingScoreQuery.this.terms.get(ords[i], spare))) {
+              docsEnum = segmentTermsEnum.docs(null, docsEnum, DocsEnum.FLAG_NONE);
+              if (docsEnum.advance(doc) == doc) {
+                final float score = TermsIncludingScoreQuery.this.scores[ords[i]];
+                return new ComplexExplanation(true, score, "Score based on join value " + segmentTermsEnum.term().utf8ToString());
+              }
+            }
+          }
         }
         return new ComplexExplanation(false, 0.0f, "Not a match");
       }
 
       @Override
-      public boolean scoresDocsOutOfOrder() {
-        // We have optimized impls below if we are allowed
-        // to score out-of-order:
-        return true;
-      }
-
-      @Override
       public Query getQuery() {
         return TermsIncludingScoreQuery.this;
       }
@@ -179,143 +183,9 @@
           return new SVInOrderScorer(this, acceptDocs, segmentTermsEnum, context.reader().maxDoc(), cost);
         }
       }
-
-      @Override
-      public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs) throws IOException {
-
-        if (scoreDocsInOrder) {
-          return super.bulkScorer(context, scoreDocsInOrder, acceptDocs);
-        } else {
-          Terms terms = context.reader().terms(field);
-          if (terms == null) {
-            return null;
-          }
-          // what is the runtime...seems ok?
-          final long cost = context.reader().maxDoc() * terms.size();
-
-          segmentTermsEnum = terms.iterator(segmentTermsEnum);
-          // Optimized impls that take advantage of docs
-          // being allowed to be out of order:
-          if (multipleValuesPerDocument) {
-            return new MVInnerScorer(this, acceptDocs, segmentTermsEnum, context.reader().maxDoc(), cost);
-          } else {
-            return new SVInnerScorer(this, acceptDocs, segmentTermsEnum, cost);
-          }
-        }
-      }
     };
   }
 
-  // This impl assumes that the 'join' values are used uniquely per doc per field. Used for one to many relations.
-  class SVInnerScorer extends BulkScorer {
-
-    final BytesRef spare = new BytesRef();
-    final Bits acceptDocs;
-    final TermsEnum termsEnum;
-    final long cost;
-
-    int upto;
-    DocsEnum docsEnum;
-    DocsEnum reuse;
-    int scoreUpto;
-    int doc;
-
-    SVInnerScorer(Weight weight, Bits acceptDocs, TermsEnum termsEnum, long cost) {
-      this.acceptDocs = acceptDocs;
-      this.termsEnum = termsEnum;
-      this.cost = cost;
-      this.doc = -1;
-    }
-
-    @Override
-    public boolean score(LeafCollector collector, int max) throws IOException {
-      FakeScorer fakeScorer = new FakeScorer();
-      collector.setScorer(fakeScorer);
-      if (doc == -1) {
-        doc = nextDocOutOfOrder();
-      }
-      while(doc < max) {
-        fakeScorer.doc = doc;
-        fakeScorer.score = scores[ords[scoreUpto]];
-        collector.collect(doc);
-        doc = nextDocOutOfOrder();
-      }
-
-      return doc != DocsEnum.NO_MORE_DOCS;
-    }
-
-    int nextDocOutOfOrder() throws IOException {
-      while (true) {
-        if (docsEnum != null) {
-          int docId = docsEnumNextDoc();
-          if (docId == DocIdSetIterator.NO_MORE_DOCS) {
-            docsEnum = null;
-          } else {
-            return doc = docId;
-          }
-        }
-
-        if (upto == terms.size()) {
-          return doc = DocIdSetIterator.NO_MORE_DOCS;
-        }
-
-        scoreUpto = upto;
-        if (termsEnum.seekExact(terms.get(ords[upto++], spare))) {
-          docsEnum = reuse = termsEnum.docs(acceptDocs, reuse, DocsEnum.FLAG_NONE);
-        }
-      }
-    }
-
-    protected int docsEnumNextDoc() throws IOException {
-      return docsEnum.nextDoc();
-    }
-
-    private Explanation explain(int target) throws IOException {
-      int docId;
-      do {
-        docId = nextDocOutOfOrder();
-        if (docId < target) {
-          int tempDocId = docsEnum.advance(target);
-          if (tempDocId == target) {
-            docId = tempDocId;
-            break;
-          }
-        } else if (docId == target) {
-          break;
-        }
-        docsEnum = null; // goto the next ord.
-      } while (docId != DocIdSetIterator.NO_MORE_DOCS);
-
-      return new ComplexExplanation(true, scores[ords[scoreUpto]], "Score based on join value " + termsEnum.term().utf8ToString());
-    }
-  }
-
-  // This impl that tracks whether a docid has already been emitted. This check makes sure that docs aren't emitted
-  // twice for different join values. This means that the first encountered join value determines the score of a document
-  // even if other join values yield a higher score.
-  class MVInnerScorer extends SVInnerScorer {
-
-    final FixedBitSet alreadyEmittedDocs;
-
-    MVInnerScorer(Weight weight, Bits acceptDocs, TermsEnum termsEnum, int maxDoc, long cost) {
-      super(weight, acceptDocs, termsEnum, cost);
-      alreadyEmittedDocs = new FixedBitSet(maxDoc);
-    }
-
-    @Override
-    protected int docsEnumNextDoc() throws IOException {
-      while (true) {
-        int docId = docsEnum.nextDoc();
-        if (docId == DocIdSetIterator.NO_MORE_DOCS) {
-          return docId;
-        }
-        if (!alreadyEmittedDocs.getAndSet(docId)) {
-          return docId;//if it wasn't previously set, return it
-        }
-      }
-    }
-  }
-
   class SVInOrderScorer extends Scorer {
 
     final DocIdSetIterator matchingDocsIterator;
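
With the out-of-order BulkScorer implementations deleted, explain() above re-derives a match directly with the seek-and-advance idiom. A condensed, standalone version of that check (hypothetical helper, using the same postings APIs as the patch):

    import java.io.IOException;
    import org.apache.lucene.index.DocsEnum;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.index.Terms;
    import org.apache.lucene.index.TermsEnum;
    import org.apache.lucene.util.BytesRef;

    public class TermMatchCheck {
      /** True if segment-local docID 'doc' contains any of 'terms' in 'field'. */
      static boolean matchesAny(LeafReader reader, String field, BytesRef[] terms, int doc)
          throws IOException {
        Terms t = reader.terms(field);
        if (t == null) {
          return false;
        }
        TermsEnum termsEnum = t.iterator(null);
        DocsEnum docsEnum = null;                  // reused across terms
        for (BytesRef term : terms) {
          if (termsEnum.seekExact(term)) {
            docsEnum = termsEnum.docs(null, docsEnum, DocsEnum.FLAG_NONE);
            if (docsEnum.advance(doc) == doc) {    // postings are docID-ordered
              return true;
            }
          }
        }
        return false;
      }
    }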
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java
index 0bdbc14..a48b556 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/TermsWithScoreCollector.java
@@ -57,11 +57,6 @@
     this.scorer = scorer;
   }
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return true;
-  }
-
   /**
    * Chooses the right {@link TermsWithScoreCollector} implementation.
    *
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
index 3d6d8f0..5a39106 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java
@@ -154,11 +154,6 @@
       throw new UnsupportedOperationException(getClass().getName() +
                                               " cannot explain match on parent document");
     }
-
-    @Override
-    public boolean scoresDocsOutOfOrder() {
-      return false;
-    }
   }
 
   static class ToChildBlockJoinScorer extends Scorer {
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java
index 6fe1fc4..36fc37e 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinCollector.java
@@ -38,6 +38,9 @@
  *  IndexWriter#updateDocuments}).  Ie, the join is computed
  *  at index time.
  *
+ *  <p>This collector MUST be used with {@link ToParentBlockJoinIndexSearcher}
+ *  in order to work correctly.
+ *
  *  <p>The parent Sort must only use
  *  fields from the parent documents; sorting by field in
  *  the child documents is not supported.</p>
@@ -74,7 +77,7 @@
  *
  * @lucene.experimental
  */
-public class ToParentBlockJoinCollector extends SimpleCollector {
+public class ToParentBlockJoinCollector implements Collector {
 
   private final Sort sort;
 
@@ -83,16 +86,11 @@
   private final Map<Query,Integer> joinQueryID = new HashMap<>();
   private final int numParentHits;
   private final FieldValueHitQueue<OneGroup> queue;
-  private final FieldComparator[] comparators;
-  private final int[] reverseMul;
-  private final int compEnd;
+  private final FieldComparator<?>[] comparators;
   private final boolean trackMaxScore;
   private final boolean trackScores;
 
-  private int docBase;
   private ToParentBlockJoinQuery.BlockJoinScorer[] joinScorers = new ToParentBlockJoinQuery.BlockJoinScorer[0];
-  private LeafReaderContext currentReaderContext;
-  private Scorer scorer;
   private boolean queueFull;
 
   private OneGroup bottom;
@@ -116,8 +114,6 @@
     this.numParentHits = numParentHits;
     queue = FieldValueHitQueue.create(sort.getSort(), numParentHits);
     comparators = queue.getComparators();
-    reverseMul = queue.getReverseMul();
-    compEnd = comparators.length - 1;
   }
   
   private static final class OneGroup extends FieldValueHitQueue.Entry {
@@ -143,143 +139,167 @@
   }
 
   @Override
-  public void collect(int parentDoc) throws IOException {
-    //System.out.println("\nC parentDoc=" + parentDoc);
-    totalHitCount++;
+  public LeafCollector getLeafCollector(final LeafReaderContext context)
+      throws IOException {
+    final LeafFieldComparator[] comparators = queue.getComparators(context);
+    final int[] reverseMul = queue.getReverseMul();
+    final int docBase = context.docBase;
+    return new LeafCollector() {
 
-    float score = Float.NaN;
+      private Scorer scorer;
 
-    if (trackMaxScore) {
-      score = scorer.score();
-      maxScore = Math.max(maxScore, score);
-    }
-
-    // TODO: we could sweep all joinScorers here and
-    // aggregate total child hit count, so we can fill this
-    // in getTopGroups (we wire it to 0 now)
-
-    if (queueFull) {
-      //System.out.println("  queueFull");
-      // Fastmatch: return if this hit is not competitive
-      for (int i = 0;; i++) {
-        final int c = reverseMul[i] * comparators[i].compareBottom(parentDoc);
-        if (c < 0) {
-          // Definitely not competitive.
-          //System.out.println("    skip");
-          return;
-        } else if (c > 0) {
-          // Definitely competitive.
-          break;
-        } else if (i == compEnd) {
-          // Here c=0. If we're at the last comparator, this doc is not
-          // competitive, since docs are visited in doc Id order, which means
-          // this doc cannot compete with any other document in the queue.
-          //System.out.println("    skip");
-          return;
+      @Override
+      public void setScorer(Scorer scorer) throws IOException {
+        //System.out.println("C.setScorer scorer=" + scorer);
+        // Since we invoke .score(), and the comparators likely
+        // do as well, cache it so it's only "really" computed
+        // once:
+        if (scorer instanceof ScoreCachingWrappingScorer == false) {
+          scorer = new ScoreCachingWrappingScorer(scorer);
         }
-      }
-
-      //System.out.println("    competes!  doc=" + (docBase + parentDoc));
-
-      // This hit is competitive - replace bottom element in queue & adjustTop
-      for (int i = 0; i < comparators.length; i++) {
-        comparators[i].copy(bottom.slot, parentDoc);
-      }
-      if (!trackMaxScore && trackScores) {
-        score = scorer.score();
-      }
-      bottom.doc = docBase + parentDoc;
-      bottom.readerContext = currentReaderContext;
-      bottom.score = score;
-      copyGroups(bottom);
-      bottom = queue.updateTop();
-
-      for (int i = 0; i < comparators.length; i++) {
-        comparators[i].setBottom(bottom.slot);
-      }
-    } else {
-      // Startup transient: queue is not yet full:
-      final int comparatorSlot = totalHitCount - 1;
-
-      // Copy hit into queue
-      for (int i = 0; i < comparators.length; i++) {
-        comparators[i].copy(comparatorSlot, parentDoc);
-      }
-      //System.out.println("  startup: new OG doc=" + (docBase+parentDoc));
-      if (!trackMaxScore && trackScores) {
-        score = scorer.score();
-      }
-      final OneGroup og = new OneGroup(comparatorSlot, docBase+parentDoc, score, joinScorers.length, trackScores);
-      og.readerContext = currentReaderContext;
-      copyGroups(og);
-      bottom = queue.add(og);
-      queueFull = totalHitCount == numParentHits;
-      if (queueFull) {
-        // End of startup transient: queue just filled up:
-        for (int i = 0; i < comparators.length; i++) {
-          comparators[i].setBottom(bottom.slot);
+        this.scorer = scorer;
+        for (LeafFieldComparator comparator : comparators) {
+          comparator.setScorer(scorer);
         }
-      }
-    }
-  }
+        Arrays.fill(joinScorers, null);
 
-  // Pulls out child doc and scores for all join queries:
-  private void copyGroups(OneGroup og) {
-    // While rare, it's possible top arrays could be too
-    // short if join query had null scorer on first
-    // segment(s) but then became non-null on later segments
-    final int numSubScorers = joinScorers.length;
-    if (og.docs.length < numSubScorers) {
-      // While rare, this could happen if join query had
-      // null scorer on first segment(s) but then became
-      // non-null on later segments
-      og.docs = ArrayUtil.grow(og.docs);
-    }
-    if (og.counts.length < numSubScorers) {
-      og.counts = ArrayUtil.grow(og.counts);
-    }
-    if (trackScores && og.scores.length < numSubScorers) {
-      og.scores = ArrayUtil.grow(og.scores);
-    }
-
-    //System.out.println("\ncopyGroups parentDoc=" + og.doc);
-    for(int scorerIDX = 0;scorerIDX < numSubScorers;scorerIDX++) {
-      final ToParentBlockJoinQuery.BlockJoinScorer joinScorer = joinScorers[scorerIDX];
-      //System.out.println("  scorer=" + joinScorer);
-      if (joinScorer != null && docBase + joinScorer.getParentDoc() == og.doc) {
-        og.counts[scorerIDX] = joinScorer.getChildCount();
-        //System.out.println("    count=" + og.counts[scorerIDX]);
-        og.docs[scorerIDX] = joinScorer.swapChildDocs(og.docs[scorerIDX]);
-        assert og.docs[scorerIDX].length >= og.counts[scorerIDX]: "length=" + og.docs[scorerIDX].length + " vs count=" + og.counts[scorerIDX];
-        //System.out.println("    len=" + og.docs[scorerIDX].length);
-        /*
-          for(int idx=0;idx<og.counts[scorerIDX];idx++) {
-          System.out.println("    docs[" + idx + "]=" + og.docs[scorerIDX][idx]);
+        Queue<Scorer> queue = new LinkedList<>();
+        //System.out.println("\nqueue: add top scorer=" + scorer);
+        queue.add(scorer);
+        while ((scorer = queue.poll()) != null) {
+          //System.out.println("  poll: " + scorer + "; " + scorer.getWeight().getQuery());
+          if (scorer instanceof ToParentBlockJoinQuery.BlockJoinScorer) {
+            enroll((ToParentBlockJoinQuery) scorer.getWeight().getQuery(), (ToParentBlockJoinQuery.BlockJoinScorer) scorer);
           }
-        */
-        if (trackScores) {
-          //System.out.println("    copy scores");
-          og.scores[scorerIDX] = joinScorer.swapChildScores(og.scores[scorerIDX]);
-          assert og.scores[scorerIDX].length >= og.counts[scorerIDX]: "length=" + og.scores[scorerIDX].length + " vs count=" + og.counts[scorerIDX];
+
+          for (ChildScorer sub : scorer.getChildren()) {
+            //System.out.println("  add sub: " + sub.child + "; " + sub.child.getWeight().getQuery());
+            queue.add(sub.child);
+          }
         }
-      } else {
-        og.counts[scorerIDX] = 0;
       }
-    }
-  }
+      
+      @Override
+      public void collect(int parentDoc) throws IOException {
+      //System.out.println("\nC parentDoc=" + parentDoc);
+        totalHitCount++;
 
-  @Override
-  protected void doSetNextReader(LeafReaderContext context) throws IOException {
-    currentReaderContext = context;
-    docBase = context.docBase;
-    for (int compIDX = 0; compIDX < comparators.length; compIDX++) {
-      queue.setComparator(compIDX, comparators[compIDX].setNextReader(context));
-    }
-  }
+        float score = Float.NaN;
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
+        if (trackMaxScore) {
+          score = scorer.score();
+          maxScore = Math.max(maxScore, score);
+        }
+
+        // TODO: we could sweep all joinScorers here and
+        // aggregate total child hit count, so we can fill this
+        // in getTopGroups (we wire it to 0 now)
+
+        if (queueFull) {
+          //System.out.println("  queueFull");
+          // Fastmatch: return if this hit is not competitive
+          int c = 0;
+          for (int i = 0; i < comparators.length; ++i) {
+            c = reverseMul[i] * comparators[i].compareBottom(parentDoc);
+            if (c != 0) {
+              break;
+            }
+          }
+          if (c <= 0) { // in case of equality, this hit is not competitive as docs are visited in order
+            // Definitely not competitive.
+            //System.out.println("    skip");
+            return;
+          }
+
+          //System.out.println("    competes!  doc=" + (docBase + parentDoc));
+
+          // This hit is competitive - replace bottom element in queue & adjustTop
+          for (LeafFieldComparator comparator : comparators) {
+            comparator.copy(bottom.slot, parentDoc);
+          }
+          if (!trackMaxScore && trackScores) {
+            score = scorer.score();
+          }
+          bottom.doc = docBase + parentDoc;
+          bottom.readerContext = context;
+          bottom.score = score;
+          copyGroups(bottom);
+          bottom = queue.updateTop();
+
+          for (LeafFieldComparator comparator : comparators) {
+            comparator.setBottom(bottom.slot);
+          }
+        } else {
+          // Startup transient: queue is not yet full:
+          final int comparatorSlot = totalHitCount - 1;
+
+          // Copy hit into queue
+          for (LeafFieldComparator comparator : comparators) {
+            comparator.copy(comparatorSlot, parentDoc);
+          }
+          //System.out.println("  startup: new OG doc=" + (docBase+parentDoc));
+          if (!trackMaxScore && trackScores) {
+            score = scorer.score();
+          }
+          final OneGroup og = new OneGroup(comparatorSlot, docBase+parentDoc, score, joinScorers.length, trackScores);
+          og.readerContext = context;
+          copyGroups(og);
+          bottom = queue.add(og);
+          queueFull = totalHitCount == numParentHits;
+          if (queueFull) {
+            // End of startup transient: queue just filled up:
+            for (LeafFieldComparator comparator : comparators) {
+              comparator.setBottom(bottom.slot);
+            }
+          }
+        }
+      }
+      
+      // Pulls out child doc and scores for all join queries:
+      private void copyGroups(OneGroup og) {
+        // While rare, it's possible top arrays could be too
+        // short if join query had null scorer on first
+        // segment(s) but then became non-null on later segments
+        final int numSubScorers = joinScorers.length;
+        if (og.docs.length < numSubScorers) {
+          og.docs = ArrayUtil.grow(og.docs);
+        }
+        if (og.counts.length < numSubScorers) {
+          og.counts = ArrayUtil.grow(og.counts);
+        }
+        if (trackScores && og.scores.length < numSubScorers) {
+          og.scores = ArrayUtil.grow(og.scores);
+        }
+
+        //System.out.println("\ncopyGroups parentDoc=" + og.doc);
+        for(int scorerIDX = 0;scorerIDX < numSubScorers;scorerIDX++) {
+          final ToParentBlockJoinQuery.BlockJoinScorer joinScorer = joinScorers[scorerIDX];
+          //System.out.println("  scorer=" + joinScorer);
+          if (joinScorer != null && docBase + joinScorer.getParentDoc() == og.doc) {
+            og.counts[scorerIDX] = joinScorer.getChildCount();
+            //System.out.println("    count=" + og.counts[scorerIDX]);
+            og.docs[scorerIDX] = joinScorer.swapChildDocs(og.docs[scorerIDX]);
+            assert og.docs[scorerIDX].length >= og.counts[scorerIDX]: "length=" + og.docs[scorerIDX].length + " vs count=" + og.counts[scorerIDX];
+            //System.out.println("    len=" + og.docs[scorerIDX].length);
+            /*
+              for(int idx=0;idx<og.counts[scorerIDX];idx++) {
+              System.out.println("    docs[" + idx + "]=" + og.docs[scorerIDX][idx]);
+              }
+            */
+            if (trackScores) {
+              //System.out.println("    copy scores");
+              og.scores[scorerIDX] = joinScorer.swapChildScores(og.scores[scorerIDX]);
+              assert og.scores[scorerIDX].length >= og.counts[scorerIDX]: "length=" + og.scores[scorerIDX].length + " vs count=" + og.counts[scorerIDX];
+            }
+          } else {
+            og.counts[scorerIDX] = 0;
+          }
+        }
+      }
+    };
   }
 
   private void enroll(ToParentBlockJoinQuery query, ToParentBlockJoinQuery.BlockJoinScorer scorer) {
@@ -296,34 +316,6 @@
       joinScorers[slot] = scorer;
     }
   }
-  
-  @Override
-  public void setScorer(Scorer scorer) {
-    //System.out.println("C.setScorer scorer=" + scorer);
-    // Since we invoke .score(), and the comparators likely
-    // do as well, cache it so it's only "really" computed
-    // once:
-    this.scorer = new ScoreCachingWrappingScorer(scorer);
-    for (int compIDX = 0; compIDX < comparators.length; compIDX++) {
-      comparators[compIDX].setScorer(this.scorer);
-    }
-    Arrays.fill(joinScorers, null);
-
-    Queue<Scorer> queue = new LinkedList<>();
-    //System.out.println("\nqueue: add top scorer=" + scorer);
-    queue.add(scorer);
-    while ((scorer = queue.poll()) != null) {
-      //System.out.println("  poll: " + scorer + "; " + scorer.getWeight().getQuery());
-      if (scorer instanceof ToParentBlockJoinQuery.BlockJoinScorer) {
-        enroll((ToParentBlockJoinQuery) scorer.getWeight().getQuery(), (ToParentBlockJoinQuery.BlockJoinScorer) scorer);
-      }
-
-      for (ChildScorer sub : scorer.getChildren()) {
-        //System.out.println("  add sub: " + sub.child + "; " + sub.child.getWeight().getQuery());
-        queue.add(sub.child);
-      }
-    }
-  }
 
   private OneGroup[] sortedGroups;
 
@@ -414,14 +406,14 @@
         if (!trackScores) {
           throw new IllegalArgumentException("cannot sort by relevance within group: trackScores=false");
         }
-        collector = TopScoreDocCollector.create(numDocsInGroup, true);
+        collector = TopScoreDocCollector.create(numDocsInGroup);
       } else {
         // Sort by fields
-        collector = TopFieldCollector.create(withinGroupSort, numDocsInGroup, fillSortFields, trackScores, trackMaxScore, true);
+        collector = TopFieldCollector.create(withinGroupSort, numDocsInGroup, fillSortFields, trackScores, trackMaxScore);
       }
 
-      collector.setScorer(fakeScorer);
-      collector.getLeafCollector(og.readerContext);
+      LeafCollector leafCollector = collector.getLeafCollector(og.readerContext);
+      leafCollector.setScorer(fakeScorer);
       for (int docIDX = 0; docIDX < numChildDocs; docIDX++) {
         //System.out.println("docIDX=" + docIDX + " vs " + og.docs[slot].length);
         final int doc = og.docs[slot][docIDX];
@@ -429,7 +421,7 @@
         if (trackScores) {
           fakeScorer.score = og.scores[slot][docIDX];
         }
-        collector.collect(doc);
+        leafCollector.collect(doc);
       }
       totalGroupedHitCount += numChildDocs;
 
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinFieldComparator.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinFieldComparator.java
index 72743a6..04ef679 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinFieldComparator.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinFieldComparator.java
@@ -22,6 +22,8 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
+import org.apache.lucene.search.SimpleFieldComparator;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
 
@@ -31,13 +33,14 @@
  *
  * @lucene.experimental
  */
-public abstract class ToParentBlockJoinFieldComparator extends FieldComparator<Object> {
+public abstract class ToParentBlockJoinFieldComparator extends SimpleFieldComparator<Object> implements LeafFieldComparator { // repeat LeafFieldComparator for javadocs
 
   private final BitDocIdSetFilter parentFilter;
   private final BitDocIdSetFilter childFilter;
   final int spareSlot;
 
   FieldComparator<Object> wrappedComparator;
+  LeafFieldComparator wrappedLeafComparator;
   BitSet parentDocuments;
   BitSet childDocuments;
 
@@ -55,7 +58,7 @@
 
   @Override
   public void setBottom(int slot) {
-    wrappedComparator.setBottom(slot);
+    wrappedLeafComparator.setBottom(slot);
   }
 
   @Override
@@ -64,7 +67,7 @@
   }
 
   @Override
-  public FieldComparator<Object> setNextReader(LeafReaderContext context) throws IOException {
+  protected void doSetNextReader(LeafReaderContext context) throws IOException {
     BitDocIdSet children = childFilter.getDocIdSet(context);
     if (children == null) {
       childDocuments = null;
@@ -77,8 +80,7 @@
     } else {
       parentDocuments = parents.bits();
     }
-    wrappedComparator = wrappedComparator.setNextReader(context);
-    return this;
+    wrappedLeafComparator = wrappedComparator.getLeafComparator(context);
   }
 
   @Override
@@ -90,12 +92,12 @@
   * Concrete implementation of {@link ToParentBlockJoinSortField} that sorts the parent docs with the lowest values
    * in the child / nested docs first.
    */
-  public static final class Lowest extends ToParentBlockJoinFieldComparator {
+  public static final class Lowest extends ToParentBlockJoinFieldComparator implements LeafFieldComparator {
 
     /**
      * Create ToParentBlockJoinFieldComparator.Lowest
      *
-     * @param wrappedComparator The {@link FieldComparator} on the child / nested level.
+     * @param wrappedComparator The {@link LeafFieldComparator} on the child / nested level.
      * @param parentFilter Filter that identifies the parent documents.
     * @param childFilter Filter that defines which child / nested documents participate in sorting.
      * @param spareSlot The extra slot inside the wrapped comparator that is used to compare which nested document
@@ -119,7 +121,7 @@
       }
 
       // We only need to emit a single cmp value for any matching child doc
-      int cmp = wrappedComparator.compareBottom(childDoc);
+      int cmp = wrappedLeafComparator.compareBottom(childDoc);
       if (cmp > 0) {
         return cmp;
       }
@@ -129,7 +131,7 @@
         if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
           return cmp;
         }
-        int cmp1 = wrappedComparator.compareBottom(childDoc);
+        int cmp1 = wrappedLeafComparator.compareBottom(childDoc);
         if (cmp1 > 0) {
           return cmp1;
         } else {
@@ -152,23 +154,22 @@
       if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
         return;
       }
-      wrappedComparator.copy(spareSlot, childDoc);
-      wrappedComparator.copy(slot, childDoc);
+      wrappedLeafComparator.copy(spareSlot, childDoc);
+      wrappedLeafComparator.copy(slot, childDoc);
 
       while (true) {
         childDoc = childDocuments.nextSetBit(childDoc + 1);
         if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
           return;
         }
-        wrappedComparator.copy(spareSlot, childDoc);
+        wrappedLeafComparator.copy(spareSlot, childDoc);
         if (wrappedComparator.compare(spareSlot, slot) < 0) {
-          wrappedComparator.copy(slot, childDoc);
+          wrappedLeafComparator.copy(slot, childDoc);
         }
       }
     }
 
     @Override
-    @SuppressWarnings("unchecked")
     public int compareTop(int parentDoc) throws IOException {
       if (parentDoc == 0 || parentDocuments == null || childDocuments == null) {
         return 0;
@@ -182,7 +183,7 @@
       }
 
       // We only need to emit a single cmp value for any matching child doc
-      int cmp = wrappedComparator.compareBottom(childDoc);
+      int cmp = wrappedLeafComparator.compareBottom(childDoc);
       if (cmp > 0) {
         return cmp;
       }
@@ -192,7 +193,7 @@
         if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
           return cmp;
         }
-        int cmp1 = wrappedComparator.compareTop(childDoc);
+        int cmp1 = wrappedLeafComparator.compareTop(childDoc);
         if (cmp1 > 0) {
           return cmp1;
         } else {
@@ -209,12 +210,12 @@
   * Concrete implementation of {@link ToParentBlockJoinSortField} that sorts the parent docs with the highest values
    * in the child / nested docs first.
    */
-  public static final class Highest extends ToParentBlockJoinFieldComparator {
+  public static final class Highest extends ToParentBlockJoinFieldComparator implements LeafFieldComparator {
 
     /**
      * Create ToParentBlockJoinFieldComparator.Highest
      *
-     * @param wrappedComparator The {@link FieldComparator} on the child / nested level.
+     * @param wrappedComparator The {@link LeafFieldComparator} on the child / nested level.
      * @param parentFilter Filter that identifies the parent documents.
     * @param childFilter Filter that defines which child / nested documents participate in sorting.
      * @param spareSlot The extra slot inside the wrapped comparator that is used to compare which nested document
@@ -236,7 +237,7 @@
         return 0;
       }
 
-      int cmp = wrappedComparator.compareBottom(childDoc);
+      int cmp = wrappedLeafComparator.compareBottom(childDoc);
       if (cmp < 0) {
         return cmp;
       }
@@ -246,7 +247,7 @@
         if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
           return cmp;
         }
-        int cmp1 = wrappedComparator.compareBottom(childDoc);
+        int cmp1 = wrappedLeafComparator.compareBottom(childDoc);
         if (cmp1 < 0) {
           return cmp1;
         } else {
@@ -268,23 +269,22 @@
       if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
         return;
       }
-      wrappedComparator.copy(spareSlot, childDoc);
-      wrappedComparator.copy(slot, childDoc);
+      wrappedLeafComparator.copy(spareSlot, childDoc);
+      wrappedLeafComparator.copy(slot, childDoc);
 
       while (true) {
         childDoc = childDocuments.nextSetBit(childDoc + 1);
         if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
           return;
         }
-        wrappedComparator.copy(spareSlot, childDoc);
+        wrappedLeafComparator.copy(spareSlot, childDoc);
         if (wrappedComparator.compare(spareSlot, slot) > 0) {
-          wrappedComparator.copy(slot, childDoc);
+          wrappedLeafComparator.copy(slot, childDoc);
         }
       }
     }
 
     @Override
-    @SuppressWarnings("unchecked")
     public int compareTop(int parentDoc) throws IOException {
       if (parentDoc == 0 || parentDocuments == null || childDocuments == null) {
         return 0;
@@ -296,7 +296,7 @@
         return 0;
       }
 
-      int cmp = wrappedComparator.compareBottom(childDoc);
+      int cmp = wrappedLeafComparator.compareBottom(childDoc);
       if (cmp < 0) {
         return cmp;
       }
@@ -306,7 +306,7 @@
         if (childDoc >= parentDoc || childDoc == DocIdSetIterator.NO_MORE_DOCS) {
           return cmp;
         }
-        int cmp1 = wrappedComparator.compareTop(childDoc);
+        int cmp1 = wrappedLeafComparator.compareTop(childDoc);
         if (cmp1 < 0) {
           return cmp1;
         } else {
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinIndexSearcher.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinIndexSearcher.java
new file mode 100644
index 0000000..31a0463
--- /dev/null
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinIndexSearcher.java
@@ -0,0 +1,69 @@
+package org.apache.lucene.search.join;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+
+/**
+ * An {@link IndexSearcher} to use in conjunction with
+ * {@link ToParentBlockJoinCollector}.
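+ *
+ * <p>A usage sketch (the query, sort, and collector arguments are illustrative;
+ * see TestBlockJoin for a complete example):
+ * <pre class="prettyprint">
+ * IndexSearcher searcher = new ToParentBlockJoinIndexSearcher(reader);
+ * ToParentBlockJoinCollector c = new ToParentBlockJoinCollector(sort, 10, true, true);
+ * searcher.search(parentQuery, c);
+ * TopGroups&lt;Integer&gt; groups = c.getTopGroups(childJoinQuery, null, 0, 10, 0, false);
+ * </pre>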
+ */
+public class ToParentBlockJoinIndexSearcher extends IndexSearcher {
+
+  /** Creates a searcher searching the provided index. Search on individual
+   *  segments will be run in the provided {@link ExecutorService}.
+   * @see IndexSearcher#IndexSearcher(IndexReader, ExecutorService) */
+  public ToParentBlockJoinIndexSearcher(IndexReader r, ExecutorService executor) {
+    super(r, executor);
+  }
+
+  /** Creates a searcher searching the provided index.
+   * @see IndexSearcher#IndexSearcher(IndexReader) */
+  public ToParentBlockJoinIndexSearcher(IndexReader r) {
+    super(r);
+  }
+
+  @Override
+  protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
+    for (LeafReaderContext ctx : leaves) { // search each subreader
+      // we force the use of Scorer (not BulkScorer) to make sure
+      // that the scorer passed to LeafCollector.setScorer supports
+      // Scorer.getChildren
+      Scorer scorer = weight.scorer(ctx, ctx.reader().getLiveDocs());
+      if (scorer != null) {
+        final LeafCollector leafCollector = collector.getLeafCollector(ctx);
+        leafCollector.setScorer(scorer);
+        for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
+          leafCollector.collect(doc);
+        }
+      }
+    }
+  }
+
+}
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
index 39ac46d..865e5e5 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java
@@ -194,11 +194,6 @@
       }
       return new ComplexExplanation(false, 0.0f, "Not a match");
     }
-
-    @Override
-    public boolean scoresDocsOutOfOrder() {
-      return false;
-    }
   }
 
   static class BlockJoinScorer extends Scorer {
diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java
index b4730a7..4d6a6ed 100644
--- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java
+++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinSortField.java
@@ -18,6 +18,7 @@
  */
 
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.SortField;
 
 import java.io.IOException;
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
index 94b7a4e..29c9b1d 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java
@@ -1323,6 +1323,7 @@
     IndexReader r = w.getReader();
     w.close();
 
+    IndexSearcher searcher = new ToParentBlockJoinIndexSearcher(r);
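+    // ToParentBlockJoinCollector needs Scorer.getChildren(), which this searcher guarantees by never using a BulkScorer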
     Query childQuery = new TermQuery(new Term("childText", "text"));
     BitDocIdSetFilter parentsFilter = new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("isParent", "yes"))));
     ToParentBlockJoinQuery childJoinQuery = new ToParentBlockJoinQuery(childQuery, parentsFilter, ScoreMode.Avg);
@@ -1332,7 +1333,7 @@
 
     ToParentBlockJoinCollector c = new ToParentBlockJoinCollector(new Sort(new SortField("parentID", SortField.Type.STRING)),
                                                                   10, true, true);
-    newSearcher(r).search(parentQuery, c);
+    searcher.search(parentQuery, c);
     TopGroups<Integer> groups = c.getTopGroups(childJoinQuery, null, 0, 10, 0, false);
 
     // Two parents:
@@ -1388,6 +1389,8 @@
     IndexReader r = w.getReader();
     w.close();
 
+    IndexSearcher searcher = new ToParentBlockJoinIndexSearcher(r);
+    
     // never matches:
     Query childQuery = new TermQuery(new Term("childText", "bogus"));
     BitDocIdSetFilter parentsFilter = new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("isParent", "yes"))));
@@ -1398,7 +1401,7 @@
 
     ToParentBlockJoinCollector c = new ToParentBlockJoinCollector(new Sort(new SortField("parentID", SortField.Type.STRING)),
                                                                   10, true, true);
-    newSearcher(r).search(parentQuery, c);
+    searcher.search(parentQuery, c);
     TopGroups<Integer> groups = c.getTopGroups(childJoinQuery, null, 0, 10, 0, false);
 
     // Two parents:
diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
index db1af4a..605c14f 100644
--- a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
+++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java
@@ -49,9 +49,12 @@
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.FilterLeafCollector;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
@@ -291,10 +294,6 @@
             assertFalse("optimized bulkScorer was not used for join query embedded in boolean query!", sawFive);
           }
         }
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return true;
-        }
       });
 
     indexSearcher.getIndexReader().close();
@@ -418,8 +417,7 @@
           dir,
           newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy())
       );
-      final boolean scoreDocsInOrder = TestJoinUtil.random().nextBoolean();
-      IndexIterationContext context = createContext(numberOfDocumentsToIndex, w, multipleValuesPerDocument, scoreDocsInOrder);
+      IndexIterationContext context = createContext(numberOfDocumentsToIndex, w, multipleValuesPerDocument);
 
       IndexReader topLevelReader = w.getReader();
       w.close();
@@ -455,31 +453,21 @@
 
         // Need to know all documents that have matches. TopDocs doesn't give me that and then I'd be also testing TopDocsCollector...
         final BitSet actualResult = new FixedBitSet(indexSearcher.getIndexReader().maxDoc());
-        final TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(10, false);
-        indexSearcher.search(joinQuery, new SimpleCollector() {
-
-          int docBase;
+        final TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(10);
+        indexSearcher.search(joinQuery, new Collector() {
 
           @Override
-          public void collect(int doc) throws IOException {
-            actualResult.set(doc + docBase);
-            topScoreDocCollector.collect(doc);
-          }
+          public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+            final int docBase = context.docBase;
+            final LeafCollector in = topScoreDocCollector.getLeafCollector(context);
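+            // delegate to the top-docs collector, but also record each matching global doc id in the bit set: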
+            return new FilterLeafCollector(in) {
 
-          @Override
-          protected void doSetNextReader(LeafReaderContext context) throws IOException {
-            docBase = context.docBase;
-            topScoreDocCollector.getLeafCollector(context);
-          }
-
-          @Override
-          public void setScorer(Scorer scorer) throws IOException {
-            topScoreDocCollector.setScorer(scorer);
-          }
-
-          @Override
-          public boolean acceptsDocsOutOfOrder() {
-            return scoreDocsInOrder;
+              @Override
+              public void collect(int doc) throws IOException {
+                super.collect(doc);
+                actualResult.set(doc + docBase);
+              }
+            };
           }
         });
         // Asserting bit set...
@@ -523,11 +511,11 @@
     }
   }
 
-  private IndexIterationContext createContext(int nDocs, RandomIndexWriter writer, boolean multipleValuesPerDocument, boolean scoreDocsInOrder) throws IOException {
-    return createContext(nDocs, writer, writer, multipleValuesPerDocument, scoreDocsInOrder);
+  private IndexIterationContext createContext(int nDocs, RandomIndexWriter writer, boolean multipleValuesPerDocument) throws IOException {
+    return createContext(nDocs, writer, writer, multipleValuesPerDocument);
   }
 
-  private IndexIterationContext createContext(int nDocs, RandomIndexWriter fromWriter, RandomIndexWriter toWriter, boolean multipleValuesPerDocument, boolean scoreDocsInOrder) throws IOException {
+  private IndexIterationContext createContext(int nDocs, RandomIndexWriter fromWriter, RandomIndexWriter toWriter, boolean multipleValuesPerDocument) throws IOException {
     IndexIterationContext context = new IndexIterationContext();
     int numRandomValues = nDocs / 2;
     context.randomUniqueValues = new String[numRandomValues];
@@ -655,11 +643,6 @@
           public void setScorer(Scorer scorer) {
             this.scorer = scorer;
           }
-
-          @Override
-          public boolean acceptsDocsOutOfOrder() {
-            return false;
-          }
         });
       } else {
         fromSearcher.search(new TermQuery(new Term("value", uniqueRandomValue)), new SimpleCollector() {
@@ -692,76 +675,33 @@
           public void setScorer(Scorer scorer) {
             this.scorer = scorer;
           }
-
-          @Override
-          public boolean acceptsDocsOutOfOrder() {
-            return false;
-          }
         });
       }
 
       final Map<Integer, JoinScore> docToJoinScore = new HashMap<>();
       if (multipleValuesPerDocument) {
-        if (scoreDocsInOrder) {
-          LeafReader slowCompositeReader = SlowCompositeReaderWrapper.wrap(toSearcher.getIndexReader());
-          Terms terms = slowCompositeReader.terms(toField);
-          if (terms != null) {
-            DocsEnum docsEnum = null;
-            TermsEnum termsEnum = null;
-            SortedSet<BytesRef> joinValues = new TreeSet<>(BytesRef.getUTF8SortedAsUnicodeComparator());
-            joinValues.addAll(joinValueToJoinScores.keySet());
-            for (BytesRef joinValue : joinValues) {
-              termsEnum = terms.iterator(termsEnum);
-              if (termsEnum.seekExact(joinValue)) {
-                docsEnum = termsEnum.docs(slowCompositeReader.getLiveDocs(), docsEnum, DocsEnum.FLAG_NONE);
-                JoinScore joinScore = joinValueToJoinScores.get(joinValue);
+        LeafReader slowCompositeReader = SlowCompositeReaderWrapper.wrap(toSearcher.getIndexReader());
+        Terms terms = slowCompositeReader.terms(toField);
+        if (terms != null) {
+          DocsEnum docsEnum = null;
+          TermsEnum termsEnum = null;
+          SortedSet<BytesRef> joinValues = new TreeSet<>(BytesRef.getUTF8SortedAsUnicodeComparator());
+          joinValues.addAll(joinValueToJoinScores.keySet());
+          for (BytesRef joinValue : joinValues) {
+            termsEnum = terms.iterator(termsEnum);
+            if (termsEnum.seekExact(joinValue)) {
+              docsEnum = termsEnum.docs(slowCompositeReader.getLiveDocs(), docsEnum, DocsEnum.FLAG_NONE);
+              JoinScore joinScore = joinValueToJoinScores.get(joinValue);
 
-                for (int doc = docsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.nextDoc()) {
-                  // First encountered join value determines the score.
-                  // Something to keep in mind for many-to-many relations.
-                  if (!docToJoinScore.containsKey(doc)) {
-                    docToJoinScore.put(doc, joinScore);
-                  }
+              for (int doc = docsEnum.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = docsEnum.nextDoc()) {
+                // First encountered join value determines the score.
+                // Something to keep in mind for many-to-many relations.
+                if (!docToJoinScore.containsKey(doc)) {
+                  docToJoinScore.put(doc, joinScore);
                 }
               }
             }
           }
-        } else {
-          toSearcher.search(new MatchAllDocsQuery(), new SimpleCollector() {
-
-            private SortedSetDocValues docTermOrds;
-            private int docBase;
-
-            @Override
-            public void collect(int doc) throws IOException {
-              docTermOrds.setDocument(doc);
-              long ord;
-              while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
-                final BytesRef joinValue = docTermOrds.lookupOrd(ord);
-                JoinScore joinScore = joinValueToJoinScores.get(joinValue);
-                if (joinScore == null) {
-                  continue;
-                }
-                Integer basedDoc = docBase + doc;
-                // First encountered join value determines the score.
-                // Something to keep in mind for many-to-many relations.
-                if (!docToJoinScore.containsKey(basedDoc)) {
-                  docToJoinScore.put(basedDoc, joinScore);
-                }
-              }
-            }
-
-            @Override
-            protected void doSetNextReader(LeafReaderContext context) throws IOException {
-              docBase = context.docBase;
-              docTermOrds = DocValues.getSortedSet(context.reader(), toField);
-            }
-
-            @Override
-            public boolean acceptsDocsOutOfOrder() {return false;}
-            @Override
-            public void setScorer(Scorer scorer) {}
-          });
         }
       } else {
         toSearcher.search(new MatchAllDocsQuery(), new SimpleCollector() {
@@ -786,8 +726,6 @@
           }
 
           @Override
-          public boolean acceptsDocsOutOfOrder() {return false;}
-          @Override
           public void setScorer(Scorer scorer) {}
         });
       }
diff --git a/lucene/licenses/javax.servlet-3.0.0.v201112011016.jar.sha1 b/lucene/licenses/javax.servlet-3.0.0.v201112011016.jar.sha1
deleted file mode 100644
index 749a2c2..0000000
--- a/lucene/licenses/javax.servlet-3.0.0.v201112011016.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0aaaa85845fb5c59da00193f06b8e5278d8bf3f8
diff --git a/lucene/licenses/javax.servlet-api-3.1.0.jar.sha1 b/lucene/licenses/javax.servlet-api-3.1.0.jar.sha1
new file mode 100644
index 0000000..a269ca0
--- /dev/null
+++ b/lucene/licenses/javax.servlet-api-3.1.0.jar.sha1
@@ -0,0 +1 @@
+3cd63d075497751784b2fa84be59432f4905bf7c
diff --git a/lucene/licenses/jetty-continuation-8.1.10.v20130312.jar.sha1 b/lucene/licenses/jetty-continuation-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 40d32c2..0000000
--- a/lucene/licenses/jetty-continuation-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c0e26574ddcac7a86486f19a8b3782657acfd961
diff --git a/lucene/licenses/jetty-continuation-9.2.6.v20141205.jar.sha1 b/lucene/licenses/jetty-continuation-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..6566b24
--- /dev/null
+++ b/lucene/licenses/jetty-continuation-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+74ca2679e461e7e9b5fdffcf9685575a7d5f5c8e
diff --git a/lucene/licenses/jetty-http-8.1.10.v20130312.jar.sha1 b/lucene/licenses/jetty-http-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 7cc9164..0000000
--- a/lucene/licenses/jetty-http-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d9eb53007e04d6338f12f3ded60fad1f7bfcb40e
diff --git a/lucene/licenses/jetty-http-9.2.6.v20141205.jar.sha1 b/lucene/licenses/jetty-http-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..eee8f10
--- /dev/null
+++ b/lucene/licenses/jetty-http-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+5484771191612c1f5a57466865b7014ff56886ce
diff --git a/lucene/licenses/jetty-io-8.1.10.v20130312.jar.sha1 b/lucene/licenses/jetty-io-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 2ba8e66..0000000
--- a/lucene/licenses/jetty-io-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e829c768f2b9de5d9fae3bc0aba3996bd0344f56
diff --git a/lucene/licenses/jetty-io-9.2.6.v20141205.jar.sha1 b/lucene/licenses/jetty-io-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..bc69203
--- /dev/null
+++ b/lucene/licenses/jetty-io-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+11b84cc7078745fca844bd2fb95c2b4f818eafc2
diff --git a/lucene/licenses/jetty-server-8.1.10.v20130312.jar.sha1 b/lucene/licenses/jetty-server-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 2674565..0000000
--- a/lucene/licenses/jetty-server-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-13ca9587bc1645f8fac89454b15252a2ad5bdcf5
diff --git a/lucene/licenses/jetty-server-9.2.6.v20141205.jar.sha1 b/lucene/licenses/jetty-server-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..bba3d06
--- /dev/null
+++ b/lucene/licenses/jetty-server-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+5960eb385ded42360045447185a0510365f811dc
diff --git a/lucene/licenses/jetty-servlet-8.1.10.v20130312.jar.sha1 b/lucene/licenses/jetty-servlet-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index ac4faaa..0000000
--- a/lucene/licenses/jetty-servlet-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-98f8029fe7236e9c66381c04f292b5319f47ca84
diff --git a/lucene/licenses/jetty-servlet-9.2.6.v20141205.jar.sha1 b/lucene/licenses/jetty-servlet-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..c29ef43
--- /dev/null
+++ b/lucene/licenses/jetty-servlet-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+026aa018ef20780e8a900ae7fc95f59884d3095b
diff --git a/lucene/licenses/jetty-util-8.1.10.v20130312.jar.sha1 b/lucene/licenses/jetty-util-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index cff356d..0000000
--- a/lucene/licenses/jetty-util-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d198a8ad8ea20b4fb74c781175c48500ec2b8b7a
diff --git a/lucene/licenses/jetty-util-9.2.6.v20141205.jar.sha1 b/lucene/licenses/jetty-util-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..6da0dcc
--- /dev/null
+++ b/lucene/licenses/jetty-util-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+fdfa0b969d99a2dfb2a46c0ff00251d7e6c7b2bb
diff --git a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
index fc5a949..2695e38 100644
--- a/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
+++ b/lucene/memory/src/java/org/apache/lucene/index/memory/MemoryIndex.java
@@ -29,6 +29,7 @@
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
 import org.apache.lucene.document.FieldTypes;
@@ -61,6 +62,8 @@
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefArray;
+import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.BytesRefHash.DirectBytesStartArray;
 import org.apache.lucene.util.BytesRefHash;
 import org.apache.lucene.util.Counter;
@@ -188,17 +191,19 @@
  */
 public class MemoryIndex {
 
+  private static final boolean DEBUG = false;
+
   /** info for each field: Map&lt;String fieldName, Info field&gt; */
   private final SortedMap<String,Info> fields = new TreeMap<>();
   
   private final boolean storeOffsets;
-  
-  private static final boolean DEBUG = false;
+  private final boolean storePayloads;
 
   private final ByteBlockPool byteBlockPool;
   private final IntBlockPool intBlockPool;
 //  private final IntBlockPool.SliceReader postingsReader;
   private final IntBlockPool.SliceWriter postingsWriter;
+  private final BytesRefArray payloadsBytesRefs; // non-null only when storePayloads
 
   private Counter bytesUsed;
 
@@ -207,7 +212,7 @@
   private Similarity normSimilarity = IndexSearcher.getDefaultSimilarity();
 
   /**
-   * Constructs an empty instance.
+   * Constructs an empty instance that will not store offsets or payloads.
    */
   public MemoryIndex() {
     this(false);
@@ -216,25 +221,37 @@
   /**
    * Constructs an empty instance that can optionally store the start and end
    * character offset of each token term in the text. This can be useful for
-   * highlighting of hit locations with the Lucene highlighter package.
-   * Protected until the highlighter package matures, so that this can actually
-   * be meaningfully integrated.
+   * highlighting of hit locations with the Lucene highlighter package.  But
+   * it will not store payloads; use another constructor for that.
    * 
    * @param storeOffsets
    *            whether or not to store the start and end character offset of
    *            each token term in the text
    */
   public MemoryIndex(boolean storeOffsets) {
-    this(storeOffsets, 0);
+    this(storeOffsets, false);
   }
-  
+
+  /**
+   * Constructs an empty instance with the option of storing offsets and payloads.
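+   * <p>A usage sketch (the analyzer is assumed to emit payloads; names are illustrative):
+   * <pre class="prettyprint">
+   * MemoryIndex mi = new MemoryIndex(true, true); // store offsets and payloads
+   * mi.addField("body", "some text", analyzer);
+   * LeafReader reader = (LeafReader) mi.createSearcher().getIndexReader();
+   * </pre>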
+   *
+   * @param storeOffsets store term offsets at each position
+   * @param storePayloads store term payloads at each position
+   */
+  public MemoryIndex(boolean storeOffsets, boolean storePayloads) {
+    this(storeOffsets, storePayloads, 0);
+  }
+
   /**
    * Expert: This constructor accepts an upper limit for the number of bytes that should be reused if this instance is {@link #reset()}.
+   * The payload storage, if used, is unaffected by maxReusedBytes, however.
    * @param storeOffsets <code>true</code> if offsets should be stored
+   * @param storePayloads <code>true</code> if payloads should be stored
    * @param maxReusedBytes the number of bytes that should remain in the internal memory pools after {@link #reset()} is called
    */
-  MemoryIndex(boolean storeOffsets, long maxReusedBytes) {
+  MemoryIndex(boolean storeOffsets, boolean storePayloads, long maxReusedBytes) {
     this.storeOffsets = storeOffsets;
+    this.storePayloads = storePayloads;
     this.bytesUsed = Counter.newCounter();
     final int maxBufferedByteBlocks = (int)((maxReusedBytes/2) / ByteBlockPool.BYTE_BLOCK_SIZE );
     final int maxBufferedIntBlocks = (int) ((maxReusedBytes - (maxBufferedByteBlocks*ByteBlockPool.BYTE_BLOCK_SIZE))/(IntBlockPool.INT_BLOCK_SIZE * RamUsageEstimator.NUM_BYTES_INT));
@@ -242,6 +259,8 @@
     byteBlockPool = new ByteBlockPool(new RecyclingByteBlockAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, maxBufferedByteBlocks, bytesUsed));
     intBlockPool = new IntBlockPool(new RecyclingIntBlockAllocator(IntBlockPool.INT_BLOCK_SIZE, maxBufferedIntBlocks, bytesUsed));
     postingsWriter = new SliceWriter(intBlockPool);
+    // TODO: refactor BytesRefArray to allow us to apply the maxReusedBytes option
+    payloadsBytesRefs = storePayloads ? new BytesRefArray(bytesUsed) : null;
   }
   
   /**
@@ -382,8 +401,8 @@
    *
    * @param fieldName
    *            a name to be associated with the text
-   * @param stream
-   *            the token stream to retrieve tokens from.
+   * @param tokenStream
+   *            the token stream to retrieve tokens from. It's guaranteed to be closed no matter what.
    * @param boost
    *            the boost factor for hits for this field
    * @param positionIncrementGap
@@ -392,16 +411,17 @@
    *            the offset gap if fields with the same name are added more than once
    * @see org.apache.lucene.document.Field#setBoost(float)
    */
-  public void addField(String fieldName, TokenStream stream, float boost, int positionIncrementGap, int offsetGap) {
-    try {
+  public void addField(String fieldName, TokenStream tokenStream, float boost, int positionIncrementGap,
+                       int offsetGap) {
+    try (TokenStream stream = tokenStream) {
       if (frozen)
         throw new IllegalArgumentException("Cannot call addField() when MemoryIndex is frozen");
       if (fieldName == null)
         throw new IllegalArgumentException("fieldName must not be null");
       if (stream == null)
-          throw new IllegalArgumentException("token stream must not be null");
+        throw new IllegalArgumentException("token stream must not be null");
       if (boost <= 0.0f)
-          throw new IllegalArgumentException("boost factor must be greater than 0.0");
+        throw new IllegalArgumentException("boost factor must be greater than 0.0");
       int numTokens = 0;
       int numOverlapTokens = 0;
       int pos = -1;
@@ -422,8 +442,9 @@
         sliceArray = info.sliceArray;
         sumTotalTermFreq = info.sumTotalTermFreq;
       } else {
-        fieldInfo = new FieldInfo(fieldName, fields.size(), false, false, false,
-            this.storeOffsets ? IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS,
+        fieldInfo = new FieldInfo(fieldName, fields.size(), false, false, this.storePayloads,
+            this.storeOffsets
+                ? IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS,
             DocValuesType.NONE, -1, null);
         sliceArray = new SliceByteStartArray(BytesRefHash.DEFAULT_CAPACITY);
         terms = new BytesRefHash(byteBlockPool, BytesRefHash.DEFAULT_CAPACITY, sliceArray);
@@ -432,6 +453,7 @@
       TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
       PositionIncrementAttribute posIncrAttribute = stream.addAttribute(PositionIncrementAttribute.class);
       OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
+      PayloadAttribute payloadAtt = storePayloads ? stream.addAttribute(PayloadAttribute.class) : null;
       BytesRef ref = termAtt.getBytesRef();
       stream.reset();
       
@@ -452,13 +474,16 @@
         }
         sliceArray.freq[ord]++;
         sumTotalTermFreq++;
-        if (!storeOffsets) {
-          postingsWriter.writeInt(pos);
-        } else {
-          postingsWriter.writeInt(pos);
+        postingsWriter.writeInt(pos);
+        if (storeOffsets) {
           postingsWriter.writeInt(offsetAtt.startOffset() + offset);
           postingsWriter.writeInt(offsetAtt.endOffset() + offset);
         }
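+        // store the payload in the shared BytesRefArray; an index of -1 marks a position without a payload: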
+        if (storePayloads) {
+          final BytesRef payload = payloadAtt.getPayload();
+          int pIndex = payload == null ? -1 : payloadsBytesRefs.append(payload);
+          postingsWriter.writeInt(pIndex);
+        }
         sliceArray.end[ord] = postingsWriter.getCurrentOffset();
       }
       stream.end();
@@ -467,16 +492,8 @@
       if (numTokens > 0) {
         fields.put(fieldName, new Info(fieldInfo, terms, sliceArray, numTokens, numOverlapTokens, boost, pos, offsetAtt.endOffset() + offset, sumTotalTermFreq));
       }
-    } catch (Exception e) { // can never happen
+    } catch (IOException e) {
       throw new RuntimeException(e);
-    } finally {
-      try {
-        if (stream != null) {
-          stream.close();
-        }
-      } catch (IOException e2) {
-        throw new RuntimeException(e2);
-      }
     }
   }
 
@@ -553,11 +570,6 @@
           this.scorer = scorer;
         }
 
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return true;
-        }
-
       });
       float score = scores[0];
       return score;
@@ -869,7 +881,7 @@
 
           @Override
           public boolean hasPayloads() {
-            return false;
+            return storePayloads;
           }
         };
       }
@@ -1052,17 +1064,20 @@
     }
     
     private class MemoryDocsAndPositionsEnum extends DocsAndPositionsEnum {
+      private final SliceReader sliceReader;
       private int posUpto; // for assert
       private boolean hasNext;
       private Bits liveDocs;
       private int doc = -1;
-      private SliceReader sliceReader;
       private int freq;
       private int startOffset;
       private int endOffset;
-      
+      private int payloadIndex;
+      private final BytesRefBuilder payloadBuilder; // only non-null when storePayloads
+
       public MemoryDocsAndPositionsEnum() {
         this.sliceReader = new SliceReader(intBlockPool);
+        this.payloadBuilder = storePayloads ? new BytesRefBuilder() : null;
       }
 
       public DocsAndPositionsEnum reset(Bits liveDocs, int start, int end, int freq) {
@@ -1105,14 +1120,15 @@
       public int nextPosition() {
         assert posUpto++ < freq;
         assert !sliceReader.endOfSlice() : " stores offsets : " + startOffset;
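+        // each position's slice entry stores: pos [, startOffset, endOffset] [, payloadIndex]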
+        int pos = sliceReader.readInt();
         if (storeOffsets) {
-          int pos = sliceReader.readInt();
           startOffset = sliceReader.readInt();
           endOffset = sliceReader.readInt();
-          return pos;
-        } else {
-          return sliceReader.readInt();
         }
+        if (storePayloads) {
+          payloadIndex = sliceReader.readInt();
+        }
+        return pos;
       }
 
       @Override
@@ -1127,7 +1143,10 @@
 
       @Override
       public BytesRef getPayload() {
-        return null;
+        if (payloadBuilder == null || payloadIndex == -1) {
+          return null;
+        }
+        return payloadsBytesRefs.get(payloadBuilder, payloadIndex);
       }
       
       @Override
@@ -1187,6 +1206,9 @@
     this.normSimilarity = IndexSearcher.getDefaultSimilarity();
     byteBlockPool.reset(false, false); // no need to 0-fill the buffers
     intBlockPool.reset(true, false); // here we must 0-fill since we use slices
+    if (payloadsBytesRefs != null) {
+      payloadsBytesRefs.clear();
+    }
     this.frozen = false;
   }
   
diff --git a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
index 8389337..74dc4fc 100644
--- a/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
+++ b/lucene/memory/src/test/org/apache/lucene/index/memory/TestMemoryIndexAgainstRAMDir.java
@@ -67,8 +67,8 @@
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.ByteBlockPool.Allocator;
 import org.apache.lucene.util.ByteBlockPool;
+import org.apache.lucene.util.ByteBlockPool.Allocator;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LineFileDocs;
@@ -115,7 +115,7 @@
    * runs random tests, up to ITERATIONS times.
    */
   public void testRandomQueries() throws Exception {
-    MemoryIndex index =  new MemoryIndex(random().nextBoolean(), random().nextInt(50) * 1024 * 1024);
+    MemoryIndex index = randomMemoryIndex();
     for (int i = 0; i < ITERATIONS; i++) {
       assertAgainstRAMDirectory(index);
     }
@@ -147,7 +147,8 @@
     Directory ramdir = new RAMDirectory();
     Analyzer analyzer = randomAnalyzer();
     IndexWriter writer = new IndexWriter(ramdir,
-                                         new IndexWriterConfig(analyzer).setCodec(TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat())));
+                                         new IndexWriterConfig(analyzer).setCodec(
+                                             TestUtil.alwaysPostingsFormat(TestUtil.getDefaultPostingsFormat())));
     Document doc = writer.newDocument();
     doc.addLargeText("foo", fooField.toString());
     doc.addLargeText("term", termField.toString());
@@ -206,6 +207,10 @@
                   assertEquals(iwDocsAndPos.startOffset(), memDocsAndPos.startOffset());
                   assertEquals(iwDocsAndPos.endOffset(), memDocsAndPos.endOffset());
                 }
+
+                if (iwTerms.hasPayloads()) {
+                  assertEquals(iwDocsAndPos.getPayload(), memDocsAndPos.getPayload());
+                }
               }
               
             }
@@ -308,7 +313,7 @@
   
   public void testDocsEnumStart() throws Exception {
     Analyzer analyzer = new MockAnalyzer(random());
-    MemoryIndex memory = new MemoryIndex(random().nextBoolean(),  random().nextInt(50) * 1024 * 1024);
+    MemoryIndex memory = new MemoryIndex(random().nextBoolean(), false, random().nextInt(50) * 1024 * 1024);
     memory.addField("foo", "bar", analyzer);
     LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
     DocsEnum disi = TestUtil.docs(random(), reader, "foo", new BytesRef("bar"), null, null, DocsEnum.FLAG_NONE);
@@ -333,11 +338,15 @@
       return new ByteBlockPool.DirectAllocator();
     }
   }
-  
+
+  private MemoryIndex randomMemoryIndex() {
+    return new MemoryIndex(random().nextBoolean(), random().nextBoolean(), random().nextInt(50) * 1024 * 1024);
+  }
+
   public void testDocsAndPositionsEnumStart() throws Exception {
     Analyzer analyzer = new MockAnalyzer(random());
     int numIters = atLeast(3);
-    MemoryIndex memory = new MemoryIndex(true,  random().nextInt(50) * 1024 * 1024);
+    MemoryIndex memory = new MemoryIndex(true, false, random().nextInt(50) * 1024 * 1024);
     for (int i = 0; i < numIters; i++) { // check reuse
       memory.addField("foo", "bar", analyzer);
       LeafReader reader = (LeafReader) memory.createSearcher().getIndexReader();
@@ -367,7 +376,7 @@
     RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
     SpanQuery wrappedquery = new SpanMultiTermQueryWrapper<>(regex);
         
-    MemoryIndex mindex = new MemoryIndex(random().nextBoolean(),  random().nextInt(50) * 1024 * 1024);
+    MemoryIndex mindex = randomMemoryIndex();
     mindex.addField("field", new MockAnalyzer(random()).tokenStream("field", "hello there"));
 
     // This throws an NPE
@@ -379,7 +388,7 @@
     RegexpQuery regex = new RegexpQuery(new Term("field", "worl."));
     SpanQuery wrappedquery = new SpanOrQuery(new SpanMultiTermQueryWrapper<>(regex));
 
-    MemoryIndex mindex = new MemoryIndex(random().nextBoolean(),  random().nextInt(50) * 1024 * 1024);
+    MemoryIndex mindex = randomMemoryIndex();
     mindex.addField("field", new MockAnalyzer(random()).tokenStream("field", "hello there"));
 
     // This passes though
@@ -387,7 +396,7 @@
   }
   
   public void testSameFieldAddedMultipleTimes() throws IOException {
-    MemoryIndex mindex = new MemoryIndex(random().nextBoolean(),  random().nextInt(50) * 1024 * 1024);
+    MemoryIndex mindex = randomMemoryIndex();
     MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
     mindex.addField("field", "the quick brown fox", mockAnalyzer);
     mindex.addField("field", "jumps over the", mockAnalyzer);
@@ -406,8 +415,8 @@
     assertTrue("posGap" + mockAnalyzer.getPositionIncrementGap("field") , mindex.search(query) > 0.0001);
   }
   
-  public void testNonExistingsField() throws IOException {
-    MemoryIndex mindex = new MemoryIndex(random().nextBoolean(),  random().nextInt(50) * 1024 * 1024);
+  public void testNonExistentField() throws IOException {
+    MemoryIndex mindex = randomMemoryIndex();
     MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
     mindex.addField("field", "the quick brown fox", mockAnalyzer);
     LeafReader reader = (LeafReader) mindex.createSearcher().getIndexReader();
@@ -417,10 +426,10 @@
     assertNull(reader.termPositionsEnum(new Term("not-in-index", "foo")));
     assertNull(reader.terms("not-in-index"));
   }
-  
+
   public void testDuellMemIndex() throws IOException {
     int numDocs = atLeast(10);
-    MemoryIndex memory = new MemoryIndex(random().nextBoolean(),  random().nextInt(50) * 1024 * 1024);
+    MemoryIndex memory = randomMemoryIndex();
     for (int i = 0; i < numDocs; i++) {
       Directory dir = newDirectory();
       MockAnalyzer mockAnalyzer = new MockAnalyzer(random());
@@ -532,7 +541,7 @@
         assertThat("Position test failed" + failDesc, memPos, equalTo(pos));
         assertThat("Start offset test failed" + failDesc, memDocsPosEnum.startOffset(), equalTo(docsPosEnum.startOffset()));
         assertThat("End offset test failed" + failDesc, memDocsPosEnum.endOffset(), equalTo(docsPosEnum.endOffset()));
-        assertThat("Missing payload test failed" + failDesc, docsPosEnum.getPayload(), equalTo(null));
+        assertThat("Missing payload test failed" + failDesc, docsPosEnum.getPayload(), equalTo(docsPosEnum.getPayload()));
       }
     }
     assertNull("Still some tokens not processed", memTermEnum.next());
diff --git a/lucene/misc/src/java/org/apache/lucene/index/MergeReaderWrapper.java b/lucene/misc/src/java/org/apache/lucene/index/MergeReaderWrapper.java
index 4091634..a27a0c5 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/MergeReaderWrapper.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/MergeReaderWrapper.java
@@ -39,7 +39,7 @@
   MergeReaderWrapper(SegmentReader in) throws IOException {
     this.in = in;
     
-    FieldsProducer fields = in.fields();
+    FieldsProducer fields = in.getPostingsReader();
     if (fields != null) {
       fields = fields.getMergeInstance();
     }
diff --git a/lucene/misc/src/java/org/apache/lucene/index/MultiPassIndexSplitter.java b/lucene/misc/src/java/org/apache/lucene/index/MultiPassIndexSplitter.java
index 6784606..3a58168 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/MultiPassIndexSplitter.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/MultiPassIndexSplitter.java
@@ -33,7 +33,7 @@
 
 /**
  * This tool splits the input index into multiple equal parts. The method employed
- * here uses {@link IndexWriter#addIndexes(IndexReader[])} where the input data
+ * here uses {@link IndexWriter#addIndexes(CodecReader[])} where the input data
  * comes from the input index with artificially applied deletes to the document
  * IDs that fall outside the selected partition.
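+ * <p>A usage sketch (the reader and the output directories are illustrative):
+ * <pre class="prettyprint">
+ * new MultiPassIndexSplitter().split(reader, new Directory[] { out1, out2 }, true);
+ * </pre>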
  * <p>Note 1: Deletes are only applied to a buffered list of deleted docs and
@@ -59,7 +59,7 @@
    * assigned in a deterministic round-robin fashion to one of the output splits.
    * @throws IOException If there is a low-level I/O error
    */
-  public void split(Version version, IndexReader in, Directory[] outputs, boolean seq) throws IOException {
+  public void split(IndexReader in, Directory[] outputs, boolean seq) throws IOException {
     if (outputs == null || outputs.length < 2) {
       throw new IOException("Invalid number of outputs.");
     }
@@ -102,7 +102,7 @@
       System.err.println("Writing part " + (i + 1) + " ...");
       // pass the subreaders directly, as our wrapper's numDocs/hasDeletions are not up-to-date
       final List<? extends FakeDeleteLeafIndexReader> sr = input.getSequentialSubReaders();
-      w.addIndexes(sr.toArray(new IndexReader[sr.size()])); // TODO: maybe take List<IR> here?
+      w.addIndexes(sr.toArray(new CodecReader[sr.size()])); // TODO: maybe take List<IR> here?
       w.close();
     }
     System.err.println("Done.");
@@ -170,7 +170,7 @@
     } else {
       input = new MultiReader(indexes.toArray(new IndexReader[indexes.size()]));
     }
-    splitter.split(Version.LATEST, input, dirs, seq);
+    splitter.split(input, dirs, seq);
   }
   
   /**
@@ -178,16 +178,16 @@
    */
   private static final class FakeDeleteIndexReader extends BaseCompositeReader<FakeDeleteLeafIndexReader> {
 
-    public FakeDeleteIndexReader(IndexReader reader) {
+    public FakeDeleteIndexReader(IndexReader reader) throws IOException {
       super(initSubReaders(reader));
     }
     
-    private static FakeDeleteLeafIndexReader[] initSubReaders(IndexReader reader) {
+    private static FakeDeleteLeafIndexReader[] initSubReaders(IndexReader reader) throws IOException {
       final List<LeafReaderContext> leaves = reader.leaves();
       final FakeDeleteLeafIndexReader[] subs = new FakeDeleteLeafIndexReader[leaves.size()];
       int i = 0;
       for (final LeafReaderContext ctx : leaves) {
-        subs[i++] = new FakeDeleteLeafIndexReader(ctx.reader());
+        subs[i++] = new FakeDeleteLeafIndexReader(SlowCodecReaderWrapper.wrap(ctx.reader()));
       }
       return subs;
     }
@@ -210,10 +210,10 @@
     // as we pass the subreaders directly to IW.addIndexes().
   }
   
-  private static final class FakeDeleteLeafIndexReader extends FilterLeafReader {
+  private static final class FakeDeleteLeafIndexReader extends FilterCodecReader {
     FixedBitSet liveDocs;
 
-    public FakeDeleteLeafIndexReader(LeafReader reader) {
+    public FakeDeleteLeafIndexReader(CodecReader reader) {
       super(reader);
       undeleteAll(); // initialize main bitset
     }
diff --git a/lucene/misc/src/java/org/apache/lucene/index/PKIndexSplitter.java b/lucene/misc/src/java/org/apache/lucene/index/PKIndexSplitter.java
index a58a479..9b9b724 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/PKIndexSplitter.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/PKIndexSplitter.java
@@ -98,12 +98,12 @@
     }
   }
   
-  private void createIndex(IndexWriterConfig config, Directory target, IndexReader reader, Filter preserveFilter, boolean negateFilter) throws IOException {
+  private void createIndex(IndexWriterConfig config, Directory target, DirectoryReader reader, Filter preserveFilter, boolean negateFilter) throws IOException {
     boolean success = false;
     final IndexWriter w = new IndexWriter(target, config);
     try {
       final List<LeafReaderContext> leaves = reader.leaves();
-      final IndexReader[] subReaders = new IndexReader[leaves.size()];
+      final CodecReader[] subReaders = new CodecReader[leaves.size()];
       int i = 0;
       for (final LeafReaderContext ctx : leaves) {
         subReaders[i++] = new DocumentFilteredLeafIndexReader(ctx, preserveFilter, negateFilter);
@@ -119,12 +119,13 @@
     }
   }
     
-  private static class DocumentFilteredLeafIndexReader extends FilterLeafReader {
+  private static class DocumentFilteredLeafIndexReader extends FilterCodecReader {
     final Bits liveDocs;
     final int numDocs;
     
     public DocumentFilteredLeafIndexReader(LeafReaderContext context, Filter preserveFilter, boolean negateFilter) throws IOException {
-      super(context.reader());
+      // our cast is ok, since we open the Directory.
+      super((CodecReader) context.reader());
       final int maxDoc = in.maxDoc();
       final FixedBitSet bits = new FixedBitSet(maxDoc);
       // ignore livedocs here, as we filter them later:
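
PKIndexSplitter follows the same pattern: because the splitter opens the Directory itself, every leaf is a CodecReader and the cast above holds. A hedged usage sketch (the field and term values are invented):

import org.apache.lucene.index.PKIndexSplitter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;

class PKSplitDemo {
  // Docs whose "id" sorts before "m5000" go to dir1, the rest to dir2.
  static void splitByPrimaryKey(Directory input, Directory dir1, Directory dir2) throws Exception {
    new PKIndexSplitter(input, dir1, dir2, new Term("id", "m5000")).split();
  }
}
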
diff --git a/lucene/misc/src/java/org/apache/lucene/index/Sorter.java b/lucene/misc/src/java/org/apache/lucene/index/Sorter.java
index 6ae99b0..22912bc 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/Sorter.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/Sorter.java
@@ -20,9 +20,7 @@
 import java.io.IOException;
 import java.util.Comparator;
 
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
@@ -214,12 +212,11 @@
   DocMap sort(LeafReader reader) throws IOException {
     SortField fields[] = sort.getSort();
     final int reverseMul[] = new int[fields.length];
-    final FieldComparator<?> comparators[] = new FieldComparator[fields.length];
+    final LeafFieldComparator comparators[] = new LeafFieldComparator[fields.length];
     
     for (int i = 0; i < fields.length; i++) {
       reverseMul[i] = fields[i].getReverse() ? -1 : 1;
-      comparators[i] = fields[i].getComparator(1, i);
-      comparators[i].setNextReader(reader.getContext());
+      comparators[i] = fields[i].getComparator(1, i).getLeafComparator(reader.getContext());
       comparators[i].setScorer(FAKESCORER);
     }
     final DocComparator comparator = new DocComparator() {
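
This Sorter change tracks the trunk-wide FieldComparator split: per-segment state now lives in a LeafFieldComparator obtained from getLeafComparator(context), replacing the old mutate-and-return setNextReader(). A schematic of the migration, not tied to any particular caller:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.LeafFieldComparator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;

class LeafComparatorDemo {
  // before: comparator.setNextReader(context) configured the same object per segment
  static LeafFieldComparator leafComparatorFor(SortField field, LeafReaderContext context,
                                               Scorer scorer) throws IOException {
    FieldComparator<?> comparator = field.getComparator(1, 0); // numHits=1, sortPos=0
    LeafFieldComparator leaf = comparator.getLeafComparator(context); // per-segment view
    leaf.setScorer(scorer); // scorer wiring is per-leaf now
    return leaf;
  }
}
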
diff --git a/lucene/misc/src/java/org/apache/lucene/index/SortingMergePolicy.java b/lucene/misc/src/java/org/apache/lucene/index/SortingMergePolicy.java
index 3a4bca9..9e7b9a7 100644
--- a/lucene/misc/src/java/org/apache/lucene/index/SortingMergePolicy.java
+++ b/lucene/misc/src/java/org/apache/lucene/index/SortingMergePolicy.java
@@ -47,7 +47,7 @@
  *  will be sorted while segments resulting from a flush will be in the order
  *  in which documents have been added.
  *  <p><b>NOTE</b>: Never use this policy if you rely on
- *  {@link IndexWriter#addDocuments(Iterable, Analyzer) IndexWriter.addDocuments}
+ *  {@link IndexWriter#addDocuments(Iterable) IndexWriter.addDocuments}
  *  to have sequentially-assigned doc IDs, because this policy will scatter doc IDs.
  *  <p><b>NOTE</b>: This policy should only be used with idempotent {@code Sort}s 
  *  so that the order of segments is predictable. For example, using 
@@ -65,7 +65,7 @@
   
   class SortingOneMerge extends OneMerge {
 
-    List<LeafReader> unsortedReaders;
+    List<CodecReader> unsortedReaders;
     Sorter.DocMap docMap;
     LeafReader sortedView;
     final InfoStream infoStream;
@@ -76,7 +76,7 @@
     }
 
     @Override
-    public List<LeafReader> getMergeReaders() throws IOException {
+    public List<CodecReader> getMergeReaders() throws IOException {
       if (unsortedReaders == null) {
         unsortedReaders = super.getMergeReaders();
         if (infoStream.isEnabled("SMP")) {
@@ -117,7 +117,7 @@
         if (infoStream.isEnabled("SMP")) {
           infoStream.message("SMP", "sorting readers by " + sort);
         }
-        return Collections.singletonList(sortedView);
+        return Collections.singletonList(SlowCodecReaderWrapper.wrap(sortedView));
       }
     }
     
@@ -128,7 +128,7 @@
       super.setInfo(info);
     }
 
-    private PackedLongValues getDeletes(List<LeafReader> readers) {
+    private PackedLongValues getDeletes(List<CodecReader> readers) {
       PackedLongValues.Builder deletes = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
       int deleteCount = 0;
       for (LeafReader reader : readers) {
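
As a concrete illustration of the javadoc above, here is a sketch of wiring SortingMergePolicy into an IndexWriterConfig with an idempotent sort; the "timestamp" field name is hypothetical:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SortingMergePolicy;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

class SortingMergePolicyDemo {
  static IndexWriterConfig sortingConfig() {
    IndexWriterConfig iwc = new IndexWriterConfig(new StandardAnalyzer());
    Sort sort = new Sort(new SortField("timestamp", SortField.Type.LONG)); // idempotent
    // wrap the default policy; merged segments come out sorted by timestamp
    iwc.setMergePolicy(new SortingMergePolicy(new TieredMergePolicy(), sort));
    return iwc;
  }
}
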
diff --git a/lucene/misc/src/java/org/apache/lucene/search/BlockJoinComparatorSource.java b/lucene/misc/src/java/org/apache/lucene/search/BlockJoinComparatorSource.java
index 21143a2..bd7d5e4 100644
--- a/lucene/misc/src/java/org/apache/lucene/search/BlockJoinComparatorSource.java
+++ b/lucene/misc/src/java/org/apache/lucene/search/BlockJoinComparatorSource.java
@@ -21,18 +21,8 @@
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.SortingMergePolicy;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.FieldComparator;
-import org.apache.lucene.search.FieldComparatorSource;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.IndexSearcher; // javadocs
-import org.apache.lucene.search.Query; // javadocs
-import org.apache.lucene.search.ScoreDoc; // javadocs
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Sort;
-import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.BitDocIdSet;
-import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.BitSet;
 
 /**
  * Helper class to sort readers that contain blocks of documents.
@@ -51,22 +41,22 @@
   final Filter parentsFilter;
   final Sort parentSort;
   final Sort childSort;
-  
-  /** 
+
+  /**
    * Create a new BlockJoinComparatorSource, sorting only blocks of documents
    * with {@code parentSort} and not reordering children within a block.
-   * 
+   *
    * @param parentsFilter Filter identifying parent documents
    * @param parentSort Sort for parent documents
    */
   public BlockJoinComparatorSource(Filter parentsFilter, Sort parentSort) {
     this(parentsFilter, parentSort, new Sort(SortField.FIELD_DOC));
   }
-  
-  /** 
+
+  /**
    * Create a new BlockJoinComparatorSource, specifying the sort order for both
    * blocks of documents and children within a block.
-   * 
+   *
    * @param parentsFilter Filter identifying parent documents
    * @param parentSort Sort for parent documents
    * @param childSort Sort for child documents in the same block
@@ -82,7 +72,7 @@
     // we keep parallel slots: the parent ids and the child ids
     final int parentSlots[] = new int[numHits];
     final int childSlots[] = new int[numHits];
-    
+
     SortField parentFields[] = parentSort.getSort();
     final int parentReverseMul[] = new int[parentFields.length];
     final FieldComparator<?> parentComparators[] = new FieldComparator[parentFields.length];
@@ -90,7 +80,7 @@
       parentReverseMul[i] = parentFields[i].getReverse() ? -1 : 1;
       parentComparators[i] = parentFields[i].getComparator(1, i);
     }
-    
+
     SortField childFields[] = childSort.getSort();
     final int childReverseMul[] = new int[childFields.length];
     final FieldComparator<?> childComparators[] = new FieldComparator[childFields.length];
@@ -98,14 +88,16 @@
       childReverseMul[i] = childFields[i].getReverse() ? -1 : 1;
       childComparators[i] = childFields[i].getComparator(1, i);
     }
-        
+
     // NOTE: we could return parent ID as value but really our sort "value" is more complex...
     // So we throw UOE for now. At the moment you really should only use this at indexing time.
     return new FieldComparator<Integer>() {
       int bottomParent;
       int bottomChild;
-      FixedBitSet parentBits;
-      
+      BitSet parentBits;
+      LeafFieldComparator[] parentLeafComparators;
+      LeafFieldComparator[] childLeafComparators;
+
       @Override
       public int compare(int slot1, int slot2) {
         try {
@@ -116,51 +108,69 @@
       }
 
       @Override
-      public void setBottom(int slot) {
-        bottomParent = parentSlots[slot];
-        bottomChild = childSlots[slot];
-      }
-
-      @Override
       public void setTopValue(Integer value) {
         // we don't have enough information (the docid is needed)
         throw new UnsupportedOperationException("this comparator cannot be used with deep paging");
       }
 
       @Override
-      public int compareBottom(int doc) throws IOException {
-        return compare(bottomChild, bottomParent, doc, parent(doc));
-      }
-
-      @Override
-      public int compareTop(int doc) throws IOException {
-        // we dont have enough information (the docid is needed)
-        throw new UnsupportedOperationException("this comparator cannot be used with deep paging");
-      }
-
-      @Override
-      public void copy(int slot, int doc) throws IOException {
-        childSlots[slot] = doc;
-        parentSlots[slot] = parent(doc);
-      }
-
-      @Override
-      public FieldComparator<Integer> setNextReader(LeafReaderContext context) throws IOException {
+      public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
+        if (parentBits != null) {
+          throw new IllegalStateException("This comparator can only be used on a single segment");
+        }
         final DocIdSet parents = parentsFilter.getDocIdSet(context, null);
         if (parents == null) {
           throw new IllegalStateException("LeafReader " + context.reader() + " contains no parents!");
         }
-        if (!(parents instanceof BitDocIdSet)) {
-          throw new IllegalStateException("parentFilter must return FixedBitSet; got " + parents);
+        if (parents instanceof BitDocIdSet == false) {
+          throw new IllegalStateException("parentFilter must return BitSet; got " + parents);
         }
-        parentBits = (FixedBitSet) parents.bits();
+        parentBits = (BitSet) parents.bits();
+        parentLeafComparators = new LeafFieldComparator[parentComparators.length];
         for (int i = 0; i < parentComparators.length; i++) {
-          parentComparators[i] = parentComparators[i].setNextReader(context);
+          parentLeafComparators[i] = parentComparators[i].getLeafComparator(context);
         }
+        childLeafComparators = new LeafFieldComparator[childComparators.length];
         for (int i = 0; i < childComparators.length; i++) {
-          childComparators[i] = childComparators[i].setNextReader(context);
+          childLeafComparators[i] = childComparators[i].getLeafComparator(context);
         }
-        return this;
+
+        return new LeafFieldComparator() {
+
+          @Override
+          public int compareBottom(int doc) throws IOException {
+            return compare(bottomChild, bottomParent, doc, parent(doc));
+          }
+
+          @Override
+          public int compareTop(int doc) throws IOException {
+            // we don't have enough information (the docid is needed)
+            throw new UnsupportedOperationException("this comparator cannot be used with deep paging");
+          }
+
+          @Override
+          public void copy(int slot, int doc) throws IOException {
+            childSlots[slot] = doc;
+            parentSlots[slot] = parent(doc);
+          }
+
+          @Override
+          public void setBottom(int slot) {
+            bottomParent = parentSlots[slot];
+            bottomChild = childSlots[slot];
+          }
+
+          @Override
+          public void setScorer(Scorer scorer) {
+            for (LeafFieldComparator comp : parentLeafComparators) {
+              comp.setScorer(scorer);
+            }
+            for (LeafFieldComparator comp : childLeafComparators) {
+              comp.setScorer(scorer);
+            }
+          }
+
+        };
       }
 
       @Override
@@ -168,32 +178,21 @@
         // really our sort "value" is more complex...
         throw new UnsupportedOperationException("filling sort field values is not yet supported");
       }
-      
-      @Override
-      public void setScorer(Scorer scorer) {
-        super.setScorer(scorer);
-        for (FieldComparator<?> comp : parentComparators) {
-          comp.setScorer(scorer);
-        }
-        for (FieldComparator<?> comp : childComparators) {
-          comp.setScorer(scorer);
-        }
-      }
 
       int parent(int doc) {
         return parentBits.nextSetBit(doc);
       }
-      
+
       int compare(int docID1, int parent1, int docID2, int parent2) throws IOException {
         if (parent1 == parent2) { // both are in the same block
           if (docID1 == parent1 || docID2 == parent2) {
             // keep parents at the end of blocks
             return docID1 - docID2;
           } else {
-            return compare(docID1, docID2, childComparators, childReverseMul);
+            return compare(docID1, docID2, childLeafComparators, childReverseMul);
           }
         } else {
-          int cmp = compare(parent1, parent2, parentComparators, parentReverseMul);
+          int cmp = compare(parent1, parent2, parentLeafComparators, parentReverseMul);
           if (cmp == 0) {
             return parent1 - parent2;
           } else {
@@ -201,8 +200,8 @@
           }
         }
       }
-      
-      int compare(int docID1, int docID2, FieldComparator<?> comparators[], int reverseMul[]) throws IOException {
+
+      int compare(int docID1, int docID2, LeafFieldComparator comparators[], int reverseMul[]) throws IOException {
         for (int i = 0; i < comparators.length; i++) {
           // TODO: would be better if copy() didn't cause a term lookup in TermOrdVal & co,
           // the segments are always the same here...
@@ -217,7 +216,7 @@
       }
     };
   }
-  
+
   @Override
   public String toString() {
     return "blockJoin(parentSort=" + parentSort + ",childSort=" + childSort + ")";
diff --git a/lucene/misc/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java b/lucene/misc/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java
index b9d9f4f..3b68170 100644
--- a/lucene/misc/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java
+++ b/lucene/misc/src/java/org/apache/lucene/search/EarlyTerminatingSortingCollector.java
@@ -128,11 +128,6 @@
           }
         }
 
-        @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return false;
-        }
-
       };
     } else {
       return super.getLeafCollector(context);
diff --git a/lucene/misc/src/test/org/apache/lucene/index/IndexSortingTest.java b/lucene/misc/src/test/org/apache/lucene/index/IndexSortingTest.java
index da1b6ae..3041d58 100644
--- a/lucene/misc/src/test/org/apache/lucene/index/IndexSortingTest.java
+++ b/lucene/misc/src/test/org/apache/lucene/index/IndexSortingTest.java
@@ -71,8 +71,8 @@
 
     Directory target = newDirectory();
     IndexWriter writer = new IndexWriter(target, newIndexWriterConfig(null));
-    IndexReader reader = SortingLeafReader.wrap(unsortedReader, sorter);
-    writer.addIndexes(reader);
+    LeafReader reader = SortingLeafReader.wrap(unsortedReader, sorter);
+    writer.addIndexes(SlowCodecReaderWrapper.wrap(reader));
     writer.close();
     // NOTE: also closes unsortedReader
     reader.close();
diff --git a/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java b/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java
index 7a8ec99..e9aecac 100644
--- a/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java
+++ b/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java
@@ -64,7 +64,7 @@
             newDirectory(),
             newDirectory()
     };
-    splitter.split(Version.LATEST, input, dirs, false);
+    splitter.split(input, dirs, false);
     IndexReader ir;
     ir = DirectoryReader.open(dirs[0]);
     assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1); // rounding error
@@ -109,7 +109,7 @@
             newDirectory(),
             newDirectory()
     };
-    splitter.split(Version.LATEST, input, dirs, true);
+    splitter.split(input, dirs, true);
     IndexReader ir;
     ir = DirectoryReader.open(dirs[0]);
     assertTrue(ir.numDocs() - NUM_DOCS / 3 <= 1);
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestEarlyTerminatingSortingCollector.java b/lucene/misc/src/test/org/apache/lucene/search/TestEarlyTerminatingSortingCollector.java
index 1308c6c..804f760 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestEarlyTerminatingSortingCollector.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestEarlyTerminatingSortingCollector.java
@@ -123,9 +123,8 @@
         final boolean fillFields = random().nextBoolean();
         final boolean trackDocScores = random().nextBoolean();
         final boolean trackMaxScore = random().nextBoolean();
-        final boolean inOrder = random().nextBoolean();
-        final TopFieldCollector collector1 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
-        final TopFieldCollector collector2 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
+        final TopFieldCollector collector1 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
+        final TopFieldCollector collector2 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
 
         final Query query;
         if (random().nextBoolean()) {
@@ -188,9 +187,8 @@
       final boolean fillFields = random().nextBoolean();
       final boolean trackDocScores = random().nextBoolean();
       final boolean trackMaxScore = random().nextBoolean();
-      final boolean inOrder = random().nextBoolean();
-      final TopFieldCollector collector1 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
-      final TopFieldCollector collector2 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore, inOrder);
+      final TopFieldCollector collector1 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
+      final TopFieldCollector collector2 = TopFieldCollector.create(sort, numHits, fillFields, trackDocScores, trackMaxScore);
       
       final Query query;
       if (random().nextBoolean()) {
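
These test edits track the removal of out-of-order scoring: TopFieldCollector.create() lost its trailing docsScoredInOrder flag, just as acceptsDocsOutOfOrder() disappears from collectors elsewhere in this patch. The new call shape, sketched with a searcher, query, and sort assumed in scope:

import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldCollector;

class TopFieldDemo {
  static TopDocs searchSorted(IndexSearcher searcher, Query query, Sort sort) throws IOException {
    TopFieldCollector collector = TopFieldCollector.create(
        sort, 10, true /*fillFields*/, false /*trackDocScores*/, false /*trackMaxScore*/);
    searcher.search(query, collector);
    return collector.topDocs();
  }
}
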
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java
index e48e8f2..3aae0cc 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java
@@ -270,11 +270,6 @@
       res.addDetail(new Explanation(queryWeight, "queryWeight"));
       return res;
     }
-
-    @Override
-    public boolean scoresDocsOutOfOrder() {
-      return false;
-    }
     
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java
index 3cf9081..14cdc1d 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/ValueSource.java
@@ -17,16 +17,17 @@
  * limitations under the License.
  */
 
+import java.io.IOException;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.FieldComparatorSource;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.SimpleFieldComparator;
 import org.apache.lucene.search.SortField;
 
-import java.io.IOException;
-import java.util.IdentityHashMap;
-import java.util.Map;
-
 /**
  * Instantiates {@link FunctionValues} for a particular reader.
  * <br>
@@ -126,7 +127,7 @@
    * off of the {@link FunctionValues} for a ValueSource
    * instead of the normal Lucene FieldComparator that works off of a FieldCache.
    */
-  class ValueSourceComparator extends FieldComparator<Double> {
+  class ValueSourceComparator extends SimpleFieldComparator<Double> {
     private final double[] values;
     private FunctionValues docVals;
     private double bottom;
@@ -154,9 +155,8 @@
     }
 
     @Override
-    public FieldComparator setNextReader(LeafReaderContext context) throws IOException {
+    public void doSetNextReader(LeafReaderContext context) throws IOException {
       docVals = getValues(fcontext, context);
-      return this;
     }
 
     @Override
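
ValueSourceComparator now extends SimpleFieldComparator, the convenience base for comparators whose only per-segment work happens in doSetNextReader(); getLeafComparator() returns this and setScorer() is a no-op. A minimal sketch of the pattern, where loadValues() is a hypothetical per-segment loader rather than a Lucene API:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SimpleFieldComparator;

class SketchDoubleComparator extends SimpleFieldComparator<Double> {
  private final double[] slots;   // one value per competitive hit
  private double[] segmentValues; // current segment's per-doc values
  private double bottom;

  SketchDoubleComparator(int numHits) { slots = new double[numHits]; }

  @Override protected void doSetNextReader(LeafReaderContext context) throws IOException {
    segmentValues = loadValues(context); // hypothetical helper
  }
  @Override public int compare(int slot1, int slot2) { return Double.compare(slots[slot1], slots[slot2]); }
  @Override public void setBottom(int slot) { bottom = slots[slot]; }
  @Override public int compareBottom(int doc) { return Double.compare(bottom, segmentValues[doc]); }
  @Override public int compareTop(int doc) { throw new UnsupportedOperationException(); }
  @Override public void copy(int slot, int doc) { slots[slot] = segmentValues[doc]; }
  @Override public void setTopValue(Double value) { throw new UnsupportedOperationException(); }
  @Override public Double value(int slot) { return slots[slot]; }

  private double[] loadValues(LeafReaderContext context) {
    return new double[context.reader().maxDoc()]; // placeholder values
  }
}
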
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
index 2d10e68..a4d7797 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/classic/QueryParserBase.java
@@ -117,18 +117,12 @@
       Query res = TopLevelQuery(field);
       return res!=null ? res : newBooleanQuery(false);
     }
-    catch (ParseException tme) {
+    catch (ParseException | TokenMgrError tme) {
       // rethrow to include the original query:
       ParseException e = new ParseException("Cannot parse '" +query+ "': " + tme.getMessage());
       e.initCause(tme);
       throw e;
-    }
-    catch (TokenMgrError tme) {
-      ParseException e = new ParseException("Cannot parse '" +query+ "': " + tme.getMessage());
-      e.initCause(tme);
-      throw e;
-    }
-    catch (BooleanQuery.TooManyClauses tmc) {
+    } catch (BooleanQuery.TooManyClauses tmc) {
       ParseException e = new ParseException("Cannot parse '" +query+ "': too many boolean clauses");
       e.initCause(tmc);
       throw e;
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java
index 06b65ca..ea319b5 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/messages/NLS.java
@@ -149,9 +149,7 @@
     try {
       field.set(null, field.getName());
       validateMessage(field.getName(), clazz);
-    } catch (IllegalArgumentException e) {
-      // should not happen
-    } catch (IllegalAccessException e) {
+    } catch (IllegalArgumentException | IllegalAccessException e) {
       // should not happen
     }
   }
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java
index 842d43b..b77bdac 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/surround/query/BooleanQueryTst.java
@@ -72,11 +72,6 @@
     }
 
     @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
-
-    @Override
     protected void doSetNextReader(LeafReaderContext context) throws IOException {
       docBase = context.docBase;
     }
diff --git a/lucene/replicator/ivy.xml b/lucene/replicator/ivy.xml
index d7725d1..645a702 100644
--- a/lucene/replicator/ivy.xml
+++ b/lucene/replicator/ivy.xml
@@ -32,6 +32,7 @@
     <dependency org="org.apache.httpcomponents" name="httpcore" rev="${/org.apache.httpcomponents/httpcore}" conf="http"/>
     
     <dependency org="org.eclipse.jetty" name="jetty-server" rev="${/org.eclipse.jetty/jetty-server}" conf="jetty"/>
+    <dependency org="javax.servlet" name="javax.servlet-api" rev="${/javax.servlet/javax.servlet-api}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-servlet" rev="${/org.eclipse.jetty/jetty-servlet}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-util" rev="${/org.eclipse.jetty/jetty-util}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-io" rev="${/org.eclipse.jetty/jetty-io}" conf="jetty"/>
@@ -40,10 +41,6 @@
 
     <dependency org="commons-logging" name="commons-logging" rev="${/commons-logging/commons-logging}" conf="logging"/>
 
-    <dependency org="org.eclipse.jetty.orbit" name="javax.servlet" rev="${/org.eclipse.jetty.orbit/javax.servlet}" conf="servlet">
-      <artifact name="javax.servlet" type="orbit" ext="jar"/>
-    </dependency>
-
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
   </dependencies>
 
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java b/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java
index bbb099c..0ebc6d9 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java
@@ -419,9 +419,7 @@
               checker.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8), false);
               try {
                 indexStatus = checker.checkIndex(null);
-              } catch (IOException ioe) {
-                // ok: we fallback below
-              } catch (RuntimeException re) {
+              } catch (IOException | RuntimeException ioe) {
                 // ok: we fallback below
               }
             }
diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java b/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java
index aca90b7..410d551 100644
--- a/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java
+++ b/lucene/replicator/src/test/org/apache/lucene/replicator/ReplicatorTestCase.java
@@ -18,18 +18,20 @@
  */
 
 import java.util.Random;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.http.conn.HttpClientConnectionManager;
 import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.lucene.util.LuceneTestCase;
 import org.eclipse.jetty.server.Connector;
 import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
 import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.bio.SocketConnector;
-import org.eclipse.jetty.server.nio.SelectChannelConnector;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
 import org.eclipse.jetty.server.session.HashSessionIdManager;
-import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
-import org.eclipse.jetty.server.ssl.SslSocketConnector;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;
 import org.junit.AfterClass;
@@ -51,12 +53,6 @@
    * {@link #serverPort(Server)}.
    */
   public static synchronized Server newHttpServer(Handler handler) throws Exception {
-    Server server = new Server(0);
-    
-    server.setHandler(handler);
-    
-    final String connectorName = System.getProperty("tests.jettyConnector", "SelectChannel");
-    
     // if this property is true, then jetty will be configured to use SSL
     // leveraging the same system properties as java to specify
     // the keystore/truststore if they are set
@@ -78,7 +74,7 @@
         (System.getProperty("javax.net.ssl.keyStorePassword"));
       }
       if (null != System.getProperty("javax.net.ssl.trustStore")) {
-        sslcontext.setTrustStore
+        sslcontext.setTrustStorePath
         (System.getProperty("javax.net.ssl.trustStore"));
       }
       if (null != System.getProperty("javax.net.ssl.trustStorePassword")) {
@@ -88,34 +84,36 @@
       sslcontext.setNeedClientAuth(Boolean.getBoolean("tests.jettySsl.clientAuth"));
     }
     
-    final Connector connector;
-    final QueuedThreadPool threadPool;
-    if ("SelectChannel".equals(connectorName)) {
-      final SelectChannelConnector c = useSsl ? new SslSelectChannelConnector(sslcontext) : new SelectChannelConnector();
-      c.setReuseAddress(true);
-      c.setLowResourcesMaxIdleTime(1500);
+    final QueuedThreadPool threadPool = new QueuedThreadPool();
+    threadPool.setDaemon(true);
+    threadPool.setMaxThreads(10000);
+    threadPool.setIdleTimeout(5000);
+    threadPool.setStopTimeout(30000);
+
+    Server server = new Server(threadPool);
+    server.setStopAtShutdown(true);
+    server.manage(threadPool);
+
+
+    final ServerConnector connector;
+    if (useSsl) {
+      HttpConfiguration configuration = new HttpConfiguration();
+      configuration.setSecureScheme("https");
+      configuration.addCustomizer(new SecureRequestCustomizer());
+      ServerConnector c = new ServerConnector(server, new SslConnectionFactory(sslcontext, "http/1.1"),
+          new HttpConnectionFactory(configuration));
       connector = c;
-      threadPool = (QueuedThreadPool) c.getThreadPool();
-    } else if ("Socket".equals(connectorName)) {
-      final SocketConnector c = useSsl ? new SslSocketConnector(sslcontext) : new SocketConnector();
-      c.setReuseAddress(true);
-      connector = c;
-      threadPool = (QueuedThreadPool) c.getThreadPool();
     } else {
-      throw new IllegalArgumentException("Illegal value for system property 'tests.jettyConnector': " + connectorName);
+      ServerConnector c = new ServerConnector(server, new HttpConnectionFactory());
+      connector = c;
     }
     
     connector.setPort(0);
     connector.setHost("127.0.0.1");
-    if (threadPool != null) {
-      threadPool.setDaemon(true);
-      threadPool.setMaxThreads(10000);
-      threadPool.setMaxIdleTimeMs(5000);
-      threadPool.setMaxStopTimeMs(30000);
-    }
-    
+
     server.setConnectors(new Connector[] {connector});
     server.setSessionIdManager(new HashSessionIdManager(new Random(random().nextLong())));
+    server.setHandler(handler);
     
     server.start();
     
@@ -124,12 +122,12 @@
   
   /** Returns a {@link Server}'s port. */
   public static int serverPort(Server server) {
-    return server.getConnectors()[0].getLocalPort();
+    return ((ServerConnector)server.getConnectors()[0]).getLocalPort();
   }
   
   /** Returns a {@link Server}'s host. */
   public static String serverHost(Server server) {
-    return server.getConnectors()[0].getHost();
+    return ((ServerConnector)server.getConnectors()[0]).getHost();
   }
   
   /**
diff --git a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsFormat.java b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsFormat.java
index 01cb00a..042e361 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsFormat.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/codecs/idversion/IDVersionPostingsFormat.java
@@ -77,6 +77,7 @@
     super("IDVersion");
     this.minTermsInBlock = minTermsInBlock;
     this.maxTermsInBlock = maxTermsInBlock;
+    BlockTreeTermsWriter.validateSettings(minTermsInBlock, maxTermsInBlock);
   }
 
   @Override
diff --git a/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java b/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java
index 311e128..d96017c 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/sandbox/queries/SlowCollatedStringComparator.java
@@ -23,7 +23,7 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DocValues;
-import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.SimpleFieldComparator;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 
@@ -36,7 +36,7 @@
  * This class will be removed in Lucene 5.0
  */
 @Deprecated
-public final class SlowCollatedStringComparator extends FieldComparator<String> {
+public final class SlowCollatedStringComparator extends SimpleFieldComparator<String> {
 
   private final String[] values;
   private BinaryDocValues currentDocTerms;
@@ -93,10 +93,9 @@
   }
 
   @Override
-  public FieldComparator<String> setNextReader(LeafReaderContext context) throws IOException {
+  protected void doSetNextReader(LeafReaderContext context) throws IOException {
     currentDocTerms = DocValues.getBinary(context.reader(), field);
     docsWithField = DocValues.getDocsWithField(context.reader(), field);
-    return this;
   }
   
   @Override
diff --git a/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java b/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
index cacd2d2..02cc363 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
@@ -29,6 +29,7 @@
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.apache.lucene.analysis.Analyzer.TokenStreamComponents;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenFilter;
@@ -405,7 +406,7 @@
     if (ms instanceof ConcurrentMergeScheduler) {
       iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
           @Override
-          protected void handleMergeException(Throwable exc) {
+          protected void handleMergeException(Directory dir, Throwable exc) {
             assertTrue(exc instanceof IllegalArgumentException);
           }
         });
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
index 4b65e31..a9b2de3 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
@@ -289,14 +289,12 @@
         private Scorer scorer;
 
         @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return false;
-        }
-
-        @Override
         public void setScorer(Scorer scorer) {
-          assert scorer instanceof TermAutomatonScorer;
           this.scorer = scorer;
+          while (scorer instanceof AssertingScorer) {
+            scorer = ((AssertingScorer) scorer).getIn();
+          }
+          assert scorer instanceof TermAutomatonScorer;
         }
 
         @Override
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/NumberRangePrefixTreeStrategy.java b/lucene/spatial/src/java/org/apache/lucene/spatial/NumberRangePrefixTreeStrategy.java
index 80a0b9c..a643b22 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/NumberRangePrefixTreeStrategy.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/NumberRangePrefixTreeStrategy.java
@@ -28,10 +28,11 @@
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.queries.function.ValueSource;
-import org.apache.lucene.spatial.prefix.NumberRangePrefixTreeFacets;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.spatial.prefix.PrefixTreeFacetCounter;
 import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
+import org.apache.lucene.spatial.prefix.tree.Cell;
 import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree;
-import org.apache.lucene.util.Bits;
 import com.spatial4j.core.shape.Point;
 import com.spatial4j.core.shape.Shape;
 
@@ -75,13 +76,13 @@
   /** Calculates facets between {@code start} and {@code end} to a detail level one greater than that provided by the
    * arguments. For example, providing March to October of 2014 would return facets to the day level of those months.
    * This is just a convenience method.
-   * @see #calcFacets(IndexReaderContext, Bits, Shape, int)
+   * @see #calcFacets(IndexReaderContext, Filter, Shape, int)
    */
-  public Facets calcFacets(IndexReaderContext context, final Bits acceptDocs, UnitNRShape start, UnitNRShape end)
+  public Facets calcFacets(IndexReaderContext context, Filter filter, UnitNRShape start, UnitNRShape end)
       throws IOException {
-    Shape filter = getGrid().toRangeShape(start, end);
+    Shape facetRange = getGrid().toRangeShape(start, end);
     int detailLevel = Math.max(start.getLevel(), end.getLevel()) + 1;
-    return calcFacets(context, acceptDocs, filter, detailLevel);
+    return calcFacets(context, filter, facetRange, detailLevel);
   }
 
   /**
@@ -91,9 +92,50 @@
    * {@link org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape#getLevel()}.
    * Facet computation is implemented by navigating the underlying indexed terms efficiently.
    */
-  public Facets calcFacets(IndexReaderContext context, final Bits acceptDocs, Shape facetRange, int level)
+  public Facets calcFacets(IndexReaderContext context, Filter filter, Shape facetRange, final int level)
       throws IOException {
-    return NumberRangePrefixTreeFacets.compute(this, context, acceptDocs, facetRange, level);
+    final Facets facets = new Facets(level);
+    PrefixTreeFacetCounter.compute(this, context, filter, facetRange, level,
+        new PrefixTreeFacetCounter.FacetVisitor() {
+          Facets.FacetParentVal parentFacet;
+          UnitNRShape parentShape;
+
+          @Override
+          public void visit(Cell cell, int count) {
+            if (cell.getLevel() < level - 1) {//some ancestor of parent facet level, direct or distant
+              parentFacet = null;//reset
+              parentShape = null;//reset
+              facets.topLeaves += count;
+            } else if (cell.getLevel() == level - 1) {//parent
+              //set up FacetParentVal
+              setupParent((UnitNRShape) cell.getShape());
+              parentFacet.parentLeaves += count;
+            } else {//at facet level
+              UnitNRShape unitShape = (UnitNRShape) cell.getShape();
+              UnitNRShape unitShapeParent = unitShape.getShapeAtLevel(unitShape.getLevel() - 1);
+              if (parentFacet == null || !parentShape.equals(unitShapeParent)) {
+                setupParent(unitShapeParent);
+              }
+              //lazy init childCounts
+              if (parentFacet.childCounts == null) {
+                parentFacet.childCounts = new int[parentFacet.childCountsLen];
+              }
+              parentFacet.childCounts[unitShape.getValAtLevel(cell.getLevel())] += count;
+            }
+          }
+
+          private void setupParent(UnitNRShape unitShape) {
+            parentShape = unitShape.clone();
+            //Look for existing parentFacet (from previous segment), or create anew if needed
+            parentFacet = facets.parents.get(parentShape);
+            if (parentFacet == null) {//didn't find one; make a new one
+              parentFacet = new Facets.FacetParentVal();
+              parentFacet.childCountsLen = getGrid().getNumSubCells(parentShape);
+              facets.parents.put(parentShape, parentFacet);
+            }
+          }
+        });
+    return facets;
   }
 
   /** Facet response information */
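
With acceptDocs replaced by a Filter, callers now pass a Filter or null. A hedged sketch, assuming strategy and reader are set up elsewhere and that a null Filter means "all live docs":

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.spatial.NumberRangePrefixTreeStrategy;
import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape;

class FacetsDemo {
  static NumberRangePrefixTreeStrategy.Facets facetRange(
      NumberRangePrefixTreeStrategy strategy, IndexReader reader,
      UnitNRShape start, UnitNRShape end) throws IOException {
    // e.g. start/end could be month-level units; results go one level deeper (days)
    return strategy.calcFacets(reader.getContext(), null, start, end);
  }
}
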
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeFilter.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeFilter.java
index 71d3693..6a2e283 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeFilter.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractPrefixTreeFilter.java
@@ -17,23 +17,21 @@
  * limitations under the License.
  */
 
+import java.io.IOException;
+
 import com.spatial4j.core.shape.Shape;
+import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
+import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.FixedBitSet;
-
-import java.io.IOException;
 
 /**
  * Base class for Lucene Filters on SpatialPrefixTree fields.
- *
  * @lucene.experimental
  */
 public abstract class AbstractPrefixTreeFilter extends Filter {
@@ -73,14 +71,15 @@
   }
 
   /** Holds transient state and docid collecting utility methods as part of
-   * traversing a {@link TermsEnum}. */
-  public abstract class BaseTermsEnumTraverser {
+   * traversing a {@link TermsEnum} for a {@link org.apache.lucene.index.LeafReaderContext}. */
+  public abstract class BaseTermsEnumTraverser {//TODO rename to LeafTermsEnumTraverser ?
+    //note: only 'fieldName' (accessed in constructor) keeps this from being a static inner class
 
     protected final LeafReaderContext context;
     protected Bits acceptDocs;
     protected final int maxDoc;
 
-    protected TermsEnum termsEnum;//remember to check for null in getDocIdSet
+    protected TermsEnum termsEnum;//remember to check for null!
     protected DocsEnum docsEnum;
 
     public BaseTermsEnumTraverser(LeafReaderContext context, Bits acceptDocs) throws IOException {
@@ -93,32 +92,12 @@
         this.termsEnum = terms.iterator(null);
     }
 
-    protected void collectDocs(FixedBitSet bitSet) throws IOException {
-      //WARN: keep this specialization in sync
+    protected void collectDocs(BitSet bitSet) throws IOException {
       assert termsEnum != null;
       docsEnum = termsEnum.docs(acceptDocs, docsEnum, DocsEnum.FLAG_NONE);
-      int docid;
-      while ((docid = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-        bitSet.set(docid);
-      }
+      bitSet.or(docsEnum);
     }
 
-    /* Eventually uncomment when needed.
-
-    protected void collectDocs(Collector collector) throws IOException {
-      //WARN: keep this specialization in sync
-      assert termsEnum != null;
-      docsEnum = termsEnum.docs(acceptDocs, docsEnum, DocsEnum.FLAG_NONE);
-      int docid;
-      while ((docid = docsEnum.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-        collector.collect(docid);
-      }
-    }
-
-    public abstract class Collector {
-      abstract void collect(int docid) throws IOException;
-    }
-    */
   }
 
 }
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeFilter.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeFilter.java
index 5063860..fed0dc7 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeFilter.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/AbstractVisitingPrefixTreeFilter.java
@@ -17,6 +17,9 @@
  * limitations under the License.
  */
 
+import java.io.IOException;
+import java.util.Iterator;
+
 import com.spatial4j.core.shape.Shape;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.TermsEnum;
@@ -27,9 +30,6 @@
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 
-import java.io.IOException;
-import java.util.Iterator;
-
 /**
  * Traverses a {@link SpatialPrefixTree} indexed field, using the template and
  * visitor design patterns for subclasses to guide the traversal and collect
@@ -326,17 +326,21 @@
     protected abstract DocIdSet finish() throws IOException;
 
     /**
-     * Visit an indexed cell returned from
-     * {@link #findSubCellsToVisit(org.apache.lucene.spatial.prefix.tree.Cell)}.
+     * Visit an indexed non-leaf cell returned from
+     * {@link #findSubCellsToVisit(org.apache.lucene.spatial.prefix.tree.Cell)}
+     * that is also found in the index.
+     * It will also be called by the default implementation of
+     * {@link #visitScanned(org.apache.lucene.spatial.prefix.tree.Cell)} for
+     * cells at the bottom detail level.
      *
-     * @param cell An intersecting cell.
+     * @param cell An intersecting cell; not a leaf.
      * @return true to descend to more levels. It is an error to return true
      * if cell.level == detailLevel
      */
     protected abstract boolean visit(Cell cell) throws IOException;
 
     /**
-     * Called after visit() returns true and an indexed leaf cell is found. An
+     * Called when an indexed leaf cell is found. An
      * indexed leaf cell means associated documents generally won't be found at
      * further detail levels.
      */
@@ -345,8 +349,19 @@
     /**
      * The cell is either indexed as a leaf or is the last level of detail. It
      * might not even intersect the query shape, so be sure to check for that.
+     * The default implementation will check that and, if it passes, will call
+     * {@link #visitLeaf(org.apache.lucene.spatial.prefix.tree.Cell)} or
+     * {@link #visit(org.apache.lucene.spatial.prefix.tree.Cell)}.
      */
-    protected abstract void visitScanned(Cell cell) throws IOException;
+    protected void visitScanned(Cell cell) throws IOException {
+      if (queryShape.relate(cell.getShape()).intersects()) {
+        if (cell.isLeaf()) {
+          visitLeaf(cell);
+        } else {
+          visit(cell);
+        }
+      }
+    }
 
     protected void preSiblings(VNode vNode) throws IOException {
     }
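
Under the revised contract a subclass supplies visit() and visitLeaf(), and the default visitScanned() above performs the intersection check and dispatches between them. A schematic override pair, assuming a results bitset on the subclass and the detailLevel field of the enclosing filter:

// inside a VisitorTemplate subclass (sketch, not from this patch)
@Override
protected boolean visit(Cell cell) throws IOException {
  if (cell.getLevel() == detailLevel) {
    collectDocs(results); // bottom level: count matches, don't descend
    return false;
  }
  return true; // keep descending toward detailLevel
}

@Override
protected void visitLeaf(Cell cell) throws IOException {
  collectDocs(results); // indexed leaf: no further detail below this cell
}
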
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/CellTokenStream.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/CellTokenStream.java
index 5a3d6c8..7d433a9 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/CellTokenStream.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/CellTokenStream.java
@@ -29,6 +29,7 @@
 
 import java.io.IOException;
 import java.util.Iterator;
+import java.util.Objects;
 
 /**
  * A TokenStream used internally by {@link org.apache.lucene.spatial.prefix.PrefixTreeStrategy}.
@@ -124,6 +125,31 @@
       fillBytesRef();
       reflector.reflect(TermToBytesRefAttribute.class, "bytes", BytesRef.deepCopyOf(bytes));
     }
+
+    @Override
+    public CellTermAttributeImpl clone() {
+      final CellTermAttributeImpl clone = (CellTermAttributeImpl) super.clone();
+      clone.bytes = BytesRef.deepCopyOf(bytes);
+      return clone;
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(cell, omitLeafByte);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) return true;
+      if (obj == null) return false;
+      if (getClass() != obj.getClass()) return false;
+      CellTermAttributeImpl other = (CellTermAttributeImpl) obj;
+      if (cell == null) {
+        if (other.cell != null) return false;
+      } else if (!cell.equals(other.cell)) return false;
+      if (omitLeafByte != other.omitLeafByte) return false;
+      return true;
+    }
   }
 
   public CellTokenStream() {
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeFilter.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeFilter.java
index c162f72..bf5544d 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeFilter.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/IntersectsPrefixTreeFilter.java
@@ -17,19 +17,18 @@
  * limitations under the License.
  */
 
+import java.io.IOException;
+
 import com.spatial4j.core.shape.Shape;
 import com.spatial4j.core.shape.SpatialRelation;
-
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.spatial.prefix.tree.Cell;
 import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 
-import java.io.IOException;
-
 /**
  * A Filter matching documents that have an {@link SpatialRelation#INTERSECTS}
  * (i.e. not DISTINCT) relationship with a provided query shape.
@@ -90,12 +89,6 @@
         collectDocs(results);
       }
 
-      @Override
-      protected void visitScanned(Cell cell) throws IOException {
-        if (queryShape.relate(cell.getShape()).intersects())
-          collectDocs(results);
-      }
-
     }.getDocIdSet();
   }
 
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/NumberRangePrefixTreeFacets.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/NumberRangePrefixTreeFacets.java
deleted file mode 100644
index e0c6dea..0000000
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/NumberRangePrefixTreeFacets.java
+++ /dev/null
@@ -1,215 +0,0 @@
-package org.apache.lucene.spatial.prefix;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.util.List;
-
-import com.spatial4j.core.shape.Shape;
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReaderContext;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.spatial.NumberRangePrefixTreeStrategy;
-import org.apache.lucene.spatial.NumberRangePrefixTreeStrategy.Facets;
-import org.apache.lucene.spatial.prefix.tree.Cell;
-import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree;
-import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape;
-import org.apache.lucene.util.Bits;
-
-/**
- * Computes range facets for {@link NumberRangePrefixTreeStrategy}.
- *
- * @see NumberRangePrefixTreeStrategy#calcFacets(IndexReaderContext, Bits, Shape, int)
- *
- * @lucene.internal
- */
-public class NumberRangePrefixTreeFacets {
-
-  public static Facets compute(NumberRangePrefixTreeStrategy strategy,
-                               IndexReaderContext context, final Bits acceptDocs, Shape queryShape, int facetLevel)
-      throws IOException {
-
-    Facets facets = new Facets(facetLevel);
-
-    // TODO should we pre-create all parent buckets? It's not necessary, but the client/user may find it convenient to
-    //   have so it needn't do a bunch of calendar work itself to ascertain which buckets are missing. It would
-    //   also then easily allow us to have a too-many-facets exception (e.g. you ask for a millisecond bucket for
-    //   the entire year). We could do that now but we would only be able to throw if the actual counts get to the
-    //   threshold vs. being able to know the possible values consistently a-priori which is much preferred. Now on the
-    //   other hand, we can facet over extremely sparse data sets without needless parent buckets.
-
-    //We collect per-leaf
-    final List<LeafReaderContext> leaves = context.leaves();
-    for (final LeafReaderContext leafCtx : leaves) {
-      //determine leaf acceptDocs
-      Bits leafAcceptDocs;
-      if (acceptDocs == null) {
-        leafAcceptDocs = leafCtx.reader().getLiveDocs();
-      } else if (leaves.size() == 1) {
-        leafAcceptDocs = acceptDocs;
-      } else {
-        leafAcceptDocs = new Bits() {//note: it'd be nice if Lucene's BitsSlice was public.
-
-          final int docBase = leafCtx.docBase;
-
-          @Override
-          public boolean get(int index) {
-            return acceptDocs.get(docBase + index);
-          }
-
-          @Override
-          public int length() {
-            return leafCtx.reader().maxDoc();
-          }
-        };
-      }
-
-      facets = compute(strategy, leafCtx, leafAcceptDocs, queryShape, facets);
-    }
-    return facets;
-
-  }
-
-  public static Facets compute(final NumberRangePrefixTreeStrategy strategy,
-                               LeafReaderContext context, Bits acceptDocs, Shape queryShape, final Facets facets)
-      throws IOException {
-    final NumberRangePrefixTree tree = strategy.getGrid();
-    final int scanLevel = tree.getMaxLevels();
-
-    //TODO extract AbstractVisitingPrefixTreeFilter / VisitorTemplate to be generic, not necessarily a Filter/DocIdSet.
-    new AbstractVisitingPrefixTreeFilter(queryShape, strategy.getFieldName(), tree, facets.detailLevel, scanLevel) {
-
-      @Override
-      public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
-        return new VisitorTemplate(context, acceptDocs, !strategy.pointsOnly) {
-
-          Facets.FacetParentVal parentFacet;
-
-          @Override
-          protected void start() throws IOException {
-          }
-
-          @Override
-          protected DocIdSet finish() throws IOException {
-            return null;//unused
-          }
-
-          @Override
-          protected boolean visit(Cell cell) throws IOException {
-            // At facetLevel...
-            if (cell.getLevel() == facets.detailLevel) {
-              //note: parentFacet shouldn't be null if we get here
-
-              // Count docs
-              int count = countDocsAtThisTermInSet(acceptDocs);
-              if (count > 0) {
-                //lazy init childCounts
-                if (parentFacet.childCounts == null) {
-                  parentFacet.childCounts = new int[parentFacet.childCountsLen];
-                }
-                UnitNRShape unitShape = (UnitNRShape) cell.getShape();
-                parentFacet.childCounts[unitShape.getValAtLevel(cell.getLevel())] += count;
-              }
-              return false;//don't descend further; this is enough detail
-            }
-
-            parentFacet = null;//reset
-
-            // At parent
-            if (cell.getLevel() == facets.detailLevel - 1) {
-              if (!hasDocsAtThisTermInSet(acceptDocs)) {
-                return false;
-              }
-              //Look for existing parentFacet (from previous segment)
-              UnitNRShape unitShape = (UnitNRShape) cell.getShape();
-              UnitNRShape key = unitShape.clone();
-              parentFacet = facets.parents.get(key);
-              if (parentFacet == null) {//didn't find one; make a new one
-                parentFacet = new Facets.FacetParentVal();
-                parentFacet.childCountsLen = tree.getNumSubCells(unitShape);
-                facets.parents.put(key, parentFacet);
-              }
-            }
-            return true;
-          }
-
-          @Override
-          protected void visitLeaf(Cell cell) throws IOException {
-            final int levelsToGo = facets.detailLevel - cell.getLevel();
-            if (levelsToGo <= 0) {
-              return;//do nothing; we already collected in visit()
-              //note: once we index ranges without direct prefix's of leaves,
-              //  we'll need to collect here at levelsToGo==0 too.
-            }
-            int count = countDocsAtThisTermInSet(acceptDocs);
-            if (count == 0) {
-              return;
-            }
-            if (levelsToGo == 1) {
-              // Because all leaves also have an indexed non-leaf, we can be sure we have parentCell set via visit().
-              parentFacet.parentLeaves += count;
-            } else {
-              facets.topLeaves += count;
-            }
-
-          }
-
-          @Override
-          protected void visitScanned(Cell cell) throws IOException {
-            //TODO does this belong in superclass?  It ignores boolean result from visit(), but that's ok.
-            if (queryShape.relate(cell.getShape()).intersects()) {
-              if (cell.isLeaf()) {
-                visitLeaf(cell);
-              } else {
-                visit(cell);
-              }
-            }
-          }
-
-          //TODO These utility methods could move to superclass
-
-          private int countDocsAtThisTermInSet(Bits actualBaseDocs) throws IOException {
-            if (actualBaseDocs == null) {
-              return termsEnum.docFreq();
-            }
-            int count = 0;
-            docsEnum = termsEnum.docs(actualBaseDocs, docsEnum, DocsEnum.FLAG_NONE);
-            while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
-              count++;
-            }
-            return count;
-          }
-
-          private boolean hasDocsAtThisTermInSet(Bits actualBaseDocs) throws IOException {
-            if (actualBaseDocs == null) {
-              return true;
-            }
-            docsEnum = termsEnum.docs(actualBaseDocs, docsEnum, DocsEnum.FLAG_NONE);
-            return (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
-          }
-
-        }.getDocIdSet();
-      }
-    }.getDocIdSet(context, acceptDocs);
-
-    return facets;
-  }
-
-}
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/PrefixTreeFacetCounter.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/PrefixTreeFacetCounter.java
new file mode 100644
index 0000000..036346d
--- /dev/null
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/PrefixTreeFacetCounter.java
@@ -0,0 +1,191 @@
+package org.apache.lucene.spatial.prefix;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+
+import com.spatial4j.core.shape.Shape;
+import org.apache.lucene.index.DocsEnum;
+import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.spatial.prefix.tree.Cell;
+import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.SparseFixedBitSet;
+
+/**
+ * Computes facets on cells for {@link org.apache.lucene.spatial.prefix.PrefixTreeStrategy}.
+ *
+ * @lucene.experimental
+ */
+public class PrefixTreeFacetCounter {
+
+  /** A callback/visitor of facet counts. */
+  public static abstract class FacetVisitor {
+    /** Called at the start of the segment, if there is indexed data. */
+    public void startOfSegment() {}
+
+    /** Called for cells with a leaf, or cells at the target facet level.  {@code count} is greater than zero.
+     * When an ancestor cell is given with non-zero count, the count can be considered to be added to all cells
+     * below. You won't necessarily get a cell at level {@code facetLevel} if the indexed data is coarser (bigger).
+     */
+    public abstract void visit(Cell cell, int count);
+  }
+
+  private PrefixTreeFacetCounter() {
+  }
+
+  /**
+   * Computes facets using a callback/visitor style design, allowing flexibility for the caller to determine what to do
+   * with each underlying count.
+   *
+   * @param strategy the prefix tree strategy (contains the field reference, grid, max levels)
+   * @param context the IndexReader's context
+   * @param filter a Filter to limit counted docs. For optimal performance, its
+   *               {@link org.apache.lucene.search.DocIdSet#bits()} should be non-null. If no filter is provided, live
+   *               docs are counted.
+   * @param queryShape the shape to limit the range of facet counts to
+   * @param facetLevel the maximum depth (detail) of faceted cells
+   * @param facetVisitor the visitor/callback to receive the counts
+   */
+  public static void compute(PrefixTreeStrategy strategy, IndexReaderContext context, Filter filter,
+                             Shape queryShape, int facetLevel, FacetVisitor facetVisitor)
+      throws IOException {
+    //We collect per-leaf
+    for (final LeafReaderContext leafCtx : context.leaves()) {
+      //determine leaf acceptDocs Bits
+      Bits leafAcceptDocs;
+      if (filter == null) {
+        leafAcceptDocs = leafCtx.reader().getLiveDocs();//filter deleted
+      } else {
+        final DocIdSet docIdSet = filter.getDocIdSet(leafCtx, leafCtx.reader().getLiveDocs());
+        if (docIdSet == null) {
+          continue;//no docs in filter
+        }
+        leafAcceptDocs = docIdSet.bits();
+        if (leafAcceptDocs == null) {
+          final DocIdSetIterator iterator = docIdSet.iterator();
+          if (iterator == null) {
+            continue;//no docs in filter
+          }
+          //build bits from the iterator (abnormal case; hopefully there aren't many docs)
+          SparseFixedBitSet bitSet = new SparseFixedBitSet(leafCtx.reader().maxDoc());
+          bitSet.or(iterator);
+          leafAcceptDocs = bitSet;
+        }
+      }
+
+      compute(strategy, leafCtx, leafAcceptDocs, queryShape, facetLevel, facetVisitor);
+    }
+  }
+
+  /** Lower-level per-leaf segment method. */
+  public static void compute(final PrefixTreeStrategy strategy, final LeafReaderContext context, final Bits acceptDocs,
+                             final Shape queryShape, final int facetLevel, final FacetVisitor facetVisitor)
+      throws IOException {
+    if (acceptDocs != null && acceptDocs.length() != context.reader().maxDoc()) {
+      throw new IllegalArgumentException(
+          "acceptDocs bits length " + acceptDocs.length() +" != leaf maxdoc " + context.reader().maxDoc());
+    }
+    final SpatialPrefixTree tree = strategy.getGrid();
+
+    //scanLevel is an optimization knob of AbstractVisitingPrefixTreeFilter. It's unlikely
+    // another scanLevel would be much faster and it tends to be a risky knob (can help a little, can hurt a ton).
+    // TODO use RPT's configured scan level?  Do we know better here?  Hard to say.
+    final int scanLevel = tree.getMaxLevels();
+
+    //AbstractVisitingPrefixTreeFilter is a Lucene Filter.  We don't need a filter; we use it for its great prefix-tree
+    // traversal code.  TODO consider refactoring if/when it makes sense (more use cases than this)
+    new AbstractVisitingPrefixTreeFilter(queryShape, strategy.getFieldName(), tree, facetLevel, scanLevel) {
+
+      @Override
+      public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
+        assert facetLevel == super.detailLevel;//same thing, FYI. (constant)
+
+        final boolean hasIndexedLeaves = !strategy.isPointsOnly();
+
+        return new VisitorTemplate(context, acceptDocs, hasIndexedLeaves) {
+
+          @Override
+          protected void start() throws IOException {
+            facetVisitor.startOfSegment();
+          }
+
+          @Override
+          protected DocIdSet finish() throws IOException {
+            return null;//unused
+          }
+
+          @Override
+          protected boolean visit(Cell cell) throws IOException {
+            // At facetLevel...
+            if (cell.getLevel() == facetLevel) {
+              // Count docs
+              visitLeaf(cell);//we're not a leaf but we treat it as such at facet level
+              return false;//don't descend further; this is enough detail
+            }
+
+            // We optimize for discriminating filters (reflected in acceptDocs) and short-circuit if no
+            // matching docs. We could do this at all levels or never but the closer we get to the facet level, the
+            // higher the probability this is worthwhile. We also do it when docFreq == 1 because it's a cheap check,
+            // especially due to "pulsing" in the codec.
+            //TODO this opt should move to VisitorTemplate (which contains an optimization TODO to this effect)
+            if (cell.getLevel() == facetLevel - 1 || termsEnum.docFreq() == 1) {
+              if (!hasDocsAtThisTerm()) {
+                return false;
+              }
+            }
+            return true;
+          }
+
+          @Override
+          protected void visitLeaf(Cell cell) throws IOException {
+            final int count = countDocsAtThisTerm();
+            if (count > 0) {
+              facetVisitor.visit(cell, count);
+            }
+          }
+
+          private int countDocsAtThisTerm() throws IOException {
+            if (acceptDocs == null) {
+              return termsEnum.docFreq();
+            }
+            int count = 0;
+            docsEnum = termsEnum.docs(acceptDocs, docsEnum, DocsEnum.FLAG_NONE);
+            while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+              count++;
+            }
+            return count;
+          }
+
+          private boolean hasDocsAtThisTerm() throws IOException {
+            if (acceptDocs == null) {
+              return true;
+            }
+            docsEnum = termsEnum.docs(acceptDocs, docsEnum, DocsEnum.FLAG_NONE);
+            return (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
+          }
+
+        }.getDocIdSet();
+      }
+    }.getDocIdSet(context, acceptDocs);
+  }
+}
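For orientation, a minimal sketch (not part of the patch) of driving the new facet API above; the reader, strategy, query shape, and facet level are assumed to be set up elsewhere:

    import java.io.IOException;

    import com.spatial4j.core.shape.Shape;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.spatial.prefix.PrefixTreeFacetCounter;
    import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
    import org.apache.lucene.spatial.prefix.tree.Cell;

    class FacetCountExample {
      /** Prints every faceted cell with its count; a null filter means live docs are counted. */
      static void printFacets(PrefixTreeStrategy strategy, IndexReader reader,
                              Shape queryShape, int facetLevel) throws IOException {
        PrefixTreeFacetCounter.compute(strategy, reader.getContext(), null /*filter*/,
            queryShape, facetLevel, new PrefixTreeFacetCounter.FacetVisitor() {
              @Override
              public void visit(Cell cell, int count) {
                // count > 0; a count reported on an ancestor cell applies to all cells below it
                System.out.println(cell.getShape() + " -> " + count);
              }
            });
      }
    }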
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java
index fc51ef8..3f535dc 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/PrefixTreeStrategy.java
@@ -21,6 +21,8 @@
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
+import com.spatial4j.core.shape.Point;
+import com.spatial4j.core.shape.Shape;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.FieldTypes;
@@ -31,8 +33,6 @@
 import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
 import org.apache.lucene.spatial.query.SpatialArgs;
 import org.apache.lucene.spatial.util.ShapeFieldCacheDistanceValueSource;
-import com.spatial4j.core.shape.Point;
-import com.spatial4j.core.shape.Shape;
 
 /**
  * An abstract SpatialStrategy based on {@link SpatialPrefixTree}. The two
@@ -80,6 +80,7 @@
   private final Map<String, PointPrefixTreeFieldCacheProvider> provider = new ConcurrentHashMap<>();
   protected int defaultFieldValuesArrayLen = 2;
   protected double distErrPct = SpatialArgs.DEFAULT_DISTERRPCT;// [ 0 TO 0.5 ]
+  protected boolean pointsOnly = false;//if true, there are no leaves
 
   public PrefixTreeStrategy(SpatialPrefixTree grid, String fieldName) {
     super(grid.getSpatialContext(), fieldName);
@@ -115,6 +116,16 @@
     this.distErrPct = distErrPct;
   }
 
+  public boolean isPointsOnly() {
+    return pointsOnly;
+  }
+
+  /** True if only indexed points shall be supported. There are no "leaves" in such a case.  See
+   *  {@link org.apache.lucene.spatial.prefix.IntersectsPrefixTreeFilter#hasIndexedLeaves}. */
+  public void setPointsOnly(boolean pointsOnly) {
+    this.pointsOnly = pointsOnly;
+  }
+
   @Override
   public void addFields(Document doc, Shape shape) {
     double distErr = SpatialArgs.calcDistanceFromErrPct(shape, distErrPct, ctx);
@@ -139,6 +150,9 @@
   }
 
   protected TokenStream createTokenStream(Shape shape, int detailLevel) {
+    if (pointsOnly && !(shape instanceof Point)) {
+      throw new IllegalArgumentException("pointsOnly is true yet a " + shape.getClass() + " is given for indexing");
+    }
     Iterator<Cell> cells = grid.getTreeCellIterator(shape, detailLevel);
     return new CellTokenStream().setCells(cells);
   }
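A short sketch of the relocated pointsOnly flag in use, assuming the usual GeohashPrefixTree setup; the field name "geo" and the 11-level tree are illustrative only:

    import com.spatial4j.core.context.SpatialContext;
    import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
    import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;

    class PointsOnlyExample {
      static RecursivePrefixTreeStrategy newPointsOnlyStrategy() {
        SpatialContext ctx = SpatialContext.GEO;
        RecursivePrefixTreeStrategy strategy =
            new RecursivePrefixTreeStrategy(new GeohashPrefixTree(ctx, 11), "geo");
        // The flag now lives on PrefixTreeStrategy, so every subclass gets it.
        strategy.setPointsOnly(true);
        // With pointsOnly set, createTokenStream() rejects any non-Point shape
        // with an IllegalArgumentException, and no leaf cells are indexed.
        return strategy;
      }
    }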
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java
index d45c785..b1b3814 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/RecursivePrefixTreeStrategy.java
@@ -57,8 +57,6 @@
   // and a LegacyPrefixTree.
   protected boolean pruneLeafyBranches = true;
 
-  protected boolean pointsOnly = false;//if true, there are no leaves
-
   protected boolean multiOverlappingIndexedShapes = true;
 
   public RecursivePrefixTreeStrategy(SpatialPrefixTree grid, String fieldName) {
@@ -66,6 +64,10 @@
     prefixGridScanLevel = grid.getMaxLevels() - 4;//TODO this default constant is dependent on the prefix grid size
   }
 
+  public int getPrefixGridScanLevel() {
+    return prefixGridScanLevel;
+  }
+
   /**
    * Sets the grid level [1-maxLevels] at which indexed terms are scanned brute-force
    * instead of by grid decomposition.  By default this is maxLevels - 4.  The
@@ -78,10 +80,8 @@
     this.prefixGridScanLevel = prefixGridScanLevel;
   }
 
-  /** True if only indexed points shall be supported. There are no "leafs" in such a case.  See
-   *  {@link IntersectsPrefixTreeFilter#hasIndexedLeaves}. */
-  public void setPointsOnly(boolean pointsOnly) {
-    this.pointsOnly = pointsOnly;
+  public boolean isMultiOverlappingIndexedShapes() {
+    return multiOverlappingIndexedShapes;
   }
 
   /** See {@link ContainsPrefixTreeFilter#multiOverlappingIndexedShapes}. */
@@ -89,6 +89,10 @@
     this.multiOverlappingIndexedShapes = multiOverlappingIndexedShapes;
   }
 
+  public boolean isPruneLeafyBranches() {
+    return pruneLeafyBranches;
+  }
+
   /** An optional hint affecting non-point shapes: it will
    * simplify/aggregate sets of complete leaves in a cell to its parent, resulting in ~20-25%
    * fewer indexed cells. However, it will likely be removed in the future. (default=true)
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeFilter.java b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeFilter.java
index 4191a1f..c245aed 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeFilter.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/prefix/WithinPrefixTreeFilter.java
@@ -19,15 +19,6 @@
 
 import java.io.IOException;
 
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.spatial.prefix.tree.Cell;
-import org.apache.lucene.spatial.prefix.tree.CellIterator;
-import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BitDocIdSet;
-import org.apache.lucene.util.FixedBitSet;
-
 import com.spatial4j.core.context.SpatialContext;
 import com.spatial4j.core.distance.DistanceUtils;
 import com.spatial4j.core.shape.Circle;
@@ -35,6 +26,14 @@
 import com.spatial4j.core.shape.Rectangle;
 import com.spatial4j.core.shape.Shape;
 import com.spatial4j.core.shape.SpatialRelation;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.spatial.prefix.tree.Cell;
+import org.apache.lucene.spatial.prefix.tree.CellIterator;
+import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
+import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FixedBitSet;
 
 /**
  * Finds docs where its indexed shape is {@link org.apache.lucene.spatial.query.SpatialOperation#IsWithin
@@ -165,7 +164,10 @@
 
       @Override
       protected void visitLeaf(Cell cell) throws IOException {
-        //visitRelation is declared as a field, populated by visit() so we don't recompute it
+        //visitRelation is declared as a field, populated by visit() so we don't recompute it.
+        // We have a specialized visitScanned() which doesn't call this. If we didn't, we would
+        // not be able to assume visitRelation is from a prior visit() call since in scanning,
+        // parent cells aren't visited.
         assert detailLevel != cell.getLevel();
         assert visitRelation == cell.getShape().relate(queryShape);
         if (allCellsIntersectQuery(cell, visitRelation))
@@ -199,6 +201,7 @@
 
       @Override
       protected void visitScanned(Cell cell) throws IOException {
+        //slightly optimize over default impl; required for our 'visitRelation' field re-use above
         if (allCellsIntersectQuery(cell, null)) {
           collectDocs(inside);
         } else {
diff --git a/lucene/spatial/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java b/lucene/spatial/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java
index c5eb590..c7b7253 100644
--- a/lucene/spatial/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java
+++ b/lucene/spatial/src/java/org/apache/lucene/spatial/util/ShapeAreaValueSource.java
@@ -17,6 +17,9 @@
  * limitations under the License.
  */
 
+import java.io.IOException;
+import java.util.Map;
+
 import com.spatial4j.core.context.SpatialContext;
 import com.spatial4j.core.shape.Shape;
 import org.apache.lucene.index.LeafReaderContext;
@@ -26,9 +29,6 @@
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
 
-import java.io.IOException;
-import java.util.Map;
-
 /**
  * The area of a Shape retrieved from a ValueSource via
  * {@link org.apache.lucene.queries.function.FunctionValues#objectVal(int)}.
@@ -41,11 +41,13 @@
   private final ValueSource shapeValueSource;
   private final SpatialContext ctx;//not part of identity; should be associated with shapeValueSource indirectly
   private final boolean geoArea;
+  private final double multiplier;
 
-  public ShapeAreaValueSource(ValueSource shapeValueSource, SpatialContext ctx, boolean geoArea) {
+  public ShapeAreaValueSource(ValueSource shapeValueSource, SpatialContext ctx, boolean geoArea, double multiplier) {
     this.shapeValueSource = shapeValueSource;
     this.ctx = ctx;
     this.geoArea = geoArea;
+    this.multiplier = multiplier;
   }
 
   @Override
@@ -70,7 +72,7 @@
           return 0;//or NaN?
         //This part of Spatial4j API is kinda weird. Passing null means 2D area, otherwise geo
         //   assuming ctx.isGeo()
-        return shape.getArea( geoArea ? ctx : null );
+        return shape.getArea( geoArea ? ctx : null ) * multiplier;
       }
 
       @Override
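A hedged sketch of the new multiplier parameter in use; the square-degrees-to-square-kilometers conversion via DistanceUtils.DEG_TO_KM is an illustrative assumption, not something this patch prescribes:

    import com.spatial4j.core.context.SpatialContext;
    import com.spatial4j.core.distance.DistanceUtils;
    import org.apache.lucene.queries.function.ValueSource;
    import org.apache.lucene.spatial.util.ShapeAreaValueSource;

    class AreaExample {
      /** Geodetic area scaled from square degrees to rough square kilometers. */
      static ValueSource areaInSquareKm(ValueSource shapeValueSource, SpatialContext ctx) {
        double degToKm = DistanceUtils.DEG_TO_KM;
        // geoArea=true: spherical area in square degrees; the multiplier converts units.
        return new ShapeAreaValueSource(shapeValueSource, ctx, true, degToKm * degToKm);
      }
    }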
diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java b/lucene/spatial/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java
index 31572ee..368c5ac 100644
--- a/lucene/spatial/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java
+++ b/lucene/spatial/src/test/org/apache/lucene/spatial/bbox/TestBBoxStrategy.java
@@ -298,9 +298,11 @@
     adoc("100", ctx.makeRectangle(0, 20, 40, 80));
     adoc("999", (Shape) null);
     commit();
-    checkValueSource(new ShapeAreaValueSource(bboxStrategy.makeShapeValueSource(), ctx, false),
+    checkValueSource(new ShapeAreaValueSource(bboxStrategy.makeShapeValueSource(), ctx, false, 1.0),
         new float[]{800f, 0f}, 0f);
-    checkValueSource(new ShapeAreaValueSource(bboxStrategy.makeShapeValueSource(), ctx, true),//geo
+    checkValueSource(new ShapeAreaValueSource(bboxStrategy.makeShapeValueSource(), ctx, true, 1.0),//geo
         new float[]{391.93f, 0f}, 0.01f);
+    checkValueSource(new ShapeAreaValueSource(bboxStrategy.makeShapeValueSource(), ctx, true, 2.0),
+        new float[]{783.86f, 0f}, 0.01f); // testing with a different multiplier
   }
 }
diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java b/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java
index 5efcbec..f190bf0 100644
--- a/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java
+++ b/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/NumberRangeFacetsTest.java
@@ -26,11 +26,8 @@
 import com.carrotsearch.randomizedtesting.annotations.Repeat;
 import com.spatial4j.core.shape.Shape;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.queries.TermsFilter;
+import org.apache.lucene.search.Filter;
 import org.apache.lucene.spatial.NumberRangePrefixTreeStrategy;
 import org.apache.lucene.spatial.NumberRangePrefixTreeStrategy.Facets;
 import org.apache.lucene.spatial.StrategyTestCase;
@@ -39,7 +36,6 @@
 import org.apache.lucene.spatial.prefix.tree.DateRangePrefixTree;
 import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree;
 import org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape;
-import org.apache.lucene.util.FixedBitSet;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -114,47 +110,40 @@
         detailLevel = -1 * detailLevel;
       }
 
-      //Randomly pick a filter as Bits/acceptDocs
-      FixedBitSet acceptDocs = null;//the answer
+      //Randomly pick a filter
+      Filter filter = null;
       List<Integer> acceptFieldIds = new ArrayList<>();
       if (usually()) {
         //get all possible IDs into a list, random shuffle it, then randomly choose how many of the first we use to
         // replace the list.
         for (int i = 0; i < indexedShapes.size(); i++) {
-          if (indexedShapes.get(i) == null) {
+          if (indexedShapes.get(i) == null) { // we deleted this one
             continue;
           }
           acceptFieldIds.add(i);
         }
         Collections.shuffle(acceptFieldIds, random());
         acceptFieldIds = acceptFieldIds.subList(0, randomInt(acceptFieldIds.size()));
-        acceptDocs = new FixedBitSet(indexSearcher.getIndexReader().maxDoc());
-        //query for their Lucene docIds to put into acceptDocs
         if (!acceptFieldIds.isEmpty()) {
-          BooleanQuery acceptQuery = new BooleanQuery();
+          List<Term> terms = new ArrayList<>();
           for (Integer acceptDocId : acceptFieldIds) {
-            acceptQuery.add(new TermQuery(new Term("id", acceptDocId.toString())), BooleanClause.Occur.SHOULD);
+            terms.add(new Term("id", acceptDocId.toString()));
           }
-          final TopDocs topDocs = indexSearcher.search(acceptQuery, numIndexedShapes);
-
-          for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
-            acceptDocs.set(scoreDoc.doc);
-          }
-
+          filter = new TermsFilter(terms);
         }
       }
 
       //Lets do it!
       NumberRangePrefixTree.NRShape facetRange = tree.toRangeShape(tree.toShape(leftCal), tree.toShape(rightCal));
       Facets facets = ((NumberRangePrefixTreeStrategy) strategy)
-          .calcFacets(indexSearcher.getTopReaderContext(), acceptDocs, facetRange, detailLevel);
+          .calcFacets(indexSearcher.getTopReaderContext(), filter, facetRange, detailLevel);
 
       //System.out.println("Q: " + queryIdx + " " + facets);
 
       //Verify results. We do it by looping over indexed shapes and reducing the facet counts.
       Shape facetShapeRounded = facetRange.roundToLevel(detailLevel);
       for (int indexedShapeId = 0; indexedShapeId < indexedShapes.size(); indexedShapeId++) {
-        if (acceptDocs != null && !acceptFieldIds.contains(indexedShapeId)) {
+        if (filter != null && !acceptFieldIds.contains(indexedShapeId)) {
           continue;// this doc was filtered out via acceptDocs
         }
         Shape indexedShape = indexedShapes.get(indexedShapeId);
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
index 60ebeb3..a36e247 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
@@ -124,6 +124,10 @@
   protected final Analyzer indexAnalyzer;
   private final Directory dir;
   final int minPrefixChars;
+  
+  private final boolean allTermsRequired;
+  private final boolean highlight;
+  
   private final boolean commitOnBuild;
 
   /** Used for ongoing NRT additions/updates. */
@@ -135,6 +139,12 @@
   /** Default minimum number of leading characters before
    *  PrefixQuery is used (4). */
   public static final int DEFAULT_MIN_PREFIX_CHARS = 4;
+  
+  /** Default boolean clause option for multiple terms matching (all terms required). */
+  public static final boolean DEFAULT_ALL_TERMS_REQUIRED = true;
+
+  /** Default highlighting option. */
+  public static final boolean DEFAULT_HIGHLIGHT = true;
 
   /** How we sort the postings and search results. */
   private static final Sort SORT = new Sort(new SortField("weight", SortField.Type.LONG, true));
@@ -145,9 +155,9 @@
    *  Lucene index).  Note that {@link #close}
    *  will also close the provided directory. */
   public AnalyzingInfixSuggester(Directory dir, Analyzer analyzer) throws IOException {
-    this(dir, analyzer, analyzer, DEFAULT_MIN_PREFIX_CHARS, false);
+    this(dir, analyzer, analyzer, DEFAULT_MIN_PREFIX_CHARS, false, DEFAULT_ALL_TERMS_REQUIRED, DEFAULT_HIGHLIGHT);
   }
-
+  
   /** Create a new instance, loading from a previously built
    *  AnalyzingInfixSuggester directory, if it exists.  This directory must be
    *  private to the infix suggester (i.e., not an external
@@ -165,7 +175,32 @@
    */
   public AnalyzingInfixSuggester(Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars,
                                  boolean commitOnBuild) throws IOException {
-
+    this(dir, indexAnalyzer, queryAnalyzer, minPrefixChars, commitOnBuild, DEFAULT_ALL_TERMS_REQUIRED, DEFAULT_HIGHLIGHT);
+  }
+  
+  /** Create a new instance, loading from a previously built
+   *  AnalyzingInfixSuggester directory, if it exists.  This directory must be
+   *  private to the infix suggester (i.e., not an external
+   *  Lucene index).  Note that {@link #close}
+   *  will also close the provided directory.
+   *
+   *  @param minPrefixChars Minimum number of leading characters
+   *     before PrefixQuery is used (default 4).
+   *     Prefixes shorter than this are indexed as character
+   *     ngrams (increasing index size but making lookups
+   *     faster).
+   *
+   *  @param commitOnBuild Call commit after the index has finished building. This would persist the
+   *                       suggester index to disk and future instances of this suggester can use this pre-built dictionary.
+   *
+   *  @param allTermsRequired All terms in the suggest query must be matched.
+   *  @param highlight Highlight suggest query in suggestions.
+   *
+   */
+  public AnalyzingInfixSuggester(Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars,
+                                 boolean commitOnBuild, 
+                                 boolean allTermsRequired, boolean highlight) throws IOException {
+
     if (minPrefixChars < 0) {
       throw new IllegalArgumentException("minPrefixChars must be >= 0; got: " + minPrefixChars);
     }
@@ -175,6 +210,8 @@
     this.dir = dir;
     this.minPrefixChars = minPrefixChars;
     this.commitOnBuild = commitOnBuild;
+    this.allTermsRequired = allTermsRequired;
+    this.highlight = highlight;
 
     if (DirectoryReader.indexExists(dir)) {
       // Already built; open it:
@@ -373,7 +410,7 @@
 
   @Override
   public List<LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, boolean onlyMorePopular, int num) throws IOException {
-    return lookup(key, contexts, num, true, true);
+    return lookup(key, contexts, num, allTermsRequired, highlight);
   }
 
   /** Lookup, without any context. */
@@ -514,7 +551,7 @@
     //System.out.println("finalQuery=" + query);
 
     // Sort by weight, descending:
-    TopFieldCollector c = TopFieldCollector.create(SORT, num, true, false, false, false);
+    TopFieldCollector c = TopFieldCollector.create(SORT, num, true, false, false);
 
     // We sorted postings by weight during indexing, so we
     // only retrieve the first num hits now:
@@ -543,7 +580,11 @@
 
   /**
    * Create the results based on the search hits.
-   * Can be overridden by subclass to add particular behavior (e.g. weight transformation)
+   * Can be overridden by subclass to add particular behavior (e.g. weight transformation).
+   * Note that there is no prefix token (the {@code prefixToken} argument will
+   * be null) whenever the final token in the incoming request was in fact finished
+   * (had trailing characters, such as white-space).
+   *
    * @throws IOException If there are problems reading fields from the underlying Lucene index.
    */
   protected List<LookupResult> createResults(IndexSearcher searcher, TopFieldDocs hits, int num,
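A minimal sketch of the new seven-argument constructor above; the index path and StandardAnalyzer choice are assumptions for illustration:

    import java.io.IOException;
    import java.nio.file.Paths;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
    import org.apache.lucene.store.FSDirectory;

    class SuggesterDefaultsExample {
      static AnalyzingInfixSuggester openSuggester() throws IOException {
        Analyzer a = new StandardAnalyzer();
        // allTermsRequired=false: any matching term qualifies a suggestion;
        // highlight=false: results carry plain keys, highlightKey stays null.
        return new AnalyzingInfixSuggester(FSDirectory.open(Paths.get("suggest-index")), a, a,
            AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS, false /*commitOnBuild*/,
            false /*allTermsRequired*/, false /*highlight*/);
      }
    }

With these stored on the instance, the three-argument lookup(key, onlyMorePopular, num) path honors the configured defaults instead of hard-coded true/true.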
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggester.java
index cff1618..473f0ce 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggester.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggester.java
@@ -117,7 +117,27 @@
     this.blenderType = blenderType;
     this.numFactor = numFactor;
   }
-
+  
+  /**
+   * Create a new instance, loading from a previously built
+   * directory, if it exists.
+   *
+   * @param blenderType Type of blending strategy, see BlenderType for details
+   * @param numFactor   Factor by which to multiply the number of searched elements before blending
+   * @param commitOnBuild Call commit after the index has finished building. This persists the
+   *                      suggester index to disk so that future instances of this suggester can use the pre-built dictionary.
+   * @param allTermsRequired All terms in the suggest query must be matched.
+   * @param highlight Highlight suggest query in suggestions.
+   * @throws IOException If there are problems opening the underlying Lucene index.
+   */
+  public BlendedInfixSuggester(Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer,
+                               int minPrefixChars, BlenderType blenderType, int numFactor, 
+                               boolean commitOnBuild, boolean allTermsRequired, boolean highlight) throws IOException {
+    super(dir, indexAnalyzer, queryAnalyzer, minPrefixChars, commitOnBuild, allTermsRequired, highlight);
+    this.blenderType = blenderType;
+    this.numFactor = numFactor;
+  }
+  
   @Override
   public List<Lookup.LookupResult> lookup(CharSequence key, Set<BytesRef> contexts, boolean onlyMorePopular, int num) throws IOException {
     // here we multiply the number of searched element by the defined factor
@@ -255,8 +275,8 @@
 
       String docTerm = term.utf8ToString();
 
-      if (matchedTokens.contains(docTerm) || docTerm.startsWith(prefixToken)) {
-
+      if (matchedTokens.contains(docTerm) || (prefixToken != null && docTerm.startsWith(prefixToken))) {
+ 
         DocsAndPositionsEnum docPosEnum = it.docsAndPositions(null, null, DocsAndPositionsEnum.FLAG_OFFSETS);
         docPosEnum.nextDoc();
 
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java
index 8ff64a2..a0b3278 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/LookupBenchmarkTest.java
@@ -36,11 +36,14 @@
 import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.search.suggest.analyzing.AnalyzingInfixSuggester;
 import org.apache.lucene.search.suggest.analyzing.AnalyzingSuggester;
+import org.apache.lucene.search.suggest.analyzing.BlendedInfixSuggester;
+import org.apache.lucene.search.suggest.analyzing.FreeTextSuggester;
 import org.apache.lucene.search.suggest.analyzing.FuzzySuggester;
 import org.apache.lucene.search.suggest.fst.FSTCompletionLookup;
 import org.apache.lucene.search.suggest.fst.WFSTCompletionLookup;
 import org.apache.lucene.search.suggest.jaspell.JaspellLookup;
 import org.apache.lucene.search.suggest.tst.TSTLookup;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.*;
 import org.junit.BeforeClass;
@@ -59,7 +62,9 @@
       JaspellLookup.class, 
       TSTLookup.class,
       FSTCompletionLookup.class,
-      WFSTCompletionLookup.class
+      WFSTCompletionLookup.class,
+      BlendedInfixSuggester.class,
+      FreeTextSuggester.class
       );
 
   private final static int rounds = 15;
@@ -162,8 +167,9 @@
       lookup = cls.newInstance();
     } catch (InstantiationException e) {
       Analyzer a = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
-      if (cls == AnalyzingInfixSuggester.class) {
-        lookup = new AnalyzingInfixSuggester(FSDirectory.open(createTempDir("LookupBenchmarkTest")), a);
+      if (cls == AnalyzingInfixSuggester.class || cls == BlendedInfixSuggester.class) {
+        Constructor<? extends Lookup> ctor = cls.getConstructor(Directory.class, Analyzer.class);
+        lookup = ctor.newInstance(FSDirectory.open(createTempDir("LookupBenchmarkTest")), a);
       } else {
         Constructor<? extends Lookup> ctor = cls.getConstructor(Analyzer.class);
         lookup = ctor.newInstance(a);
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
index e5a9a4c..2717286 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggesterTest.java
@@ -91,10 +91,50 @@
     assertEquals("a <b>p</b>enny saved is a <b>p</b>enny earned", results.get(0).highlightKey);
     assertEquals(10, results.get(0).value);
     assertEquals(new BytesRef("foobaz"), results.get(0).payload);
-
+    
+    results = suggester.lookup(TestUtil.stringToCharSequence("money penny", random()), 10, false, true);
+    assertEquals(1, results.size());
+    assertEquals("a penny saved is a penny earned", results.get(0).key);
+    assertEquals("a <b>penny</b> saved is a <b>penny</b> earned", results.get(0).highlightKey);
+    assertEquals(10, results.get(0).value);
+    assertEquals(new BytesRef("foobaz"), results.get(0).payload);
+ 
+    results = suggester.lookup(TestUtil.stringToCharSequence("penny ea", random()), 10, false, true);
+    assertEquals(2, results.size());
+    assertEquals("a penny saved is a penny earned", results.get(0).key);
+    assertEquals("a <b>penny</b> saved is a <b>penny</b> <b>ea</b>rned", results.get(0).highlightKey);
+    assertEquals("lend me your ear", results.get(1).key);
+    assertEquals("lend me your <b>ea</b>r", results.get(1).highlightKey);
+        
+    results = suggester.lookup(TestUtil.stringToCharSequence("money penny", random()), 10, false, false);
+    assertEquals(1, results.size());
+    assertEquals("a penny saved is a penny earned", results.get(0).key);
+    assertNull(results.get(0).highlightKey);
+    
+    testConstructorDefaults(suggester, keys, a, true, true);
+    testConstructorDefaults(suggester, keys, a, true, false);
+    testConstructorDefaults(suggester, keys, a, false, false);
+    testConstructorDefaults(suggester, keys, a, false, true);
+    
     suggester.close();
   }
 
+  private void testConstructorDefaults(AnalyzingInfixSuggester suggester, Input[] keys, Analyzer a, 
+      boolean allTermsRequired, boolean highlight) throws IOException {
+    AnalyzingInfixSuggester suggester2 = new AnalyzingInfixSuggester(newDirectory(), a, a, 3, false, allTermsRequired, highlight);
+    suggester2.build(new InputArrayIterator(keys));
+    
+    CharSequence key = TestUtil.stringToCharSequence("penny ea", random());
+    
+    List<LookupResult> results1 = suggester.lookup(key, 10, allTermsRequired, highlight);
+    List<LookupResult> results2 = suggester2.lookup(key, false, 10);
+    assertEquals(results1.size(), results2.size());
+    assertEquals(results1.get(0).key, results2.get(0).key);
+    assertEquals(results1.get(0).highlightKey, results2.get(0).highlightKey);
+    
+    suggester2.close();
+  }
+
   public void testAfterLoad() throws Exception {
     Input keys[] = new Input[] {
       new Input("lend me your ear", 8, new BytesRef("foobar")),
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggesterTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggesterTest.java
index 9fd393a..cb22d36 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggesterTest.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/BlendedInfixSuggesterTest.java
@@ -165,6 +165,33 @@
     suggester.close();
   }
 
+  /**
+   * Handle trailing spaces that result in no prefix token LUCENE-6093
+   */
+  public void testNullPrefixToken() throws IOException {
+
+    BytesRef payload = new BytesRef("lake");
+
+    Input keys[] = new Input[]{
+        new Input("top of the lake", 8, payload)
+    };
+
+    Path tempDir = createTempDir("BlendedInfixSuggesterTest");
+
+    Analyzer a = new StandardAnalyzer(CharArraySet.EMPTY_SET);
+    BlendedInfixSuggester suggester = new BlendedInfixSuggester(newFSDirectory(tempDir), a, a,
+                                                                AnalyzingInfixSuggester.DEFAULT_MIN_PREFIX_CHARS,
+                                                                BlendedInfixSuggester.BlenderType.POSITION_LINEAR,
+                                                                BlendedInfixSuggester.DEFAULT_NUM_FACTOR, false);
+    suggester.build(new InputArrayIterator(keys));
+
+    getInResults(suggester, "of ", payload, 1);
+    getInResults(suggester, "the ", payload, 1);
+    getInResults(suggester, "lake ", payload, 1);
+
+    suggester.close();
+  }
+
   public void /*testT*/rying() throws IOException {
 
     BytesRef lake = new BytesRef("lake");
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
index 0bc0b43..9806efb 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
@@ -258,7 +258,7 @@
     }
 
     if (ts.incrementToken()) {
-      fail("TokenStream has more tokens than expected (expected count=" + output.length + "); extra token=" + termAtt.toString());
+      fail("TokenStream has more tokens than expected (expected count=" + output.length + "); extra token=" + termAtt);
     }
 
     // repeat our extra safety checks for end()
diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java
index 8f64f9f..221a189 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java
@@ -18,11 +18,9 @@
  */
 
 import java.io.IOException;
-import java.util.Collection;
 import java.util.Random;
 
 import org.apache.lucene.codecs.CompoundFormat;
-import org.apache.lucene.index.MergeState.CheckAbort;
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -42,15 +40,10 @@
   }
   
   @Override
-  public void write(Directory dir, SegmentInfo si, Collection<String> files, CheckAbort checkAbort, IOContext context) throws IOException {
+  public void write(Directory dir, SegmentInfo si, IOContext context) throws IOException {
     if (random.nextInt(100) == 0) {
       throw new IOException("Fake IOException from CompoundFormat.write()");
     }
-    delegate.write(dir, si, files, checkAbort, context);
-  }
-  
-  @Override
-  public String[] files(SegmentInfo si) {
-    return delegate.files(si);
+    delegate.write(dir, si, context);
   }
 }
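The caller-side migration implied by the new CompoundFormat.write() signature, which the BaseCompoundFormatTestCase updates below exercise repeatedly; dir, si, and files here are assumed inputs:

    import java.io.IOException;
    import java.util.Collection;

    import org.apache.lucene.index.SegmentInfo;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.IOContext;

    class CompoundWriteExample {
      /** New convention: declare the segment's files on the SegmentInfo, then write. */
      static void writeCompound(Directory dir, SegmentInfo si, Collection<String> files)
          throws IOException {
        si.setFiles(files); // previously passed to write() alongside a CheckAbort
        si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
      }
    }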
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
index e39f7f2..e94e088 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/AssertingLeafReader.java
@@ -280,20 +280,8 @@
     private int doc;
     
     public AssertingDocsEnum(DocsEnum in) {
-      this(in, true);
-    }
-
-    public AssertingDocsEnum(DocsEnum in, boolean failOnUnsupportedDocID) {
       super(in);
-      try {
-        int docid = in.docID();
-        assert docid == -1 : in.getClass() + ": invalid initial doc id: " + docid;
-      } catch (UnsupportedOperationException e) {
-        if (failOnUnsupportedDocID) {
-          throw e;
-        }
-      }
-      doc = -1;
+      this.doc = in.docID();
     }
 
     @Override
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
index 72c0753..661099c 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
@@ -53,7 +53,8 @@
     Directory dir = newDirectory();
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.<String>emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Collections.emptySet());
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     assertEquals(0, cfs.listAll().length);
     cfs.close();
@@ -72,7 +73,8 @@
       createSequenceFile(dir, testfile, (byte) 0, data[i]);
       
       SegmentInfo si = newSegmentInfo(dir, "_" + i);
-      si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+      si.setFiles(Collections.singleton(testfile));
+      si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
       Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
       
       IndexInput expected = dir.openInput(testfile, newIOContext(random()));
@@ -96,7 +98,8 @@
     createSequenceFile(dir, files[1], (byte) 0, 114);
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Arrays.asList(files), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Arrays.asList(files));
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
 
     for (String file : files) {
@@ -122,7 +125,8 @@
     out.close();
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Collections.singleton(testfile));
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     assertEquals(1, cfs.listAll().length);
     cfs.close();
@@ -147,7 +151,8 @@
     out.close();
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, myContext);
+    si.setFiles(Collections.singleton(testfile));
+    si.getCodec().compoundFormat().write(dir, si, myContext);
     dir.close();
   }
   
@@ -166,7 +171,8 @@
     out.close();
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, context);
+    si.setFiles(Collections.singleton(testfile));
+    si.getCodec().compoundFormat().write(dir, si, context);
 
     dir.close();
   }
@@ -211,7 +217,8 @@
     Directory dir = newDirectory();
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.<String>emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Collections.emptyList());
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     try {
       cfs.createOutput("bogus", IOContext.DEFAULT);
@@ -233,7 +240,8 @@
     out.close();
  
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.<String>emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Collections.emptyList());
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     try {
       cfs.deleteFile(testfile);
@@ -255,7 +263,8 @@
     out.close();
  
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.<String>emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Collections.emptyList());
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     try {
       cfs.renameFile(testfile, "bogus");
@@ -277,7 +286,8 @@
     out.close();
  
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.<String>emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Collections.emptyList());
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     try {
       cfs.sync(Collections.singleton(testfile));
@@ -299,7 +309,8 @@
     out.close();
  
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Collections.<String>emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Collections.emptyList());
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     try {
       cfs.makeLock("foobar");
@@ -338,7 +349,8 @@
     String files[] = dir.listAll();
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Arrays.asList(files), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Arrays.asList(files));
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     
     for (int i = 0; i < files.length; i++) {
@@ -369,7 +381,8 @@
     assertEquals(0, dir.getFileHandleCount());
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, Arrays.asList(dir.listAll()), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(Arrays.asList(dir.listAll()));
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     
     final IndexInput[] ins = new IndexInput[FILE_COUNT];
@@ -722,7 +735,8 @@
     }
     
     SegmentInfo si = newSegmentInfo(dir, "_123");
-    si.getCodec().compoundFormat().write(dir, si, files, MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+    si.setFiles(files);
+    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
     Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
     return cfs;
   }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
index d937d6c..5429fe6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java
@@ -24,6 +24,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map.Entry;
 import java.util.Map;
 import java.util.Set;
@@ -46,6 +47,7 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.BytesRefHash;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.TestUtil;
@@ -3004,6 +3006,154 @@
     directory.close();
   }
 
+  public void testSortedEnumAdvanceIndependently() throws IOException {
+    Directory directory = newDirectory();
+    Analyzer analyzer = new MockAnalyzer(random());
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
+    iwconfig.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
+    
+    Document doc = iwriter.newDocument();
+    doc.addAtom("field", new BytesRef("2"));
+    iwriter.addDocument(doc);
+
+    doc = iwriter.newDocument();
+    doc.addAtom("field", new BytesRef("1"));
+    iwriter.addDocument(doc);
+
+    doc = iwriter.newDocument();
+    doc.addAtom("field", new BytesRef("3"));
+    iwriter.addDocument(doc);
+
+    iwriter.commit();
+    iwriter.forceMerge(1);
+
+    DirectoryReader ireader = iwriter.getReader();
+    iwriter.close();
+
+    SortedDocValues dv = getOnlySegmentReader(ireader).getSortedDocValues("field");
+    doTestSortedSetEnumAdvanceIndependently(DocValues.singleton(dv));
+
+    ireader.close();
+    directory.close();
+  }
+
+  public void testSortedSetEnumAdvanceIndependently() throws IOException {
+    Directory directory = newDirectory();
+    Analyzer analyzer = new MockAnalyzer(random());
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
+    iwconfig.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
+    FieldTypes fieldTypes = iwriter.getFieldTypes();
+    fieldTypes.setMultiValued("field");
+
+    Document doc = iwriter.newDocument();
+    doc.addAtom("field", new BytesRef("2"));
+    doc.addAtom("field", new BytesRef("3"));
+    iwriter.addDocument(doc);
+
+    doc = iwriter.newDocument();
+    doc.addAtom("field", new BytesRef("1"));
+    doc.addAtom("field", new BytesRef("3"));
+    iwriter.addDocument(doc);
+
+    doc = iwriter.newDocument();
+    doc.addAtom("field", new BytesRef("1"));
+    doc.addAtom("field", new BytesRef("2"));
+    iwriter.addDocument(doc);
+
+    iwriter.commit();
+    iwriter.forceMerge(1);
+
+    DirectoryReader ireader = iwriter.getReader();
+    iwriter.close();
+
+    SortedSetDocValues dv = getOnlySegmentReader(ireader).getSortedSetDocValues("field");
+    doTestSortedSetEnumAdvanceIndependently(dv);
+
+    ireader.close();
+    directory.close();
+  }
+
+  protected void doTestSortedSetEnumAdvanceIndependently(SortedSetDocValues dv) throws IOException {
+    if (dv.getValueCount() < 2) {
+      return;
+    }
+    List<BytesRef> terms = new ArrayList<>();
+    TermsEnum te = dv.termsEnum();
+    terms.add(BytesRef.deepCopyOf(te.next()));
+    terms.add(BytesRef.deepCopyOf(te.next()));
+
+    // Make sure that calls to next() do not modify the term of the other enum
+    TermsEnum enum1 = dv.termsEnum();
+    TermsEnum enum2 = dv.termsEnum();
+    BytesRefBuilder term1 = new BytesRefBuilder();
+    BytesRefBuilder term2 = new BytesRefBuilder();
+
+    term1.copyBytes(enum1.next());
+    term2.copyBytes(enum2.next());
+    term1.copyBytes(enum1.next());
+
+    assertEquals(term1.get(), enum1.term());
+    assertEquals(term2.get(), enum2.term());
+
+    // Same for seekCeil
+    enum1 = dv.termsEnum();
+    enum2 = dv.termsEnum();
+    term1 = new BytesRefBuilder();
+    term2 = new BytesRefBuilder();
+
+    term2.copyBytes(enum2.next());
+    BytesRefBuilder seekTerm = new BytesRefBuilder();
+    seekTerm.append(terms.get(0));
+    seekTerm.append((byte) 0);
+    enum1.seekCeil(seekTerm.get());
+    term1.copyBytes(enum1.term());
+
+    assertEquals(term1.get(), enum1.term());
+    assertEquals(term2.get(), enum2.term());
+
+    // Same for seekCeil on an exact value
+    enum1 = dv.termsEnum();
+    enum2 = dv.termsEnum();
+    term1 = new BytesRefBuilder();
+    term2 = new BytesRefBuilder();
+
+    term2.copyBytes(enum2.next());
+    enum1.seekCeil(terms.get(1));
+    term1.copyBytes(enum1.term());
+    
+    assertEquals(term1.get(), enum1.term());
+    assertEquals(term2.get(), enum2.term());
+
+    // Same for seekExact
+    enum1 = dv.termsEnum();
+    enum2 = dv.termsEnum();
+    term1 = new BytesRefBuilder();
+    term2 = new BytesRefBuilder();
+
+    term2.copyBytes(enum2.next());
+    final boolean found = enum1.seekExact(terms.get(1));
+    assertTrue(found);
+    term1.copyBytes(enum1.term());
+
+    assertEquals(term1.get(), enum1.term());
+    assertEquals(term2.get(), enum2.term());
+
+    // Same for seek by ord
+    enum1 = dv.termsEnum();
+    enum2 = dv.termsEnum();
+    term1 = new BytesRefBuilder();
+    term2 = new BytesRefBuilder();
+
+    term2.copyBytes(enum2.next());
+    enum1.seekExact(1);
+    term1.copyBytes(enum1.term());
+
+    assertEquals(term1.get(), enum1.term());
+    assertEquals(term2.get(), enum2.term());
+  }
+
   protected boolean codecAcceptsHugeBinaryValues(String field) {
     return true;
   }
@@ -3023,4 +3173,5 @@
   protected boolean codecSupportsSortedNumeric() {
     return true;
   }
+
 }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index 2b2c004..35727ea2 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -207,7 +207,7 @@
     w.forceMerge(1);
     w.commit();
     w.close();
-    IndexReader reader = DirectoryReader.open(dir);
+    DirectoryReader reader = DirectoryReader.open(dir);
 
     Directory dir2 = newDirectory();
     if (dir2 instanceof MockDirectoryWrapper) {
@@ -221,7 +221,8 @@
     w = new IndexWriter(dir2, cfg);
     fieldTypes = w.getFieldTypes();
     fieldTypes.disableExistsFilters();
-    w.addIndexes(reader);
+    TestUtil.addIndexesSlowly(w, reader);
+
     w.commit();
     w.close();
 
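A sketch of roughly what TestUtil.addIndexesSlowly is expected to do, given the SlowCodecReaderWrapper usage elsewhere in this patch; this is an assumption about its implementation, not a copy of it:

    import java.io.IOException;
    import java.util.List;

    import org.apache.lucene.index.CodecReader;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.SlowCodecReaderWrapper;

    class AddIndexesSlowlyExample {
      /** Adapts each leaf of an open reader to a CodecReader and feeds addIndexes. */
      static void addIndexesSlowly(IndexWriter writer, DirectoryReader reader) throws IOException {
        List<LeafReaderContext> leaves = reader.leaves();
        CodecReader[] codecReaders = new CodecReader[leaves.size()];
        for (int i = 0; i < leaves.size(); i++) {
          codecReaders[i] = SlowCodecReaderWrapper.wrap(leaves.get(i).reader());
        }
        writer.addIndexes(codecReaders);
      }
    }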
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
index 781129a..c6fd137 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
@@ -551,7 +551,7 @@
     
     Directory dir2 = newDirectory();
     w = new RandomIndexWriter(random(), dir2);
-    w.addIndexes(reader);
+    TestUtil.addIndexesSlowly(w.w, reader);
     reader.close();
     dir.close();
 
@@ -679,7 +679,7 @@
       }
       dirs[i] = newDirectory();
       IndexWriter adder = new IndexWriter(dirs[i], new IndexWriterConfig(null));
-      adder.addIndexes(reader);
+      TestUtil.addIndexesSlowly(adder, reader);
       adder.commit();
       adder.close();
       
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java
index dcf36ad..2d8e937 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseTermVectorsFormatTestCase.java
@@ -503,9 +503,7 @@
           try {
             docsAndPositionsEnum.nextPosition();
             fail();
-          } catch (Exception e) {
-            // ok
-          } catch (AssertionError e) {
+          } catch (Exception | AssertionError e) {
             // ok
           }
         }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
index b43b4aa..afd747f 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
@@ -138,7 +138,7 @@
   
   static class MockRandomOneMerge extends OneMerge {
     final Random r;
-    ArrayList<LeafReader> readers;
+    ArrayList<CodecReader> readers;
 
     MockRandomOneMerge(List<SegmentCommitInfo> segments, long seed) {
       super(segments);
@@ -146,21 +146,23 @@
     }
 
     @Override
-    public List<LeafReader> getMergeReaders() throws IOException {
+    public List<CodecReader> getMergeReaders() throws IOException {
       if (readers == null) {
-        readers = new ArrayList<LeafReader>(super.getMergeReaders());
+        readers = new ArrayList<CodecReader>(super.getMergeReaders());
         for (int i = 0; i < readers.size(); i++) {
           // wrap it (e.g. prevent bulk merge etc)
+          // TODO: cut this over to FilterCodecReader api, we can explicitly
+          // enable/disable bulk merge for portions of the index we want.
           int thingToDo = r.nextInt(7);
           if (thingToDo == 0) {
             // simple no-op FilterReader
-            readers.set(i, new FilterLeafReader(readers.get(i)));
+            readers.set(i, SlowCodecReaderWrapper.wrap(new FilterLeafReader(readers.get(i))));
           } else if (thingToDo == 1) {
             // renumber fields
             // NOTE: currently this only "blocks" bulk merges just by
             // being a FilterReader. But it might find bugs elsewhere, 
             // and maybe the situation can be improved in the future.
-            readers.set(i, new MismatchedLeafReader(readers.get(i), r));
+            readers.set(i, SlowCodecReaderWrapper.wrap(new MismatchedLeafReader(readers.get(i), r)));
           }
           // otherwise, reader is unchanged
         }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java b/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java
index d44ff21..fa0f466 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/RandomIndexWriter.java
@@ -227,7 +227,7 @@
     w.addIndexes(dirs);
   }
 
-  public void addIndexes(IndexReader... readers) throws IOException {
+  public void addIndexes(CodecReader... readers) throws IOException {
     LuceneTestCase.maybeChangeLiveIndexWriterConfig(r, w.getConfig());
     w.addIndexes(readers);
   }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java b/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java
index 004db28..cebe584 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java
@@ -1,5 +1,7 @@
 package org.apache.lucene.index;
 
+import org.apache.lucene.store.Directory;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -17,22 +19,17 @@
  * limitations under the License.
  */
 
-import java.io.IOException;
-
-import org.apache.lucene.store.AlreadyClosedException;
-import org.apache.lucene.util.IOUtils;
-
 /** A {@link ConcurrentMergeScheduler} that ignores AlreadyClosedException. */
 public abstract class SuppressingConcurrentMergeScheduler extends ConcurrentMergeScheduler {
   @Override
-  protected void handleMergeException(Throwable exc) {
+  protected void handleMergeException(Directory dir, Throwable exc) {
     while (true) {
       if (isOK(exc)) {
         return;
       }
       exc = exc.getCause();
       if (exc == null) {
-        super.handleMergeException(exc);
+        super.handleMergeException(dir, exc);
       }
     }
   }
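
With the Directory now passed through handleMergeException, a concrete subclass only has
to supply the isOK check. A hypothetical subclass that suppresses merges failing with
AlreadyClosedException, assuming isOK(Throwable) is the abstract hook (it is not shown in
this hunk):

    ConcurrentMergeScheduler cms = new SuppressingConcurrentMergeScheduler() {
      @Override
      protected boolean isOK(Throwable th) {
        // suppress merge failures caused by the writer already being closed
        return th instanceof AlreadyClosedException;
      }
    };
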
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
index 358c36e..322c5fb 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
@@ -442,6 +442,11 @@
       } else if (mp instanceof LogMergePolicy) {
         ((LogMergePolicy) mp).setMaxMergeDocs(100000);
       }
+      // when running nightly, merging can still have crazy parameters, 
+      // and might use many per-field codecs. turn on CFS for IW flushes
+      // and ensure CFS ratio is reasonable to keep it contained.
+      conf.setUseCompoundFile(true);
+      mp.setNoCFSRatio(Math.max(0.25d, mp.getNoCFSRatio()));
     }
 
     conf.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkOutOfOrderScorer.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkOutOfOrderScorer.java
deleted file mode 100644
index 26a0a4f..0000000
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkOutOfOrderScorer.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package org.apache.lucene.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.util.Random;
-
-/** A crazy {@link BulkScorer} that wraps another {@link BulkScorer}
- *  but shuffles the order of the collected documents. */
-public class AssertingBulkOutOfOrderScorer extends BulkScorer {
-
-  final BulkScorer in;
-  final Random random;
-
-  public AssertingBulkOutOfOrderScorer(Random random, BulkScorer in) {
-    this.in = in;
-    this.random = random;
-  }
-
-  @Override
-  public boolean score(LeafCollector collector, int max) throws IOException {
-    final RandomOrderCollector randomCollector = new RandomOrderCollector(random, collector);
-    final boolean remaining = in.score(randomCollector, max);
-    randomCollector.flush();
-    return remaining;
-  }
-
-  @Override
-  public void score(LeafCollector collector) throws IOException {
-    final RandomOrderCollector randomCollector = new RandomOrderCollector(random, collector);
-    in.score(randomCollector);
-    randomCollector.flush();
-  }
-
-  @Override
-  public String toString() {
-    return "AssertingBulkOutOfOrderScorer(" + in + ")";
-  }
-}
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java
index 15e53bf..6c28d9d 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingBulkScorer.java
@@ -18,39 +18,31 @@
  */
 
 import java.io.IOException;
-import java.lang.ref.WeakReference;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
 import java.util.Random;
-import java.util.WeakHashMap;
 
 import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.util.VirtualMethod;
+
+import com.carrotsearch.randomizedtesting.generators.RandomInts;
 
 /** Wraps a Scorer with additional checks */
-public class AssertingBulkScorer extends BulkScorer {
+final class AssertingBulkScorer extends BulkScorer {
 
-  private static final VirtualMethod<BulkScorer> SCORE_COLLECTOR = new VirtualMethod<>(BulkScorer.class, "score", LeafCollector.class);
-  private static final VirtualMethod<BulkScorer> SCORE_COLLECTOR_RANGE = new VirtualMethod<>(BulkScorer.class, "score", LeafCollector.class, int.class);
-
-  public static BulkScorer wrap(Random random, BulkScorer other) {
+  public static BulkScorer wrap(Random random, BulkScorer other, int maxDoc) {
     if (other == null || other instanceof AssertingBulkScorer) {
       return other;
     }
-    return new AssertingBulkScorer(random, other);
-  }
-
-  public static boolean shouldWrap(BulkScorer inScorer) {
-    return SCORE_COLLECTOR.isOverriddenAsOf(inScorer.getClass()) || SCORE_COLLECTOR_RANGE.isOverriddenAsOf(inScorer.getClass());
+    return new AssertingBulkScorer(random, other, maxDoc);
   }
 
   final Random random;
   final BulkScorer in;
+  final int maxDoc;
+  int max = 0;
 
-  private AssertingBulkScorer(Random random, BulkScorer in) {
+  private AssertingBulkScorer(Random random, BulkScorer in, int maxDoc) {
     this.random = random;
     this.in = in;
+    this.maxDoc = maxDoc;
   }
 
   public BulkScorer getIn() {
@@ -59,10 +51,12 @@
 
   @Override
   public void score(LeafCollector collector) throws IOException {
+    assert max == 0;
+    collector = new AssertingLeafCollector(random, collector, 0, DocsEnum.NO_MORE_DOCS);
     if (random.nextBoolean()) {
       try {
-        final boolean remaining = in.score(collector, DocsEnum.NO_MORE_DOCS);
-        assert !remaining;
+        final int next = score(collector, 0, DocsEnum.NO_MORE_DOCS);
+        assert next == DocIdSetIterator.NO_MORE_DOCS;
       } catch (UnsupportedOperationException e) {
         in.score(collector);
       }
@@ -72,8 +66,19 @@
   }
 
   @Override
-  public boolean score(LeafCollector collector, int max) throws IOException {
-    return in.score(collector, max);
+  public int score(LeafCollector collector, int min, final int max) throws IOException {
+    assert min >= this.max : "Scoring backward: min=" + min + " while previous max was max=" + this.max;
+    assert min < max : "max must be greater than min, got min=" + min + ", and max=" + max;
+    this.max = max;
+    collector = new AssertingLeafCollector(random, collector, min, max);
+    final int next = in.score(collector, min, max);
+    assert next >= max;
+    if (max >= maxDoc || next >= maxDoc) {
+      assert next == DocIdSetIterator.NO_MORE_DOCS;
+      return DocIdSetIterator.NO_MORE_DOCS;
+    } else {
+      return RandomInts.randomIntBetween(random, max, next);
+    }
   }
 
   @Override
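
The new windowed contract that AssertingBulkScorer checks can be exercised by driving a
BulkScorer over fixed ranges; a minimal sketch, assuming a BulkScorer "bs" and a
LeafCollector "lc":

    // score(lc, min, max) collects hits in [min, max) and returns the next
    // doc to resume from (>= max), or NO_MORE_DOCS once the scorer is exhausted.
    int next = 0;
    while (next != DocIdSetIterator.NO_MORE_DOCS) {
      next = bs.score(lc, next, next + 1024); // hypothetical window of 1024 docs
    }
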
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingCollector.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingCollector.java
index 7f1ca9c..838c85e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingCollector.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingCollector.java
@@ -22,46 +22,42 @@
 
 import org.apache.lucene.index.LeafReaderContext;
 
-/** Wraps another Collector and checks that
- *  acceptsDocsOutOfOrder is respected. */
+/**
+ * A collector that asserts that it is used correctly.
+ */
+class AssertingCollector extends FilterCollector {
 
-public class AssertingCollector extends FilterCollector {
+  private final Random random;
+  private int maxDoc = -1;
 
-  public static Collector wrap(Random random, Collector other, boolean inOrder) {
-    return other instanceof AssertingCollector ? other : new AssertingCollector(random, other, inOrder);
+  /** Wrap the given collector in order to add assertions. */
+  public static Collector wrap(Random random, Collector in) {
+    if (in instanceof AssertingCollector) {
+      return in;
+    }
+    return new AssertingCollector(random, in);
   }
 
-  final Random random;
-  final boolean inOrder;
-
-  AssertingCollector(Random random, Collector in, boolean inOrder) {
+  private AssertingCollector(Random random, Collector in) {
     super(in);
     this.random = random;
-    this.inOrder = inOrder;
   }
 
   @Override
   public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
-    return new FilterLeafCollector(super.getLeafCollector(context)) {
-
-      int lastCollected = -1;
-
-      @Override
-      public void setScorer(Scorer scorer) throws IOException {
-        super.setScorer(AssertingScorer.getAssertingScorer(random, scorer));
-      }
-
+    final LeafCollector in = super.getLeafCollector(context);
+    final int docBase = context.docBase;
+    return new AssertingLeafCollector(random, in, 0, DocIdSetIterator.NO_MORE_DOCS) {
       @Override
       public void collect(int doc) throws IOException {
-        if (inOrder || !acceptsDocsOutOfOrder()) {
-          assert doc > lastCollected : "Out of order : " + lastCollected + " " + doc;
-        }
-        in.collect(doc);
-        lastCollected = doc;
+        // check that documents are scored in order globally,
+        // not only per segment
+        assert docBase + doc >= maxDoc : "collection is not in order: current doc="
+            + (docBase + doc) + " while " + maxDoc + " has already been collected";
+        super.collect(doc);
+        maxDoc = docBase + doc;
       }
-
     };
   }
 
 }
-
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java
index e2eefa0..0889d26 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java
@@ -91,7 +91,6 @@
   @Override
   protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
-    // TODO: shouldn't we AssertingCollector.wrap(collector) here?
-    super.search(leaves, AssertingWeight.wrap(random, weight), collector);
+    super.search(leaves, AssertingWeight.wrap(random, weight), AssertingCollector.wrap(random, collector));
   }
 
   @Override
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingLeafCollector.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingLeafCollector.java
new file mode 100644
index 0000000..b684047
--- /dev/null
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingLeafCollector.java
@@ -0,0 +1,58 @@
+package org.apache.lucene.search;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Random;
+
+/** Wraps another Collector and checks that
+ *  order is respected. */
+class AssertingLeafCollector extends FilterLeafCollector {
+
+  private final Random random;
+  private final int min;
+  private final int max;
+
+  private Scorer scorer;
+  private int lastCollected = -1;
+
+  AssertingLeafCollector(Random random, LeafCollector collector, int min, int max) {
+    super(collector);
+    this.random = random;
+    this.min = min;
+    this.max = max;
+  }
+
+  @Override
+  public void setScorer(Scorer scorer) throws IOException {
+    this.scorer = scorer;
+    super.setScorer(AssertingScorer.getAssertingScorer(random, scorer));
+  }
+
+  @Override
+  public void collect(int doc) throws IOException {
+    assert doc > lastCollected : "Out of order : " + lastCollected + " " + doc;
+    assert doc >= min : "Out of range: " + doc + " < " + min;
+    assert doc < max : "Out of range: " + doc + " >= " + max;
+    assert scorer.docID() == doc : "Collected: " + doc + " but scorer: " + scorer.docID();
+    in.collect(doc);
+    lastCollected = doc;
+  }
+
+}
+
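
A short sketch of what this wrapper enforces, assuming a delegate LeafCollector and a
Scorer already positioned by the caller (the class is package-private, so this only
compiles inside org.apache.lucene.search):

    LeafCollector checked = new AssertingLeafCollector(new Random(42), delegate, 0, 100);
    checked.setScorer(scorer); // also wraps the scorer with AssertingScorer
    checked.collect(5);        // ok: 5 is in [0, 100) and follows -1
    // checked.collect(3);     // would trip the out-of-order assert (3 <= 5)
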
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
index eb08c2d..4d881a6 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java
@@ -29,14 +29,12 @@
     return other instanceof AssertingWeight ? other : new AssertingWeight(random, other);
   }
 
-  final boolean scoresDocsOutOfOrder;
   final Random random;
   final Weight in;
 
   AssertingWeight(Random random, Weight in) {
     this.random = random;
     this.in = in;
-    scoresDocsOutOfOrder = in.scoresDocsOutOfOrder() || random.nextBoolean();
   }
 
   @Override
@@ -61,43 +59,18 @@
 
   @Override
   public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
-    // if the caller asks for in-order scoring or if the weight does not support
-    // out-of order scoring then collection will have to happen in-order.
     final Scorer inScorer = in.scorer(context, acceptDocs);
+    assert inScorer == null || inScorer.docID() == -1;
     return AssertingScorer.wrap(new Random(random.nextLong()), inScorer);
   }
 
   @Override
-  public BulkScorer bulkScorer(LeafReaderContext context, boolean scoreDocsInOrder, Bits acceptDocs) throws IOException {
-    // if the caller asks for in-order scoring or if the weight does not support
-    // out-of order scoring then collection will have to happen in-order.
-    BulkScorer inScorer = in.bulkScorer(context, scoreDocsInOrder, acceptDocs);
+  public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
+    BulkScorer inScorer = in.bulkScorer(context, acceptDocs);
     if (inScorer == null) {
       return null;
     }
 
-    if (AssertingBulkScorer.shouldWrap(inScorer)) {
-      // The incoming scorer already has a specialized
-      // implementation for BulkScorer, so we should use it:
-      inScorer = AssertingBulkScorer.wrap(new Random(random.nextLong()), inScorer);
-    } else if (random.nextBoolean()) {
-      // Let super wrap this.scorer instead, so we use
-      // AssertingScorer:
-      inScorer = super.bulkScorer(context, scoreDocsInOrder, acceptDocs);
-    }
-
-    if (scoreDocsInOrder == false && random.nextBoolean()) {
-      // The caller claims it can handle out-of-order
-      // docs; let's confirm that by pulling docs and
-      // randomly shuffling them before collection:
-      inScorer = new AssertingBulkOutOfOrderScorer(new Random(random.nextLong()), inScorer);
-    }
-    return inScorer;
-  }
-
-  @Override
-  public boolean scoresDocsOutOfOrder() {
-    return scoresDocsOutOfOrder;
+    return AssertingBulkScorer.wrap(new Random(random.nextLong()), inScorer, context.reader().maxDoc());
   }
 }
-
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java b/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java
index c0e3aed..5721aa2 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java
@@ -139,10 +139,6 @@
     protected void doSetNextReader(LeafReaderContext context) throws IOException {
       base = context.docBase;
     }
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
   }
 
   /**
@@ -511,10 +507,6 @@
     protected void doSetNextReader(LeafReaderContext context) throws IOException {
       base = context.docBase;
     }
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      return true;
-    }
   }
 
 }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
index 4538c3d..55bfa8c 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
@@ -110,6 +110,7 @@
       if (s!=null) {
         checkFirstSkipTo(q1,s);
         checkSkipTo(q1,s);
+        checkBulkScorerSkipTo(random, q1, s);
         if (wrap) {
           check(random, q1, wrapUnderlyingReader(random, s, -1), false);
           check(random, q1, wrapUnderlyingReader(random, s,  0), false);
@@ -218,7 +219,6 @@
   public static void checkSkipTo(final Query q, final IndexSearcher s) throws IOException {
     //System.out.println("Checking "+q);
     final List<LeafReaderContext> readerContextArray = s.getTopReaderContext().leaves();
-    if (s.createNormalizedWeight(q).scoresDocsOutOfOrder()) return;  // in this case order of skipTo() might differ from that of next().
 
     final int skip_op = 0;
     final int next_op = 1;
@@ -322,11 +322,6 @@
             this.scorer = null;
             lastDoc[0] = -1;
           }
-
-          @Override
-          public boolean acceptsDocsOutOfOrder() {
-            return false;
-          }
         });
 
         if (lastReader[0] != null) {
@@ -408,10 +403,6 @@
         lastDoc[0] = -1;
         liveDocs = context.reader().getLiveDocs();
       }
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return false;
-      }
     });
 
     if (lastReader[0] != null) {
@@ -428,4 +419,56 @@
       }
     }
   }
+
+  /** Check that the scorer and bulk scorer advance consistently. */
+  public static void checkBulkScorerSkipTo(Random r, Query query, IndexSearcher searcher) throws IOException {
+    Weight weight = searcher.createNormalizedWeight(query);
+    for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
+      final Scorer scorer = weight.scorer(context, context.reader().getLiveDocs());
+      final BulkScorer bulkScorer = weight.bulkScorer(context, context.reader().getLiveDocs());
+      if (scorer == null && bulkScorer == null) {
+        continue;
+      }
+      int upTo = 0;
+      while (true) {
+        final int min = upTo + r.nextInt(5);
+        final int max = min + 1 + r.nextInt(r.nextBoolean() ? 10 : 5000);
+        if (scorer.docID() < min) {
+          scorer.advance(min);
+        }
+        final int next = bulkScorer.score(new LeafCollector() {
+          Scorer scorer2;
+          @Override
+          public void setScorer(Scorer scorer) throws IOException {
+            this.scorer2 = scorer;
+          }
+          @Override
+          public void collect(int doc) throws IOException {
+            assert doc >= min;
+            assert doc < max;
+            Assert.assertEquals(scorer.docID(), doc);
+            Assert.assertEquals(scorer.score(), scorer2.score(), 0.01f);
+            scorer.nextDoc();
+          }
+        }, min, max);
+        assert max <= next;
+        assert next <= scorer.docID();
+        upTo = max;
+
+        if (scorer.docID() == DocIdSetIterator.NO_MORE_DOCS) {
+          bulkScorer.score(new LeafCollector() {
+            @Override
+            public void setScorer(Scorer scorer) throws IOException {}
+            
+            @Override
+            public void collect(int doc) throws IOException {
+              // no more matches
+              assert false;
+            }
+          }, upTo, DocIdSetIterator.NO_MORE_DOCS);
+          break;
+        }
+      }
+    }
+  }
 }
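
Because checkBulkScorerSkipTo is now wired into check(), a test gets the scorer versus
bulk-scorer consistency checks for free; a hypothetical usage, assuming an IndexSearcher
"searcher" over an index with an indexed "field":

    Query q = new TermQuery(new Term("field", "value")); // assumed field/term
    QueryUtils.check(random(), q, searcher);             // includes checkBulkScorerSkipTo
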
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/RandomOrderCollector.java b/lucene/test-framework/src/java/org/apache/lucene/search/RandomOrderCollector.java
deleted file mode 100644
index c91835b..0000000
--- a/lucene/test-framework/src/java/org/apache/lucene/search/RandomOrderCollector.java
+++ /dev/null
@@ -1,106 +0,0 @@
-package org.apache.lucene.search;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.util.Random;
-
-/** Randomize collection order. Don't forget to call {@link #flush()} when
- *  collection is finished to collect buffered documents. */
-final class RandomOrderCollector extends FilterLeafCollector {
-
-  final Random random;
-  Scorer scorer;
-  FakeScorer fakeScorer;
-
-  int buffered;
-  final int bufferSize;
-  final int[] docIDs;
-  final float[] scores;
-  final int[] freqs;
-
-  RandomOrderCollector(Random random, LeafCollector in) {
-    super(in);
-    if (!in.acceptsDocsOutOfOrder()) {
-      throw new IllegalArgumentException();
-    }
-    this.random = random;
-    bufferSize = 1 + random.nextInt(100);
-    docIDs = new int[bufferSize];
-    scores = new float[bufferSize];
-    freqs = new int[bufferSize];
-    buffered = 0;
-  }
-
-  @Override
-  public void setScorer(Scorer scorer) throws IOException {
-    this.scorer = scorer;
-    fakeScorer = new FakeScorer();
-    in.setScorer(fakeScorer);
-  }
-
-  private void shuffle() {
-    for (int i = buffered - 1; i > 0; --i) {
-      final int other = random.nextInt(i + 1);
-
-      final int tmpDoc = docIDs[i];
-      docIDs[i] = docIDs[other];
-      docIDs[other] = tmpDoc;
-
-      final float tmpScore = scores[i];
-      scores[i] = scores[other];
-      scores[other] = tmpScore;
-
-      final int tmpFreq = freqs[i];
-      freqs[i] = freqs[other];
-      freqs[other] = tmpFreq;
-    }
-  }
-
-  public void flush() throws IOException {
-    shuffle();
-    for (int i = 0; i < buffered; ++i) {
-      fakeScorer.doc = docIDs[i];
-      fakeScorer.freq = freqs[i];
-      fakeScorer.score = scores[i];
-      in.collect(fakeScorer.doc);
-    }
-    buffered = 0;
-  }
-
-  @Override
-  public void collect(int doc) throws IOException {
-    docIDs[buffered] = doc;
-    scores[buffered] = scorer.score();
-    try {
-      freqs[buffered] = scorer.freq();
-    } catch (UnsupportedOperationException e) {
-      freqs[buffered] = -1;
-    }
-    if (++buffered == bufferSize) {
-      flush();
-    }
-  }
-
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return in.acceptsDocsOutOfOrder();
-  }
-
-}
-
diff --git a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
index 385ccf6..b416d5e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
@@ -611,7 +611,7 @@
     
     // throttling REALLY slows down tests, so don't do it very often for SOMETIMES.
     if (throttling == Throttling.ALWAYS || 
-        (throttling == Throttling.SOMETIMES && randomState.nextInt(200) == 0) && !(in instanceof RateLimitedDirectoryWrapper)) {
+        (throttling == Throttling.SOMETIMES && randomState.nextInt(200) == 0)) {
       if (LuceneTestCase.VERBOSE) {
         System.out.println("MockDirectoryWrapper: throttling indexOutput (" + name + ")");
       }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index 8429bd1..8b15876 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -34,6 +34,7 @@
 import java.nio.file.NoSuchFileException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.text.Collator;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -72,8 +73,8 @@
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReader.ReaderClosedListener;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexableField;
@@ -85,6 +86,7 @@
 import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.MergeScheduler;
+import org.apache.lucene.index.MergeTrigger;
 import org.apache.lucene.index.MismatchedDirectoryReader;
 import org.apache.lucene.index.MismatchedLeafReader;
 import org.apache.lucene.index.MockRandomMergePolicy;
@@ -102,8 +104,8 @@
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.TermsEnum.SeekStatus;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.TieredMergePolicy;
 import org.apache.lucene.search.AssertingIndexSearcher;
 import org.apache.lucene.search.DocIdSet;
@@ -120,13 +122,11 @@
 import org.apache.lucene.store.FSLockFactory;
 import org.apache.lucene.store.FlushInfo;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IOContext.Context;
 import org.apache.lucene.store.LockFactory;
 import org.apache.lucene.store.MergeInfo;
-import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
+import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.NRTCachingDirectory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
 import org.apache.lucene.util.automaton.AutomatonTestUtil;
 import org.apache.lucene.util.automaton.CompiledAutomaton;
 import org.apache.lucene.util.automaton.RegExp;
@@ -151,16 +151,16 @@
 import com.carrotsearch.randomizedtesting.annotations.SeedDecorators;
 import com.carrotsearch.randomizedtesting.annotations.TestGroup;
 import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakGroup;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakGroup.Group;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakGroup;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.rules.NoClassHooksShadowingRule;
@@ -691,10 +691,8 @@
     m.setAccessible(true);
     try {
       m.invoke(IndexWriter.class, limit);
-    } catch (IllegalAccessException iae) {
+    } catch (IllegalAccessException | InvocationTargetException iae) {
       throw new RuntimeException(iae);
-    } catch (InvocationTargetException ite) {
-      throw new RuntimeException(ite);
     }
   }
 
@@ -944,13 +942,18 @@
       } else {
         cms = new ConcurrentMergeScheduler() {
             @Override
-            protected synchronized void maybeStall() {
+            protected synchronized boolean maybeStall(IndexWriter writer) {
+              return true;
             }
           };
       }
       int maxThreadCount = TestUtil.nextInt(r, 1, 4);
       int maxMergeCount = TestUtil.nextInt(r, maxThreadCount, maxThreadCount + 4);
       cms.setMaxMergesAndThreads(maxMergeCount, maxThreadCount);
+      if (random().nextBoolean()) {
+        cms.disableAutoIOThrottle();
+      }
+      cms.setForceMergeMBPerSec(10 + 10*random().nextDouble());
       c.setMergeScheduler(cms);
     } else {
       // Always use consistent settings, else CMS's dynamic (SSD or not)
@@ -1377,27 +1380,6 @@
       directory = new NRTCachingDirectory(directory, random.nextDouble(), random.nextDouble());
     }
     
-    if (TEST_NIGHTLY && rarely(random) && !bare) { 
-      final double maxMBPerSec = TestUtil.nextInt(random, 20, 40);
-      if (LuceneTestCase.VERBOSE) {
-        System.out.println("LuceneTestCase: will rate limit output IndexOutput to " + maxMBPerSec + " MB/sec");
-      }
-      final RateLimitedDirectoryWrapper rateLimitedDirectoryWrapper = new RateLimitedDirectoryWrapper(directory);
-      switch (random.nextInt(10)) {
-        case 3: // sometimes rate limit on flush
-          rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.FLUSH);
-          break;
-        case 2: // sometimes rate limit flush & merge
-          rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.FLUSH);
-          rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.MERGE);
-          break;
-        default:
-          rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.MERGE);
-      }
-      directory =  rateLimitedDirectoryWrapper;
-      
-    }
-
     if (bare) {
       BaseDirectoryWrapper base = new BaseDirectoryWrapper(directory);
       closeAfterSuite(new CloseableDirectory(base, suiteFailureMarker));
@@ -1505,78 +1487,85 @@
       throw null; // dummy to prevent compiler failure
     }
   }
+
+  public static IndexReader wrapReader(IndexReader r) throws IOException {
+    Random random = random();
+      
+    // TODO: remove this, and fix those tests to wrap before putting slow around:
+    final boolean wasOriginallyAtomic = r instanceof LeafReader;
+    for (int i = 0, c = random.nextInt(6)+1; i < c; i++) {
+      switch(random.nextInt(6)) {
+      case 0:
+        r = SlowCompositeReaderWrapper.wrap(r);
+        break;
+      case 1:
+        // will create no FC insanity in atomic case, as ParallelLeafReader has own cache key:
+        r = (r instanceof LeafReader) ?
+          new ParallelLeafReader((LeafReader) r) :
+          new ParallelCompositeReader((CompositeReader) r);
+        break;
+      case 2:
+        // Häckidy-Hick-Hack: a standard MultiReader will cause FC insanity, so we use
+        // QueryUtils' reader with a fake cache key, so insanity checker cannot walk
+        // along our reader:
+        r = new FCInvisibleMultiReader(r);
+        break;
+      case 3:
+        final LeafReader ar = SlowCompositeReaderWrapper.wrap(r);
+        final List<String> allFields = new ArrayList<>();
+        for (FieldInfo fi : ar.getFieldInfos()) {
+          allFields.add(fi.name);
+        }
+        Collections.shuffle(allFields, random);
+        final int end = allFields.isEmpty() ? 0 : random.nextInt(allFields.size());
+        final Set<String> fields = new HashSet<>(allFields.subList(0, end));
+        // will create no FC insanity as ParallelLeafReader has own cache key:
+        r = new ParallelLeafReader(
+                                   new FieldFilterLeafReader(ar, fields, false),
+                                   new FieldFilterLeafReader(ar, fields, true)
+                                   );
+        break;
+      case 4:
+        // Häckidy-Hick-Hack: a standard Reader will cause FC insanity, so we use
+        // QueryUtils' reader with a fake cache key, so insanity checker cannot walk
+        // along our reader:
+        if (r instanceof LeafReader) {
+          r = new AssertingLeafReader((LeafReader)r);
+        } else if (r instanceof DirectoryReader) {
+          r = new AssertingDirectoryReader((DirectoryReader)r);
+        }
+        break;
+      case 5:
+        if (r instanceof LeafReader) {
+          r = new MismatchedLeafReader((LeafReader)r, random);
+        } else if (r instanceof DirectoryReader) {
+          r = new MismatchedDirectoryReader((DirectoryReader)r, random);
+        }
+        break;
+      default:
+        fail("should not get here");
+      }
+    }
+    if (wasOriginallyAtomic) {
+      r = SlowCompositeReaderWrapper.wrap(r);
+    } else if ((r instanceof CompositeReader) && !(r instanceof FCInvisibleMultiReader)) {
+      // prevent cache insanity caused by e.g. ParallelCompositeReader, to fix we wrap one more time:
+      r = new FCInvisibleMultiReader(r);
+    }
+    if (VERBOSE) {
+      System.out.println("wrapReader wrapped: " +r);
+    }
+
+    return r;
+  }
   
   /**
    * Sometimes wrap the IndexReader as slow, parallel or filter reader (or
    * combinations of that)
    */
   public static IndexReader maybeWrapReader(IndexReader r) throws IOException {
-    Random random = random();
     if (rarely()) {
-      // TODO: remove this, and fix those tests to wrap before putting slow around:
-      final boolean wasOriginallyAtomic = r instanceof LeafReader;
-      for (int i = 0, c = random.nextInt(6)+1; i < c; i++) {
-        switch(random.nextInt(6)) {
-          case 0:
-            r = SlowCompositeReaderWrapper.wrap(r);
-            break;
-          case 1:
-            // will create no FC insanity in atomic case, as ParallelLeafReader has own cache key:
-            r = (r instanceof LeafReader) ?
-              new ParallelLeafReader((LeafReader) r) :
-              new ParallelCompositeReader((CompositeReader) r);
-            break;
-          case 2:
-            // Häckidy-Hick-Hack: a standard MultiReader will cause FC insanity, so we use
-            // QueryUtils' reader with a fake cache key, so insanity checker cannot walk
-            // along our reader:
-            r = new FCInvisibleMultiReader(r);
-            break;
-          case 3:
-            final LeafReader ar = SlowCompositeReaderWrapper.wrap(r);
-            final List<String> allFields = new ArrayList<>();
-            for (FieldInfo fi : ar.getFieldInfos()) {
-              allFields.add(fi.name);
-            }
-            Collections.shuffle(allFields, random);
-            final int end = allFields.isEmpty() ? 0 : random.nextInt(allFields.size());
-            final Set<String> fields = new HashSet<>(allFields.subList(0, end));
-            // will create no FC insanity as ParallelLeafReader has own cache key:
-            r = new ParallelLeafReader(
-              new FieldFilterLeafReader(ar, fields, false),
-              new FieldFilterLeafReader(ar, fields, true)
-            );
-            break;
-          case 4:
-            // Häckidy-Hick-Hack: a standard Reader will cause FC insanity, so we use
-            // QueryUtils' reader with a fake cache key, so insanity checker cannot walk
-            // along our reader:
-            if (r instanceof LeafReader) {
-              r = new AssertingLeafReader((LeafReader)r);
-            } else if (r instanceof DirectoryReader) {
-              r = new AssertingDirectoryReader((DirectoryReader)r);
-            }
-            break;
-          case 5:
-            if (r instanceof LeafReader) {
-              r = new MismatchedLeafReader((LeafReader)r, random);
-            } else if (r instanceof DirectoryReader) {
-              r = new MismatchedDirectoryReader((DirectoryReader)r, random);
-            }
-            break;
-          default:
-            fail("should not get here");
-        }
-      }
-      if (wasOriginallyAtomic) {
-        r = SlowCompositeReaderWrapper.wrap(r);
-      } else if ((r instanceof CompositeReader) && !(r instanceof FCInvisibleMultiReader)) {
-        // prevent cache insanity caused by e.g. ParallelCompositeReader, to fix we wrap one more time:
-        r = new FCInvisibleMultiReader(r);
-      }
-      if (VERBOSE) {
-        System.out.println("maybeWrapReader wrapped: " +r);
-      }
+      r = wrapReader(r);
     }
     return r;
   }
@@ -2510,4 +2499,17 @@
       return c.getTotalHits();
     }
   }
+  
+  /** 
+   * Compares two strings with a collator, also looking to see if the strings
+   * are impacted by JDK bugs. May not avoid all JDK bugs in tests.
+   * See https://bugs.openjdk.java.net/browse/JDK-8071862
+   */
+  public static int collate(Collator collator, String s1, String s2) {
+    int v1 = collator.compare(s1, s2);
+    int v2 = collator.getCollationKey(s1).compareTo(collator.getCollationKey(s2));
+    // if collation keys don't really respect collation order, things are screwed.
+    assumeTrue("hit JDK collator bug", Integer.signum(v1) == Integer.signum(v2));
+    return v1;
+  }
 }
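
A sketch of using the new collate() helper from a test; the locale here is an assumption,
and the assumeTrue inside collate() skips the test rather than failing it when the JDK bug
is hit:

    Collator collator = Collator.getInstance(Locale.GERMAN); // assumed locale
    int cmp = collate(collator, "Töne", "Tone");
    // cmp is now safe to use: compare() and the collation keys agreed on the sign
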
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleTemporaryFilesCleanup.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleTemporaryFilesCleanup.java
index 78131d6..599f10c 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleTemporaryFilesCleanup.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleTemporaryFilesCleanup.java
@@ -205,7 +205,7 @@
               "Failed to get a temporary name too many times, check your temp directory and consider manually cleaning it: "
                 + javaTempDir.toAbsolutePath());            
         }
-        f = javaTempDir.resolve(prefix + "-" + ctx.getRunnerSeedAsString() 
+        f = javaTempDir.resolve(prefix + " " + ctx.getRunnerSeedAsString() 
               + "-" + String.format(Locale.ENGLISH, "%03d", attempt));
         try {
           Files.createDirectory(f);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index a49ff36..de674db 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -29,6 +29,7 @@
 import java.nio.file.FileSystem;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -58,6 +59,7 @@
 import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.index.CheckIndex;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.DocsAndPositionsEnum;
 import org.apache.lucene.index.DocsEnum;
@@ -70,8 +72,10 @@
 import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.MergePolicy;
 import org.apache.lucene.index.MergeScheduler;
+import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.SegmentReader;
+import org.apache.lucene.index.SlowCodecReaderWrapper;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.TieredMergePolicy;
@@ -870,6 +874,16 @@
       return false;
     }
   }
+  
+  public static void addIndexesSlowly(IndexWriter writer, DirectoryReader... readers) throws IOException {
+    List<CodecReader> leaves = new ArrayList<>();
+    for (DirectoryReader reader : readers) {
+      for (LeafReaderContext context : reader.leaves()) {
+        leaves.add(SlowCodecReaderWrapper.wrap(context.reader()));
+      }
+    }
+    writer.addIndexes(leaves.toArray(new CodecReader[leaves.size()]));
+  }
 
   /** just tries to configure things to keep the open file
    * count lowish */
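
This helper is the replacement pattern the earlier hunks switch to: since
IndexWriter.addIndexes now takes CodecReader, a DirectoryReader's leaves are wrapped with
SlowCodecReaderWrapper first. A minimal usage sketch, assuming directories "dir" and
"dir2":

    DirectoryReader reader = DirectoryReader.open(dir);
    IndexWriter w = new IndexWriter(dir2, new IndexWriterConfig(null));
    TestUtil.addIndexesSlowly(w, reader); // wraps each leaf as a CodecReader
    w.close();
    reader.close();
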
diff --git a/lucene/tools/junit4/tests.policy b/lucene/tools/junit4/tests.policy
index 98e3f2b..c596899 100644
--- a/lucene/tools/junit4/tests.policy
+++ b/lucene/tools/junit4/tests.policy
@@ -77,4 +77,14 @@
   // SSL related properties for Solr tests
   permission java.security.SecurityPermission "getProperty.ssl.*";
 
+  // SASL/Kerberos related properties for Solr tests
+  permission javax.security.auth.PrivateCredentialPermission "javax.security.auth.kerberos.KerberosTicket * \"*\"", "read";
+  
+  // may only be necessary with Java 7?
+  permission javax.security.auth.PrivateCredentialPermission "javax.security.auth.kerberos.KeyTab * \"*\"", "read";
+  permission javax.security.auth.PrivateCredentialPermission "sun.security.jgss.krb5.Krb5Util$KeysFromKeyTab * \"*\"", "read";
+  
+  permission javax.security.auth.kerberos.ServicePermission "krbtgt/EXAMPLE.COM@EXAMPLE.COM", "initiate";
+  permission javax.security.auth.kerberos.ServicePermission "zookeeper/127.0.0.1@EXAMPLE.COM", "initiate";
+  permission javax.security.auth.kerberos.ServicePermission "zookeeper/127.0.0.1@EXAMPLE.COM", "accept";
 };
diff --git a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
index 66a13db..09314c0 100644
--- a/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
+++ b/lucene/tools/src/java/org/apache/lucene/dependencies/GetMavenDependenciesTask.java
@@ -75,7 +75,7 @@
  */
 public class GetMavenDependenciesTask extends Task {
   private static final Pattern PROPERTY_PREFIX_FROM_IVY_XML_FILE_PATTERN = Pattern.compile
-      ("[/\\\\](lucene|solr)[/\\\\](?:(?:contrib|(analysis)|(example))[/\\\\])?([^/\\\\]+)[/\\\\]ivy\\.xml");
+      ("[/\\\\](lucene|solr)[/\\\\](?:(?:contrib|(analysis)|(example)|(server))[/\\\\])?([^/\\\\]+)[/\\\\]ivy\\.xml");
   private static final Pattern COORDINATE_KEY_PATTERN = Pattern.compile("/([^/]+)/([^/]+)");
   private static final Pattern MODULE_DEPENDENCIES_COORDINATE_KEY_PATTERN
       = Pattern.compile("(.*?)(\\.test)?\\.dependencies");
@@ -251,7 +251,7 @@
   }
 
   /**
-   * For each module that include other modules' external dependencies via
+   * For each module that includes other modules' external dependencies via
    * including all files under their ".../lib/" dirs in their (test.)classpath,
    * add the other modules' dependencies to its set of external dependencies. 
    */
@@ -320,6 +320,7 @@
         if (globalOptionalExternalDependencies.contains(dependencyCoordinate)
             || (perModuleOptionalExternalDependencies.containsKey(module)
                 && perModuleOptionalExternalDependencies.get(module).contains(dependencyCoordinate))) {
+          // make a copy of the dep and set optional=true
           dep = new ExternalDependency(dep.groupId, dep.artifactId, dep.classifier, dep.isTestDependency, true);
         }
         deps.add(dep);
@@ -681,7 +682,7 @@
     String module = getModuleName(ivyXmlFile);
     log("Collecting external dependencies from: " + ivyXmlFile.getPath(), verboseLevel);
     Document document = documentBuilder.parse(ivyXmlFile);
-    // Exclude the 'start' configuration in solr/example/ivy.xml
+    // Exclude the 'start' configuration in solr/server/ivy.xml
     String dependencyPath = "/ivy-module/dependencies/dependency[not(starts-with(@conf,'start'))]";
     NodeList dependencies = (NodeList)xpath.evaluate(dependencyPath, document, XPathConstants.NODESET);
     for (int depNum = 0 ; depNum < dependencies.getLength() ; ++depNum) {
@@ -813,12 +814,13 @@
     builder.append(matcher.group(1));
     if (null != matcher.group(2)) { // "lucene/analysis/..."
       builder.append("-analyzers");
-    }
-    if (null != matcher.group(3)) { // "solr/example/..."
+    } else if (null != matcher.group(3)) { // "solr/example/..."
       builder.append("-example");
+    } else if (null != matcher.group(4)) { // "solr/server/..."
+      builder.append("-server");
     }
     builder.append('-');
-    builder.append(matcher.group(4));
+    builder.append(matcher.group(5));
     return builder.toString().replace("solr-solr-", "solr-");
   }
 
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f0b8378..c69f077 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -15,7 +15,7 @@
 You need a Java 1.8 VM or later installed.
 In this release, there is an example Solr server including a bundled 
 servlet container in the directory named "example".
-See the tutorial at http://lucene.apache.org/solr/tutorial.html
+See the Quick Start guide at http://lucene.apache.org/solr/quickstart.html
 
 ==================  6.0.0 ==================
 
@@ -23,7 +23,7 @@
 
 Versions of Major Components
 ---------------------
-Apache Tika 1.6
+Apache Tika 1.7
 Carrot2 3.9.0
 Velocity 1.7 and Velocity Tools 2.0
 Apache UIMA 2.3.1
@@ -40,6 +40,11 @@
 * The deprecated SolrServer and subclasses have been removed, use SolrClient
   instead.
 
+* The deprecated <nrtMode> configuration in solrconfig.xml has been removed.
+  Please remove it from solrconfig.xml.
+
+* SolrClient.shutdown() has been removed, use SolrClient.close() instead.
+
 Detailed Change List
 ----------------------
 
@@ -51,21 +56,78 @@
 * SOLR-6895: Deprecated SolrServer classes have been removed (Alan Woodward,
   Erik Hatcher)
 
-==================  5.0.0 ==================
+* SOLR-6902: Use JUnit rules instead of inheritance with distributed Solr 
+  tests to allow for multiple tests within the same class.
+  (Ramkumar Aiyengar, Erick Erickson, Mike McCandless)
+
+* SOLR-6954: Deprecated SolrClient.shutdown() method removed (Alan Woodward)
+  
+==================  5.1.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release
 
 Versions of Major Components
 ---------------------
-Apache Tika 1.6
+Apache Tika 1.7
 Carrot2 3.9.0
 Velocity 1.7 and Velocity Tools 2.0
 Apache UIMA 2.3.1
 Apache ZooKeeper 3.4.6
+Jetty 9.2.6.v20141205
+
+Detailed Change List
+----------------------
+
+New Features
+----------------------
+
+* SOLR-6909: Extract atomic update handling logic into AtomicUpdateDocumentMerger class
+  and enable subclassing. (Steve Davids, yonik)
+
+* SOLR-6845: Add a “buildOnStartup” option for suggesters. (Tomás Fernández Löbbe)
+
+* SOLR-6449: Add first class support for Real Time Get in Solrj.
+  (Anurag Sharma, Steve Davids via shalin)
+
+* SOLR-6954: SolrClient now implements Closeable, and shutdown() has been
+  deprecated in favour of close(). (Mark Miller, Tomás Fernández Löbbe, Alan
+  Woodward)
+
+Bug Fixes
+----------------------
+
+* SOLR-7046: NullPointerException when group.function uses query() function.
+  (Jim Musil via Erick Erickson)
+
+Other Changes
+----------------------
+
+* SOLR-7014: Collapse identical catch branches in try-catch statements. (shalin)
+
+* SOLR-6500: Refactor FileFetcher in SnapPuller, add debug logging. 
+  (Ramkumar Aiyengar via Mark Miller)
+
+==================  5.0.0 ==================
+
+Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
+
+Versions of Major Components
+---------------------
+Apache Tika 1.7
+Carrot2 3.9.0
+Velocity 1.7 and Velocity Tools 2.0
+Apache UIMA 2.3.1
+Apache ZooKeeper 3.4.6
+Jetty 9.2.6.v20141205
 
 Upgrading from Solr 4.x
 ----------------------
 
+* Apache Solr no longer supports indexes created with Lucene/Solr 3.x or earlier.
+  Be sure to run Lucene's IndexUpgrader on the previous 4.10 version if you might
+  still have old segments in your index. Alternatively, fully optimize your index
+  with Solr 4.10 to make sure it consists of only one up-to-date index segment.
+
 * The "file" attribute of infoStream in solrconfig.xml is removed. Control this 
   via your logging configuration (org.apache.solr.update.LoggingInfoStream) instead.
 
@@ -109,8 +171,8 @@
   in the "def()" function. Example: changing "fl=sum(fieldA,fieldB)" to 
   "fl=sum(def(fieldA,0.0),def(fieldB,0.0))".  See LUCENE-5961 for more details.
 
-* AdminHandlers is deprecated , /admin/* are implicitly defined, /get ,/replacation and
-   handlers are also implicitly registered (refer to SOLR-6792)
+* AdminHandlers is deprecated, /admin/* are implicitly defined, /get, /replication and
+  handlers are also implicitly registered (refer to SOLR-6792)
 
 * SolrCore.reload(ConfigSet coreConfig, SolrCore prev) was deprecated in 4.10.3 and
   removed in 5.0. use SolrCore.reload(ConfigSet coreConfig). See SOLR-5864.
@@ -138,22 +200,65 @@
 * SolrServer and associated classes have been deprecated.  Applications using
   SolrJ should use the equivalent SolrClient classes instead.
 
+* Spatial fields originating from Solr 4 (e.g. SpatialRecursivePrefixTreeFieldType, BBoxField)
+  have the 'units' attribute deprecated, now replaced with 'distanceUnits'.  If you change it to
+  a unit other than 'degrees' (or if you don't specify it, which will default to kilometers if
+  geo=true), then be sure to update maxDistErr as it's in those units.  If you keep units=degrees
+  then it should be backwards compatible but you'll get a deprecation warning on startup.  See
+  SOLR-6797.
+
+* The <nrtMode> configuration in solrconfig.xml has been discontinued and should be removed from
+  solrconfig.xml. Solr defaults to using NRT searchers regardless of the value in configuration
+  and a warning is logged on startup if the solrconfig.xml has <nrtMode> specified.
+
+* The old spatial syntax for specifying a circle, Circle(x,y d=...), should be replaced
+  with {!geofilt} (if you can) or BUFFER(POINT(x y),d). Likewise, the old rect syntax
+  comprising minX minY maxX maxY should now be replaced with
+  ENVELOPE(minX, maxX, maxY, minY).
+
+* Due to changes in the underlying commons-codec package, users of the BeiderMorseFilterFactory 
+  will need to rebuild their indexes after upgrading.  See LUCENE-6058 for more details.
+
+* CachedSqlEntityProcessor has been removed, use SqlEntityProcessor with the
+  cacheImpl parameter.
+
+* HttpDataSource has been removed, use URLDataSource instead.
+
+* LegacyHTMLStripCharFilter has been removed.
+
+* CoreAdminRequest.persist() call has been removed.  All changes made via
+  CoreAdmin are persistent.
+
+* SpellCheckResponse.getSuggestions() and getSuggestionFrequencies() have been
+  removed, use getAlternatives() and getAlternativeFrequencies() instead.
+
+* SolrQuery deprecated methods have been removed (a migration sketch follows this list):
+    - setMissing() is now setFacetMissing()
+    - getFacetSort() is now getFacetSortString()
+    - setFacetSort(boolean) should instead use setFacetSort(String) with
+      FacetParams.FACET_SORT_COUNT or FacetParams.FACET_SORT_INDEX
+    - setSortField(String, ORDER) should use setSort(SortClause)
+    - addSortField(String, ORDER) should use addSort(SortClause)
+    - removeSortField(String, ORDER) should use removeSort(SortClause)
+    - getSortFields() should use getSorts()
+    - set/getQueryType() should use set/getRequestHandler()
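+
+  A minimal migration sketch for the removed methods (the field and handler names
+  here are hypothetical):
+
+    SolrQuery q = new SolrQuery("*:*");
+    q.setFacetMissing(true);                       // was setMissing(true)
+    q.setFacetSort(FacetParams.FACET_SORT_COUNT);  // was setFacetSort(true)
+    q.addSort(SolrQuery.SortClause.desc("price")); // was addSortField("price", ORDER.desc)
+    q.setRequestHandler("/select");                // was setQueryType("/select")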
+
+* ClientUtil deprecated date methods have been removed, use DateUtil instead.
+
+* FacetParams.FacetDateOther has been removed, use FacetRangeOther.
+
+* ShardParams.SHARD_KEYS has been removed, use ShardParams._ROUTE_ instead.
+
+* The 'old-style' solr.xml format is no longer supported, and cores must be
+  defined using core.properties files.  See
+  https://cwiki.apache.org/confluence/display/solr/Format+of+solr.xml
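+  A minimal sketch of the new-style layout (the core name is illustrative; an empty
+  core.properties also works and defaults the core name to the directory name):
+
+    mkdir -p $SOLR_HOME/mycore/conf     # conf/ holds solrconfig.xml, the schema, etc.
+    echo "name=mycore" > $SOLR_HOME/mycore/core.properties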
+
 Detailed Change List
 ----------------------
 
 New Features
 ----------------------
 
-* SOLR-5287: You can edit files in the conf directory from the admin UI.
-  (Erick Erickson, Stefan Matheis)
-  Additional work:
-  - SOLR-5446: Admin UI - Allow changing Schema and Config (steffkes)
-  - SOLR-5458: Admin UI - Remove separated Pages for Config & Schema (steffkes)
-  - SOLR-5456: Admin UI - Allow creating new Files (steffkes)
-  - SOLR-5518: Added EditFileRequestHandler to deal with security issues around
-    modifying solr configuration files.  (Erick Erickson)
-  - SOLR-5539: Admin UI - Remove ability to create/modify files (steffkes)
-
 * SOLR-6103: Added DateRangeField for indexing date ranges, especially multi-valued ones.
   Supports facet.range, DateMath, and is mostly interoperable with TrieDateField.
   Based on LUCENE-5648. (David Smiley)
@@ -186,7 +291,7 @@
   (hossman)
 
 * SOLR-6485: ReplicationHandler should have an option to throttle the speed of
-  replication (Varun Thacker, NOble Paul)
+  replication (Varun Thacker, Noble Paul)
 
 * SOLR-6543: Give HttpSolrClient the ability to send PUT requests (Gregory Chanan)
 
@@ -209,7 +314,7 @@
 * SOLR-6605: Make ShardHandlerFactory maxConnections configurable.
   (Christine Poerschke via shalin)
 
-* SOLR-6585: RequestHandlers can optionaly handle sub paths as well (Noble Paul)
+* SOLR-6585: RequestHandlers can optionally handle sub paths as well (Noble Paul)
 
 * SOLR-6617: /update/json/docs path will use fully qualified node names by default
              (Noble Paul)
@@ -243,7 +348,7 @@
 
 * SOLR-6533: Support editing common solrconfig.xml values (Noble Paul)
 
-* SOLR-6607: Managing requesthandlers throuh API (Noble Paul)
+* SOLR-6607: Managing requesthandlers through API (Noble Paul)
 
 * SOLR-4799: faster join using join="zipper" aka merge join for nested DIH EntityProcessors
   (Mikhail Khludnev via Noble Paul)
@@ -266,11 +371,34 @@
 * SOLR-6879: Have an option to disable autoAddReplicas temporarily for all collections.
   (Varun Thacker via Steve Rowe)
 
-* SOLR-6435: Add bin/post script to simplify posting content to Solr (ehatcher)
+* SOLR-6435: Add bin/post script to simplify posting content to Solr (Erik Hatcher)
 
 * SOLR-6761: Ability to ignore commit and/or optimize requests from clients when running in
   SolrCloud mode using the IgnoreCommitOptimizeUpdateProcessorFactory. (Timothy Potter)
 
+* SOLR-6797: Spatial fields that used to require units=degrees like
+  SpatialRecursivePrefixTreeFieldType (RPT) now take distanceUnits=degrees|kilometers|miles
+  instead.  It is applied to nearly all distance measurements involving the field: maxDistErr,
+  distErr, d, geodist, score=distance|area|area2d. score now accepts these units as well. It does
+  NOT affect distances embedded in WKT strings like BUFFER(POINT(200 10),0.2).
+  (Ishan Chattopadhyaya, David Smiley)
+
+* SOLR-6766: Expose HdfsDirectoryFactory Block Cache statistics via JMX.
+  (Mike Drob, Mark Miller)
+
+* SOLR-2035: Add a VelocityResponseWriter $resource tool for locale-specific string lookups.
+  (Erik Hatcher)
+
+* SOLR-6916: Toggle payload support for the default highlighter via hl.payloads.  It's auto
+  enabled when the index has payloads. (David Smiley)
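+  A sketch of disabling it explicitly on a request (the collection and field names are
+  illustrative):
+
+    curl "http://localhost:8983/solr/mycoll/select?q=text:solr&hl=true&hl.fl=text&hl.payloads=false"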
+
+* SOLR-6581: Efficient DocValues support and numeric collapse field implementations 
+  for Collapse and Expand (Joel Bernstein)
+
+* SOLR-6937: In schemaless mode, replace spaces and special characters with underscores (Noble Paul)
+
+* SOLR-5147: Support child documents in DIH
+  (Vadim Kirilchuk, Shawn Heisey, Thomas Champagne, Mikhail Khludnev via Noble Paul)
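+  A sketch of the data-config.xml usage (entity names and queries are illustrative; the
+  child="true" attribute marks the nested entity's rows as child documents):
+
+    <entity name="parent" query="select * from parents">
+      <entity name="kid" child="true" query="select * from kids where parent_id='${parent.id}'"/>
+    </entity>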
   
 Bug Fixes
 ----------------------
@@ -336,7 +464,7 @@
   in local map before writing to ZK. (shalin)
 
 * SOLR-6679: Disabled suggester component from techproduct solrconfig.xml since
-  it caused long startup times on lage indexes even when it wasn't used.
+  it caused long startup times on large indexes even when it wasn't used.
   (yonik, hossman)
 
 * SOLR-6738: Admin UI - Escape Data on Plugins-View (steffkes)
@@ -384,6 +512,74 @@
 * SOLR-6874: There is a race around SocketProxy binding to its port in the way we set up
   JettySolrRunner and SocketProxy. (Mark Miller, Timothy Potter)
 
+* SOLR-6735: Make CloneFieldUpdateProcessorFactory null safe (Steve Davids via ehatcher)
+
+* SOLR-6907: URLEncode documents directory in MorphlineMapperTest to handle spaces etc.
+  in file name. (Ramkumar Aiyengar via Erick Erickson)
+
+* SOLR-6880: Harden ZkStateReader to expect that getCollectionLive may return null
+  as its contract states. (Mark Miller, shalin)
+
+* SOLR-6643: Fix error reporting & logging of low level JVM Errors that occur when 
+  loading/reloading a SolrCore (hossman)
+
+* SOLR-6839: Direct routing with CloudSolrServer will ignore the Overwrite document option.
+  (Mark Miller)
+
+* SOLR-6793: ReplicationHandler does not destroy all of its created SnapPullers.
+  (Mark Miller)
+
+* SOLR-6946: Document -p port option for the create_core and create_collection actions in
+  bin/solr (Timothy Potter)
+
+* SOLR-6923: AutoAddReplicas also consults live_nodes to see if a state change has happened.
+  (Varun Thacker via Anshum Gupta)
+
+* SOLR-6941: DistributedQueue#containsTaskWithRequestId can fail with NPE. (Mark Miller)
+
+* SOLR-6764: Field types need to be re-informed after reloading a managed schema from ZK
+  (Timothy Potter)
+  
+* SOLR-6931: We should do a limited retry when using HttpClient. 
+  (Mark Miller, Hrishikesh Gadre, Gregory Chanan)
+
+* SOLR-7004: Add a missing constructor for CollectionAdminRequest.BalanceShardUnique that
+  sets the collection action. (Anshum Gupta)
+
+* SOLR-6993: install_solr_service.sh won't install on RHEL / CentOS
+  (David Anderson via Timothy Potter)
+
+* SOLR-6928: solr.cmd stop works only in English (john.work, Jan Høydahl, Timothy Potter)
+
+* SOLR-7011: Delete collection returns before collection is actually removed.
+  (Christine Poerschke via shalin)
+
+* SOLR-6640: Close searchers before rollback and recovery to avoid index corruption.
+  (Robert Muir, Varun Thacker, shalin)
+
+* SOLR-6847: LeaderInitiatedRecoveryThread compares wrong replica's state with lirState.
+  (shalin)
+  
+* SOLR-6856: Restore ExtractingRequestHandler's ability to capture all HTML tags when
+  parsing (X)HTML. (hossman, Uwe Schindler, ehatcher, Steve Rowe)
+
+* SOLR-7024: Improved error messages when java is not found by the bin/solr
+  shell script, particularly when JAVA_HOME has an invalid location.
+  (Shawn Heisey)
+
+* SOLR-7038: Validate the presence of configset before trying to create a collection.
+  (Anshum Gupta, Mark Miller)
+
+* SOLR-7037: bin/solr start -e techproducts -c fails to start Solr in cloud mode
+  (Timothy Potter)
+
+* SOLR-7016: Fix bin\solr.cmd to work in a directory with spaces in the name.
+  (Timothy Potter, Uwe Schindler)
+
+* SOLR-6969: When opening an HDFSTransactionLog for append we must first attempt to recover
+  its lease to prevent data loss. (Mark Miller, Praneeth Varma, Colin McCabe)
+
+* SOLR-7067: bin/solr won't run under bash 4.2+. (Steve Rowe)
 
 Optimizations
 ----------------------
@@ -400,6 +596,10 @@
   hl.usePhraseHighlighter, and can be more efficient handling data from term vectors.
   (David Smiley)
 
+* SOLR-6666: Dynamic copy fields are considering all dynamic fields, causing
+  a significant performance impact on indexing documents. (Liram Vardi via Erick
+  Erickson, Steve Rowe)
+
 Other Changes
 ----------------------
 
@@ -436,10 +636,10 @@
   constructors, improve javadocs for CloudSolrClient constructors.
   (Steve Davids via Shawn Heisey)
 
-* LUCENE-5901: Replaced all occurences of LUCENE_CURRENT with LATEST for luceneMatchVersion.
+* LUCENE-5901: Replaced all occurrences of LUCENE_CURRENT with LATEST for luceneMatchVersion.
   (Ryan Ernst)
 
-* SOLR-6445: Upgrade Noggit to verion 0.6 to support more flexible JSON input (Noble Paul , Yonik Seeley)
+* SOLR-6445: Upgrade Noggit to version 0.6 to support more flexible JSON input (Noble Paul, Yonik Seeley)
 
 * SOLR-6073: Remove helper methods from CollectionsRequest (SolrJ) for CollectionsAPI
   calls and move to a builder design for the same. (Varun Thacker, Anshum Gupta)
@@ -447,10 +647,10 @@
 * SOLR-5322: core discovery can fail w/NPE and no explanation if a non-readable directory exists
   (Said Chavkin, Erick Erickson)
 
-* SOLR-6488: Update to Apache Tika 1.6. This adds support for parsing Outlook PST and Matlab
-  MAT files. Parsing for NetCDF files was removed because of license issues; if you need support
-  for this format, download the parser JAR yourself and add it to contrib/extraction/lib folder:
-  http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/
+* SOLR-6488, SOLR-6991: Update to Apache Tika 1.7. This adds support for parsing Outlook PST and
+  Matlab MAT files. Parsing for NetCDF files was removed because of license issues; if you need
+  support for this format, download the parser JAR yourself and add it to contrib/extraction/lib
+  folder: http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/
   (Uwe Schindler)
 
 * SOLR-6115: Cleanup enum/string action types in Overseer, OverseerCollectionProcessor and
@@ -511,9 +711,6 @@
 * SOLR-6754: ZkController.publish doesn't use the updateLastState parameter.
   (shalin)
 
-* SOLR-6715: ZkSolrResourceLoader constructors accept a parameter called 'collection'
-  but it should be 'configName'. (shalin)
-
 * SOLR-6751: Exceptions thrown in the analysis chain in DirectUpdateHandler2
   should return a BAD_REQUEST status (Alan Woodward)
 
@@ -603,11 +800,66 @@
   Erick Erickson)
 
 * SOLR-6906: Fix typo bug in DistributedDebugComponentTest.testCompareWithNonDistributedRequest
-  (Ramkumar Aiyenga via Erick Erickson)
+  (Ramkumar Aiyengar via Erick Erickson)
 
 * SOLR-6905: Test pseudo-field retrieval in distributed search.
   (Ramkumar Aiyengar via shalin)
 
+* SOLR-4839: Upgrade Jetty to 9.2.6.v20141205 and restlet-jee to 2.3.0
+  (Bill Bell, Timothy Potter, Uwe Schindler, Mark Miller, shalin)
+
+* SOLR-6897: Nuke non-NRT mode from code and configuration. (hossman, shalin)
+
+* SOLR-6830: Update Woodstox to 4.4.1 and StAX to 3.1.4. (ab)
+
+* SOLR-6918: No need to log exceptions (as warn) generated when creating MBean stats if
+  the core is shutting down (Timothy Potter)
+
+* SOLR-6932: All HttpClient ConnectionManagers and SolrJ clients should always be shutdown
+  in tests and regular code. (Mark Miller)
+
+* SOLR-1723: VelocityResponseWriter improvements (Erik Hatcher)
+
+* SOLR-6324: Set finite default timeouts for select and update. (Ramkumar Aiyengar via Mark Miller)
+
+* SOLR-6952: bin/solr create action should copy configset directory instead of reusing
+  an existing configset in ZooKeeper by default (Timothy Potter)
+
+* SOLR-6933: bin/solr should provide a single "create" action that creates a core
+  or collection depending on whether Solr is running in standalone or cloud mode
+  (Timothy Potter)
+
+* SOLR-6496: LBHttpSolrClient stops server retries after the timeAllowed threshold is met.
+  (Steve Davids, Anshum Gupta)
+
+* SOLR-6904: Removed deprecated Circle & rect syntax. See upgrading notes.  (David Smiley)
+
+* SOLR-6943: HdfsDirectoryFactory should fall back to system props for most of its config
+  if it is not found in solrconfig.xml. (Mark Miller, Mike Drob)
+
+* SOLR-6926: "ant example" makes no sense anymore - should be "ant server" 
+  (Ramkumar Aiyengar, Timothy Potter)
+
+* SOLR-6982: bin/solr and SolrCLI should support SSL-related Java System Properties
+  (Timothy Potter)
+
+* SOLR-6981: Add a delete action to the bin/solr script to allow deleting of cores /
+  collections (with delete collection config directory from ZK) (Timothy Potter)
+
+* SOLR-6840: Remove support for old-style solr.xml (Erick Erickson, Alan Woodward)
+
+* SOLR-6976: Remove classes and methods deprecated in 4.x (Alan Woodward, Noble
+  Paul, Chris Hostetter)
+
+* SOLR-6521: CloudSolrClient should synchronize cache cluster state loading
+  (Noble Paul, Jessica Cheng Mallet)
+
+* SOLR-7018: bin/solr stop should stop if there is only one node running or generate
+  an error message prompting the user to be explicit about which of multiple nodes
+  to stop using the -p or -all options (Timothy Potter)
+  
+* SOLR-5918: ant clean does not remove ZooKeeper data (Varun Thacker, Steve Rowe)
+
 ==================  4.10.3 ==================
 
 Bug Fixes
@@ -633,7 +885,7 @@
 
 * SOLR-6780: Fixed a bug in how default/appends/invariants params were affecting the set 
   of all "keys" found in the request parameters, resulting in some key=value param pairs 
-  being duplicated.  This was noticably affecting some areas of the code where iteration 
+  being duplicated.  This was noticeably affecting some areas of the code where iteration 
   was done over the set of all params: 
     * literal.* in ExtractingRequestHandler
     * facet.* in FacetComponent
@@ -1139,7 +1391,7 @@
   (Joel Bernstein)
 
 * SOLR-5285: Added a new [child ...] DocTransformer for optionally including 
-  Block-Join decendent documents inline in the results of a search.  This works 
+  Block-Join descendant documents inline in the results of a search.  This works 
   independent of whether the search itself is a block-join related query and is 
   supported by the xml, json, and javabin response formats.
   (Varun Thacker via hossman)
@@ -1342,7 +1594,7 @@
 ----------------------
 
 * In previous versions of Solr, Terms that exceeded Lucene's MAX_TERM_LENGTH were
-  silently ignored when indexing documents.  Begining with Solr 4.8, a document
+  silently ignored when indexing documents.  Beginning with Solr 4.8,
   an error will be generated when attempting to index a document with a term
   that is too large.  If you wish to continue to have large terms ignored,
   use "solr.LengthFilterFactory" in all of your Analyzers.  See LUCENE-5472 for
@@ -1721,9 +1973,8 @@
 * SOLR-5320: Added support for tri-level compositeId routing.
   (Anshum Gupta via shalin)
   
-* SOLR-5287,SOLR-5522,SOLR-5446,SOLR-5456,SOLR-5539,SOLR-5458:
-  Admin UI - Added a new "Files" conf directory browser/file viewer.
-  (steffkes, Erick Erickson)
+* SOLR-5458: Admin UI - Added a new "Files" conf directory browser/file viewer.
+  (steffkes)
 
 * SOLR-5447, SOLR-5490: Add a QParserPlugin for Lucene's SimpleQueryParser.
   (Jack Conradson via shalin)
@@ -1903,7 +2154,7 @@
 
 * SOLR-5667: Performance problem when not using hdfs block cache. (Mark Miller)
 
-* SOLR-5526: Fixed NPE that could arrise when explicitly configuring some built 
+* SOLR-5526: Fixed NPE that could arise when explicitly configuring some built 
   in QParserPlugins (Nikolay Khitrin, Vitaliy Zhovtyuk, hossman)
 
 * SOLR-5598: LanguageIdentifierUpdateProcessor ignores all but the first value
@@ -2454,11 +2705,11 @@
   SOLR-5108 for more details.
 
 * In the past, schema.xml parsing would silently ignore "default" or "required" 
-  options specified on <dynamicField/> declarations.  Begining with 4.5, attempting
+  options specified on <dynamicField/> declarations.  Beginning with 4.5, attempting
   to configure these on a dynamic field will cause an init error.  If you
   encounter one of these errors when upgrading an existing schema.xml, you can 
   safely remove these attributes, regardless of their value, from your config and 
-  Solr will continue to bahave exactly as it did in previous versions.  See 
+  Solr will continue to behave exactly as it did in previous versions.  See 
   SOLR-5227 for more details.
 
 * The UniqFieldsUpdateProcessorFactory has been improved to support all of the
@@ -2692,7 +2943,7 @@
 * TieredMergePolicy and the various subtypes of LogMergePolicy no longer have 
   an explicit "setUseCompoundFile" method.  Instead the behavior of new 
   segments is determined by the IndexWriter configuration, and the MergePolicy 
-  is only consulted to determine if merge segements should use the compound 
+  is only consulted to determine if merge segments should use the compound 
   file format (based on the value of "setNoCFSRatio").  If you have explicitly 
   configured one of these classes using <mergePolicy> and include an init arg 
   like this...
@@ -2834,7 +3085,7 @@
 * SOLR-4891: JsonLoader should preserve field value types from the JSON content stream.
   (Steve Rowe)
  
-* SOLR-4805: SolreCore#reload should not call preRegister and publish a DOWN state to
+* SOLR-4805: SolrCore#reload should not call preRegister and publish a DOWN state to
   ZooKeeper. (Mark Miller, Jared Rodriguez)
 
 * SOLR-4899: When reconnecting after ZooKeeper expiration, we need to be willing to wait 
@@ -2994,7 +3245,7 @@
 * SOLR-4931: SolrDeletionPolicy onInit and onCommit methods changed to override
   exact signatures (with generics) from IndexDeletionPolicy (shalin)
 
-* SOLR-4942: test improvements to randomize use of compound files (hosman)
+* SOLR-4942: test improvements to randomize use of compound files (hossman)
 
 * SOLR-4966: CSS, JS and other files in webapp without license (uschindler,
   steffkes)
@@ -3122,7 +3373,7 @@
   you would like. If you are using another webapp container, these jars will 
   need to go in the corresponding location for that container. 
   In conjunction, the dist-excl-slf4j and dist-war-excl-slf4j build targets
-  have been removed since they are redundent.  See the Slf4j documentation, 
+  have been removed since they are redundant.  See the Slf4j documentation, 
   SOLR-3706, and SOLR-4651 for more details.
 
 * The hardcoded SolrCloud defaults for 'hostContext="solr"' and 
@@ -3339,7 +3590,7 @@
   fullpath not path. (Mark Miller)
 
 * SOLR-4555: When forceNew is used with CachingDirectoryFactory#get, the old
-  CachValue should give up its path as it will be used by a new Directory
+  CacheValue should give up its path as it will be used by a new Directory
   instance. (Mark Miller)
 
 * SOLR-4578: CoreAdminHandler#handleCreateAction gets a SolrCore and does not
@@ -3553,7 +3804,7 @@
   when more unused nodes are available. 
   (Eric Falcao, Brett Hoerner, Mark Miller)
 
-* SOLR-4345 : Solr Admin UI dosent work in IE 10 (steffkes)
+* SOLR-4345 : Solr Admin UI doesn't work in IE 10 (steffkes)
 
 * SOLR-4349 : Admin UI - Query Interface does not work in IE
   (steffkes)
@@ -3716,7 +3967,7 @@
   (Shawn Heisey via hossman)
 
 * SOLR-2996: A bare * without a field specification is treated as *:*
-  by the lucene and edismax query paesers.
+  by the lucene and edismax query parsers.
   (hossman, Jan Høydahl, Alan Woodward, yonik)
 
 * SOLR-4416: Upgrade to Tika 1.3. (Markus Jelsma via Mark Miller)
diff --git a/solr/README.txt b/solr/README.txt
index 55d9bd3..448f79b 100644
--- a/solr/README.txt
+++ b/solr/README.txt
@@ -32,32 +32,28 @@
 
   bin/solr start
 
-This will launch a Solr server in the background of your shell, bound
-to port 8983. After starting Solr, you can create a new core for indexing
-your data by doing:
-
-  bin/solr create_core -n <name>
-
-This will create a core that uses a data-driven schema which tries to guess
-the correct field type when you add documents to the index. To see all available
-options for creating a new core, execute:
-
-  bin/solr create_core -help
-
-Alternatively, you can launch Solr in "cloud" mode, which allows you to scale
-out using sharding and replication. To launch Solr in cloud mode, do:
+This will launch a standalone Solr server in the background of your shell,
+listening on port 8983. Alternatively, you can launch Solr in "cloud" mode,
+which allows you to scale out using sharding and replication. To launch Solr
+in cloud mode, do:
 
   bin/solr start -cloud
 
-After starting Solr in cloud mode, you can create a new collection for indexing
-your data by doing:
-
-  bin/solr create_collection -n <name>
-
 To see all available options for starting Solr, please do:
 
   bin/solr start -help
 
+After starting Solr, create either a core or collection depending on whether
+Solr is running in standalone (core) or SolrCloud mode (collection) by doing:
+
+  bin/solr create -c <name>
+
+This will create a core or collection that uses a data-driven schema which tries to
+guess the correct field type when you add documents to the index. To see all available
+options for creating a new core or collection, execute:
+
+  bin/solr create -help
+
 After starting Solr, direct your Web browser to the Solr Admin Console at:
 
   http://localhost:8983/solr/
@@ -83,38 +79,19 @@
 Indexing Documents
 ---------------
 
-To add documents to the index, use the post.jar (or post.sh script) in
-the example/exampledocs subdirectory (while Solr is running), for example:
+To add documents to the index, use bin/post.  For example:
 
-     cd example/exampledocs
-     java -jar -Dc=<collection_name> post.jar *.xml
-Or:  sh post.sh *.xml
+     bin/post -c <collection_name> example/exampledocs/*.xml
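+
+You can also post data passed directly on the command line; a small sketch, mirroring
+the usage examples built into bin/post itself:
+
+     bin/post -c <collection_name> -type application/json -d '[{"id":"1"}]'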
 
 For more information about Solr examples please read...
 
  * example/solr/README.txt
    For more information about the "Solr Home" and Solr specific configuration
- * http://lucene.apache.org/solr/tutorial.html
-   For a Tutorial using this example configuration
- * http://wiki.apache.org/solr/SolrResources
+ * http://lucene.apache.org/solr/quickstart.html
+   For a Quick Start guide
+ * http://lucene.apache.org/solr/resources.html
    For a list of other tutorials and introductory articles.
 
-
-In addition, Solr ships with several example configurations that
-help you learn about Solr. To run one of the examples, you would do:
-
-  bin/solr -e <EXAMPLE> where <EXAMPLE> is one of:
-
-    cloud        : SolrCloud example
-    dih          : Data Import Handler (rdbms, mail, rss, tika)
-    schemaless   : Schema-less example (schema is inferred from data during indexing)
-    techproducts : Kitchen sink example providing comprehensive examples of Solr features
-
-
-A tutorial is available at:
-
-   http://lucene.apache.org/solr/tutorial.html
-
 or linked from "docs/index.html" in a binary distribution.
 
 Also, there are Solr clients for many programming languages, see 
@@ -173,7 +150,7 @@
    for building, testing, and packaging Solr.
   
    NOTE: 
-   To see Solr in action, you may want to use the "ant example" command to build
+   To see Solr in action, you may want to use the "ant server" command to build
    and package Solr into the server/webapps directory. See also server/README.txt.
 
 
diff --git a/solr/bin/install_solr_service.sh b/solr/bin/install_solr_service.sh
index 8a6d91b..57a0264 100644
--- a/solr/bin/install_solr_service.sh
+++ b/solr/bin/install_solr_service.sh
@@ -84,9 +84,9 @@
 SOLR_INSTALL_FILE=${SOLR_ARCHIVE##*/}
 is_tar=true
 if [ ${SOLR_INSTALL_FILE: -4} == ".tgz" ]; then
-  SOLR_DIR=${SOLR_INSTALL_FILE:0:-4}
+  SOLR_DIR=${SOLR_INSTALL_FILE%.tgz}
 elif [ ${SOLR_INSTALL_FILE: -4} == ".zip" ]; then
-  SOLR_DIR=${SOLR_INSTALL_FILE:0:-4}
+  SOLR_DIR=${SOLR_INSTALL_FILE%.zip}
   is_tar=false
 else
   print_usage "Solr installation archive $SOLR_ARCHIVE is invalid, expected a .tgz or .zip file!"
@@ -166,24 +166,24 @@
   exit 1
 fi
 
+if [ -z "$SOLR_SERVICE" ]; then
+  SOLR_SERVICE=solr
+fi
+
 if [ -z "$SOLR_VAR_DIR" ]; then
-  SOLR_VAR_DIR=/var/solr
+  SOLR_VAR_DIR=/var/$SOLR_SERVICE
 fi
 
 if [ -z "$SOLR_USER" ]; then
   SOLR_USER=solr
 fi
 
-if [ -z "$SOLR_SERVICE" ]; then
-  SOLR_SERVICE=solr
-fi
-
 if [ -z "$SOLR_PORT" ]; then
   SOLR_PORT=8983
 fi
 
 if [ -f "/etc/init.d/$SOLR_SERVICE" ]; then
-  echo -e "\nERROR: /etc/init.d/$SOLR_SERVICE already exists! Perhaps solr is already setup as a service on this host?\n" 1>&2
+  echo -e "\nERROR: /etc/init.d/$SOLR_SERVICE already exists! Perhaps Solr is already setup as a service on this host?\n" 1>&2
   exit 1
 fi
 
diff --git a/solr/bin/post b/solr/bin/post
index 1ad6999..824c013 100755
--- a/solr/bin/post
+++ b/solr/bin/post
@@ -14,15 +14,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# Usage:
-#  bin/post <collection> <file(s)|url> [<params to SimplePostTool>]
-#  bin/post gettingstarted http://lucidworks.com [recursive=1] [delay=1]
-#  bin/post tehfiles ~/Documents
-#  bin/post signals LATEST-signals.csv
-#  bin/post records article*.xml
-#  bin/post wizbang events.json
+# ====== Common code copied/adapted from bin/solr (TODO: centralize/share this kind of thing across bin/solr, etc)
 
-# ====== Common code copied from bin/solr (TODO: centralize/share this kind of thing)
+THIS_SCRIPT="$0"
+
+# Resolve symlinks to this script
+while [ -h "$THIS_SCRIPT" ] ; do
+  ls=`ls -ld "$THIS_SCRIPT"`
+  # Drop everything prior to ->
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '/.*' > /dev/null; then
+    THIS_SCRIPT="$link"
+  else
+    THIS_SCRIPT=`dirname "$THIS_SCRIPT"`/"$link"
+  fi
+done
+
+SOLR_TIP=`dirname "$THIS_SCRIPT"`/..
+SOLR_TIP=`cd "$SOLR_TIP"; pwd`
+
 if [ -n "$SOLR_JAVA_HOME" ]; then
   JAVA=$SOLR_JAVA_HOME/bin/java
 elif [ -n "$JAVA_HOME" ]; then
@@ -41,56 +51,168 @@
 
 
 # ===== post specific code
-SPT_JAR=dist/solr-core-*.jar
 
-COLLECTION=$1; shift
+TOOL_JAR=$SOLR_TIP/dist/solr-core-*.jar
 
-echo "Collection:" $COLLECTION
+function print_usage() {
+  echo ""
+  echo 'Usage: post -c <collection> [OPTIONS] <files|directories|urls|-d ["...",...]>'
+  echo "    or post -help"
+  echo ""
+  echo "   collection name defaults to DEFAULT_SOLR_COLLECTION if not specified"
+  echo ""
+  echo "OPTIONS"
+  echo "======="
+  echo "  Solr options:"
+  echo "    -url <base Solr update URL> (overrides collection, host, and port)"
+  echo "    -host <host> (default: localhost)"
+  echo "    -port <port> (default: 8983)"
+  echo "    -commit yes|no (default: yes)"
+  # optimize intentionally omitted, but can be used as '-optimize yes' (default: no)
+  echo ""
+  echo "  Web crawl options:"
+  echo "    -recursive <depth> (default: 1)"
+  echo "    -delay <seconds> (default: 10)"
+  echo ""
+  echo "  Directory crawl options:"
+  echo "    -delay <seconds> (default: 0)"
+  echo ""
+  echo "  stdin/args options:"
+  echo "    -type <content/type> (default: application/xml)"
+  echo ""
+  echo "  Other options:"
+  echo "    -filetypes <type>[,<type>,...] (default: xml,json,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log)"
+  echo "    -params \"<key>=<value>[&<key>=<value>...]\" (values must be URL-encoded; these pass through to Solr update request)"
+  echo "    -out yes|no (default: no; yes outputs Solr response to console)"
+  echo ""
+  echo ""
+  echo "Examples:"
+  echo ""
+  echo "* JSON file: $THIS_SCRIPT -c wizbang events.json"
+  echo "* XML files: $THIS_SCRIPT -c records article*.xml"
+  echo "* CSV file: $THIS_SCRIPT -c signals LATEST-signals.csv"
+  echo "* Directory of files: $THIS_SCRIPT -c myfiles ~/Documents"
+  echo "* Web crawl: $THIS_SCRIPT -c gettingstarted http://lucidworks.com -recursive 1 -delay 1"
+  echo "* Standard input (stdin): echo '{"commit": {}}' | $THIS_SCRIPT -c my_collection -type application/json -out yes -d"
+  echo "* Data as string: $THIS_SCRIPT -c signals -type text/csv -out yes -d $'id,value\n1,0.47'"
+  echo ""
+} # end print_usage
 
-PROPS="-Dc=$COLLECTION"
-PARAMS=""
-
-# TODO: Check that $COLLECTION actually exists?   How to determine if user omitted collection name as first param?
-
-echo -n "Data mode: "
-if [[ $1 == http* ]]; then
-  echo "WEB"
-  PROPS="$PROPS -Ddata=web"
-  PARAMS=$1; shift
-else
-  if [[ -d $1 ]]; then
-    # Directory
-    echo "DIRECTORY"
-    PROPS="$PROPS -Ddata=files -Dauto -Drecursive"
-    PARAMS=$1; shift
-  else
-    # Not a URL or existing directory, assume file(s)
-    echo "FILE"
-    FILE=$1; shift
-    EXTENSION="${FILE##*.}"
-
-    PARAMS=$FILE
-
-    if [[ $EXTENSION == xml || $EXTENSION == csv || $EXTENSION == json ]]; then
-      # Solr /update supported type (default being application/xml).
-      if [[ $EXTENSION == csv ]]; then
-        PROPS="$PROPS -Dtype=text/csv"
-      fi
-      if [[ $EXTENSION == json ]]; then
-        PROPS="$PROPS -Dtype=application/json"
-      fi
-    else
-      PROPS="$PROPS -Dauto=yes"
-    fi
-
-  fi
+if [[ $# -eq 1 && ("$1" == "-help" || "$1" == "-h" || "$1" == "-usage") ]]; then
+  print_usage
+  exit
 fi
 
-# Add all additonal trailing script parameters as system properties to SPT (eg. bin/post core_name ~/Documents depth=1)
+
+COLLECTION=$DEFAULT_SOLR_COLLECTION
+PROPS="-Dauto=yes"
+RECURSIVE=""
+FILES=()
+URLS=()
+ARGS=()
+
 while [ $# -gt 0 ]; do
-  PROPS="$PROPS -D$1"
+  # TODO: natively handle the optional parameters to SPT
+  #       but for now they can be specified as bin/post -c collection-name -delay 5 http://lucidworks.com
+
+  if [[ -d "$1" ]]; then
+    # Directory
+#    echo "$1: DIRECTORY"
+    RECURSIVE="-Drecursive=yes"
+    FILES+=("$1")
+  elif [[ -f "$1" ]]; then
+    # File
+#    echo "$1: FILE"
+    FILES+=("$1")
+  elif [[ "$1" == http* ]]; then
+    # URL
+#    echo "$1: URL"
+    URLS+=("$1")
+  else
+    if [[ $1 == -* ]]; then
+      if [[ $1 == "-c" ]]; then
+        # Special case, pull out collection name
+        shift
+        COLLECTION=$1
+      elif [[ ($1 == "-d" || $1 == "--data" || $1 == "-") ]]; then
+        if [[ -s /dev/stdin ]]; then
+          MODE="stdin"
+        else
+          # when no stdin exists and -d specified, the rest of the arguments
+          # are assumed to be strings to post as-is
+          MODE="args"
+          shift
+          if [[ $# -gt 0 ]]; then
+            ARGS=("$@")
+            shift $#
+          fi
+        fi
+      else
+        key=${1:1}
+        shift
+#       echo "$1: PROP"
+        PROPS="$PROPS -D$key=$1"
+      fi
+    else
+      echo -e "\nUnrecognized argument: $1\n"
+      echo -e "If this was intended to be a data file, it does not exist relative to $PWD\n"
+      exit 1
+    fi
+  fi
   shift
 done
 
-echo $JAVA -classpath $SPT_JAR $PROPS org.apache.solr.util.SimplePostTool $PARAMS
-$JAVA -classpath $SPT_JAR $PROPS org.apache.solr.util.SimplePostTool $PARAMS
+# Check for errors
+if [[ $COLLECTION == "" ]]; then
+  echo -e "\nCollection must be specified.  Use -c <collection name> or set DEFAULT_SOLR_COLLECTION in your environment.\n"
+  echo -e "See '$THIS_SCRIPT -h' for usage instructions.\n"
+  exit 1
+fi
+
+# Unsupported: bin/post -c foo
+if [[ ${#FILES[@]} == 0 && ${#URLS[@]} == 0 && $MODE != "stdin" && $MODE != "args" ]]; then
+  echo -e "\nNo files, directories, URLs, -d strings, or stdin were specified.\n"
+  echo -e "See '$THIS_SCRIPT -h' for usage instructions.\n"
+  exit 1
+fi
+
+# SPT does not support mixing different data mode types, just files, just URLs, just stdin, or just argument strings.
+# The following are unsupported constructs:
+#    bin/post -c foo existing_file.csv http://example.com
+#    echo '<xml.../>' | bin/post -c foo existing_file.csv
+#    bin/post -c foo existing_file.csv -d 'anything'
+if [[ (${#FILES[@]} != 0 && ${#URLS[@]} != 0 && $MODE != "stdin" && $MODE != "args")
+      || ((${#FILES[@]} != 0 || ${#URLS[@]} != 0) && ($MODE == "stdin" || $MODE == "args")) ]]; then
+  echo -e "\nCombining files/directories, URLs, stdin, or args is not supported.  Post them separately.\n"
+  exit 1
+fi
+
+PARAMS=""
+
+# TODO: let's simplify this
+if [[ $MODE != "stdin" && $MODE != "args" ]]; then
+  if [[ $FILES != "" ]]; then
+    MODE="files"
+    PARAMS=("${FILES[@]}")
+  fi
+
+  if [[ $URLS != "" ]]; then
+    MODE="web"
+    PARAMS=("${URLS[@]}")
+  fi
+else
+  if [[ ${#ARGS[@]} == 0 ]]; then
+    # SPT needs a valid (to post to Solr) args string, useful for 'bin/post -c foo -d' to force a commit
+    ARGS+=("<add/>")
+  fi
+  PARAMS=("${ARGS[@]}")
+fi
+
+PROPS="$PROPS -Dc=$COLLECTION -Ddata=$MODE $RECURSIVE"
+
+echo "$JAVA" -classpath $TOOL_JAR $PROPS org.apache.solr.util.SimplePostTool "${PARAMS[@]}"
+"$JAVA" -classpath $TOOL_JAR $PROPS org.apache.solr.util.SimplePostTool "${PARAMS[@]}"
+
+# post smoker:
+# bin/post -c signals -out yes -type application/json -d '[{"id": 2, "val": 0.47}]'
+# bin/post -c signals -out yes -params "wt=json" -d '<add><doc><field name="id">1</field></doc></add>'
diff --git a/solr/bin/solr b/solr/bin/solr
index 1af990a..91c9ecc 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -48,7 +48,16 @@
 SOLR_SCRIPT="$0"
 verbose=false
 THIS_OS=`uname -s`
-hasLsof=$(which lsof)
+
+if hash jar 2>/dev/null ; then      # hash returns true if jar is on the path
+  UNPACK_WAR_CMD="$(command -v jar) xf"
+elif hash unzip 2>/dev/null ; then  # hash returns true if unzip is on the path
+  UNPACK_WAR_CMD="$(command -v unzip) -q"
+else
+  echo -e "This script requires extracting a WAR file with either the jar or unzip utility, please install these utilities or contact your administrator for assistance."
+  exit 1
+fi
+  
 stop_all=false
 
 # for now, we don't support running this script from cygwin due to problems
@@ -105,12 +114,33 @@
       break
     fi
   done
+  if [ -z "$JAVA" ]; then
+    echo >&2 "The currently defined JAVA_HOME ($JAVA_HOME) refers"
+    echo >&2 "to a location where Java could not be found.  Aborting."
+    echo >&2 "Either fix the JAVA_HOME variable or remove it from the"
+    echo >&2 "environment so that the system PATH will be searched."
+    exit 1
+  fi
 else
   JAVA=java
 fi
 
 # test that Java exists and is executable on this server
-$JAVA -version >/dev/null 2>&1 || { echo >&2 "Java is required to run Solr! Please install Java 7 or 8 before running this script."; exit 1; }
+$JAVA -version >/dev/null 2>&1 || {
+  echo >&2 "Java not found, or an error was encountered when running java."
+  echo >&2 "A working Java 8 is required to run Solr!"
+  echo >&2 "Please install Java 8 or fix JAVA_HOME before running this script."
+  echo >&2 "Command that we tried: '${JAVA} -version'"
+  echo >&2 "Active Path:"
+  echo >&2 "${PATH}"
+  exit 1
+}
+
+# URL scheme for contacting Solr
+SOLR_URL_SCHEME=http
+if [ -n "$SOLR_SSL_OPTS" ]; then
+  SOLR_URL_SCHEME=https
+fi
 
 function print_usage() {
   CMD="$1"
@@ -123,7 +153,7 @@
   if [ -z "$CMD" ]; then
     echo ""
     echo "Usage: solr COMMAND OPTIONS"
-    echo "       where COMMAND is one of: start, stop, restart, status, healthcheck, create_core, create_collection"
+    echo "       where COMMAND is one of: start, stop, restart, status, healthcheck, create, create_core, create_collection, delete"
     echo ""
     echo "  Standalone server example (start Solr running in the background on port 8984):"
     echo ""
@@ -209,42 +239,93 @@
     echo ""
     echo "  NOTE: This command will show the status of all running Solr servers"
     echo ""
+  elif [ "$CMD" == "create" ]; then
+    echo ""
+    echo "Usage: solr create [-c name] [-d confdir] [-n configName] [-shards #] [-replicationFactor #] [-p port]"
+    echo ""
+    echo "  Create a core or collection depending on whether Solr is running in standalone (core) or SolrCloud"
+    echo "  mode (collection). In other words, this action detects which mode Solr is running in, and then takes"
+    echo "  the appropriate action (either create_core or create_collection). For detailed usage instructions, do:"
+    echo ""
+    echo "    bin/solr create_core -help"
+    echo ""
+    echo "       or"
+    echo ""
+    echo "    bin/solr create_collection -help"
+    echo ""
+  elif [ "$CMD" == "delete" ]; then
+    echo ""
+    echo "Usage: solr delete [-c name] [-deleteConfig true|false] [-p port]"
+    echo ""
+    echo "  Deletes a core or collection depending on whether Solr is running in standalone (core) or SolrCloud"
+    echo "  mode (collection). If you're deleting a collection in SolrCloud mode, the default behavior is to also"
+    echo "  delete the configuration directory from ZooKeeper so long as it is not being used by another collection."
+    echo "  You can override this behavior by passing -deleteConfig false when running this command."
+    echo ""
+    echo "  -c <name>               Name of the core / collection to delete"
+    echo ""
+    echo "  -deleteConfig <boolean> Delete the configuration directory from ZooKeeper; default is true"
+    echo ""
+    echo "  -p <port>               Port of a local Solr instance where you want to delete the core/collection"
+    echo "                            If not specified, the script will search the local system for a running"
+    echo "                            Solr instance and will use the port of the first server it finds."
+    echo ""
   elif [ "$CMD" == "create_core" ]; then
     echo ""
-    echo "Usage: solr create_core [-n name] [-c configset]"
+    echo "Usage: solr create_core [-c core] [-d confdir] [-p port]"
     echo ""
-    echo "  -n <name>       Name of core to create"
+    echo "  -c <core>     Name of core to create"
     echo ""
-    echo "  -c <configset>  Name of configuration directory to use, built-in options are:"
+    echo "  -d <confdir>  Configuration directory to copy when creating the new core, built-in options are:"
+    echo ""
     echo "      basic_configs: Minimal Solr configuration"
     echo "      data_driven_schema_configs: Managed schema with field-guessing support enabled"
     echo "      sample_techproducts_configs: Example configuration with many optional features enabled to"
     echo "         demonstrate the full power of Solr"
+    echo ""
     echo "      If not specified, default is: data_driven_schema_configs"
     echo ""
     echo "      Alternatively, you can pass the path to your own configuration directory instead of using"
-    echo "      one of the built-in configurations, such as: bin/solr create_core -n mycore -c /tmp/myconfig"
+    echo "      one of the built-in configurations, such as: bin/solr create_core -c mycore -d /tmp/myconfig"
+    echo ""
+    echo "  -p <port>     Port of a local Solr instance where you want to create the new core"
+    echo "                  If not specified, the script will search the local system for a running"
+    echo "                  Solr instance and will use the port of the first server it finds."
     echo ""
   elif [ "$CMD" == "create_collection" ]; then
     echo ""
-    echo "Usage: solr create_collection [-n name] [-c configset] [-shards #] [-replicationFactor #]"
+    echo "Usage: solr create_collection [-c collection] [-d confdir] [-n configName] [-shards #] [-replicationFactor #] [-p port]"
     echo ""
-    echo "  -n <name>               Name of collection to create"
+    echo "  -c <collection>         Name of collection to create"
     echo ""
-    echo "  -c <configset>          Name of configuration directory to use, built-in options are:"
+    echo "  -d <confdir>            Configuration directory to copy when creating the new collection, built-in options are:"
+    echo ""
     echo "      basic_configs: Minimal Solr configuration"
     echo "      data_driven_schema_configs: Managed schema with field-guessing support enabled"
     echo "      sample_techproducts_configs: Example configuration with many optional features enabled to"
     echo "         demonstrate the full power of Solr"
+    echo ""
     echo "      If not specified, default is: data_driven_schema_configs"
     echo ""
     echo "      Alternatively, you can pass the path to your own configuration directory instead of using"
-    echo "      one of the built-in configurations, such as: bin/solr create_collection -n mycoll -c /tmp/myconfig"
+    echo "      one of the built-in configurations, such as: bin/solr create_collection -c mycoll -d /tmp/myconfig"
+    echo ""
+    echo "      By default the script will upload the specified confdir directory into ZooKeeper using the same"
+    echo "      name as the collection (-c) option. Alternatively, if you want to reuse an existing directory"
+    echo "      or create a confdir in ZooKeeper that can be shared by multiple collections, use the -n option"
+    echo ""
+    echo "  -n <configName>         Name the configuration directory in ZooKeeper; by default, the configuration"
+    echo "                            will be uploaded to ZooKeeper using the collection name (-c), but if you want"
+    echo "                            to use an existing directory or override the name of the configuration in"
+    echo "                            ZooKeeper, then use the -c option."
     echo ""
     echo "  -shards <#>             Number of shards to split the collection into; default is 1"
     echo ""
     echo "  -replicationFactor <#>  Number of copies of each document in the collection, default is 1 (no replication)"
     echo ""
+    echo "  -p <port>               Port of a local Solr instance where you want to create the new collection"
+    echo "                            If not specified, the script will search the local system for a running"
+    echo "                            Solr instance and will use the port of the first server it finds."
     echo ""
   fi
 } # end print_usage
@@ -299,10 +380,10 @@
   
   # Extract the solr.war if it hasn't been done already (so we can access the SolrCLI class)
   if [[ -e $DEFAULT_SERVER_DIR/webapps/solr.war && ! -d "$DEFAULT_SERVER_DIR/solr-webapp/webapp" ]]; then
-    (mkdir -p $DEFAULT_SERVER_DIR/solr-webapp/webapp && cd $DEFAULT_SERVER_DIR/solr-webapp/webapp && jar xf $DEFAULT_SERVER_DIR/webapps/solr.war)    
+    (mkdir -p $DEFAULT_SERVER_DIR/solr-webapp/webapp && cd $DEFAULT_SERVER_DIR/solr-webapp/webapp && $UNPACK_WAR_CMD $DEFAULT_SERVER_DIR/webapps/solr.war)    
   fi
   
-  "$JAVA" -Dlog4j.configuration=file:$DEFAULT_SERVER_DIR/scripts/cloud-scripts/log4j.properties \
+  "$JAVA" $SOLR_SSL_OPTS -Dsolr.install.dir=$SOLR_TIP -Dlog4j.configuration=file:$DEFAULT_SERVER_DIR/scripts/cloud-scripts/log4j.properties \
     -classpath "$DEFAULT_SERVER_DIR/solr-webapp/webapp/WEB-INF/lib/*:$DEFAULT_SERVER_DIR/lib/ext/*" \
     org.apache.solr.util.SolrCLI $*
 
@@ -321,7 +402,7 @@
         port=`jetty_port "$ID"`
         if [ "$port" != "" ]; then
           echo -e "\nSolr process $ID running on port $port"
-          run_tool status -solr http://localhost:$port/solr
+          run_tool status -solr $SOLR_URL_SCHEME://localhost:$port/solr
           echo ""
         else
           echo -e "\nSolr process $ID from $PIDF not found."
@@ -338,7 +419,7 @@
           if [ "$port" != "" ]; then
             echo ""
             echo "Solr process $ID running on port $port"
-            run_tool status -solr http://localhost:$port/solr
+            run_tool status -solr $SOLR_URL_SCHEME://localhost:$port/solr
             echo ""
           fi
       done
@@ -361,7 +442,7 @@
 
   if [ "$SOLR_PID" != "" ]; then
     echo -e "Sending stop command to Solr running on port $SOLR_PORT ... waiting 5 seconds to allow Jetty process $SOLR_PID to stop gracefully."
-    $JAVA -jar $DIR/start.jar STOP.PORT=$STOP_PORT STOP.KEY=$STOP_KEY --stop || true
+    $JAVA $SOLR_SSL_OPTS -jar $DIR/start.jar STOP.PORT=$STOP_PORT STOP.KEY=$STOP_KEY --stop || true
     (sleep 5) &
     spinner $!
     rm -f $SOLR_PID_DIR/solr-$SOLR_PORT.pid
@@ -479,32 +560,36 @@
 fi
 
 # create a core or collection
-if [[ "$SCRIPT_CMD" == "create_core" || "$SCRIPT_CMD" == "create_collection" ]]; then
+if [[ "$SCRIPT_CMD" == "create" || "$SCRIPT_CMD" == "create_core" || "$SCRIPT_CMD" == "create_collection" ]]; then
 
-  CREATE_TYPE=collection
   CREATE_NUM_SHARDS=1
   CREATE_REPFACT=1
-  if [ "$SCRIPT_CMD" == "create_core" ]; then
-    CREATE_TYPE=core
-  fi
 
   if [ $# -gt 0 ]; then
     while true; do
       case $1 in
-          -n|-name)
+          -c|-core|-collection)
               if [[ -z "$2" || "${2:0:1}" == "-" ]]; then
-                print_usage "$SCRIPT_CMD" "$CREATE_TYPE name is required when using the $1 option!"
+                print_usage "$SCRIPT_CMD" "name is required when using the $1 option!"
                 exit 1
               fi
               CREATE_NAME=$2
               shift 2
           ;;
-          -c|-configset)
+          -n|-confname)
               if [[ -z "$2" || "${2:0:1}" == "-" ]]; then
-                print_usage "$SCRIPT_CMD" "Configset name is required when using the $1 option!"
+                print_usage "$SCRIPT_CMD" "Configuration name is required when using the $1 option!"
                 exit 1
               fi
-              CREATE_CONFIGSET="$2"
+              CREATE_CONFNAME="$2"
+              shift 2
+          ;;
+          -d|-confdir)
+              if [[ -z "$2" || "${2:0:1}" == "-" ]]; then
+                print_usage "$SCRIPT_CMD" "Configuration directory is required when using the $1 option!"
+                exit 1
+              fi
+              CREATE_CONFDIR="$2"
               shift 2
           ;;
           -s|-shards)
@@ -551,22 +636,27 @@
     done
   fi
 
-  if [ -z "$CREATE_CONFIGSET" ]; then
-    CREATE_CONFIGSET=data_driven_schema_configs
+  if [ -z "$CREATE_CONFDIR" ]; then
+    CREATE_CONFDIR=data_driven_schema_configs
   fi
 
-  # validate the configset arg
-  if [[ ! -d "$SOLR_TIP/server/solr/configsets/$CREATE_CONFIGSET" && ! -d "$CREATE_CONFIGSET" ]]; then
-    echo -e "\nSpecified configset $CREATE_CONFIGSET not found!\n"
+  # validate the confdir arg
+  if [[ ! -d "$SOLR_TIP/server/solr/configsets/$CREATE_CONFDIR" && ! -d "$CREATE_CONFDIR" ]]; then
+    echo -e "\nSpecified configuration directory $CREATE_CONFDIR not found!\n"
     exit 1
   fi
 
   if [ -z "$CREATE_NAME" ]; then
-    echo "$CREATE_TYPE name is required!"
+    echo "Name (-c) argument is required!"
     print_usage "$SCRIPT_CMD"
     exit 1
   fi
 
+  # If not defined, use the collection name for the name of the configuration in ZooKeeper
+  if [ -z "$CREATE_CONFNAME" ]; then
+    CREATE_CONFNAME=$CREATE_NAME
+  fi
+
   if [ -z "$CREATE_PORT" ]; then
     for ID in `ps auxww | grep java | grep start.jar | awk '{print $2}' | sort -r`
       do
@@ -579,21 +669,104 @@
   fi
 
   if [ -z "$CREATE_PORT" ]; then
-    echo "Failed to determine the port of a local Solr instance, cannot create $CREATE_TYPE $CREATE_NAME"
+    echo "Failed to determine the port of a local Solr instance, cannot create $CREATE_NAME!"
     exit 1
   fi
 
-  if [ "$SCRIPT_CMD" == "create_collection" ]; then
-    run_tool create_collection -name $CREATE_NAME -shards $CREATE_NUM_SHARDS -replicationFactor $CREATE_REPFACT \
-      -config $CREATE_CONFIGSET -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl http://localhost:$CREATE_PORT/solr
+  if [ "$SCRIPT_CMD" == "create_core" ]; then
+    run_tool create_core -name $CREATE_NAME -solrUrl $SOLR_URL_SCHEME://localhost:$CREATE_PORT/solr \
+      -confdir $CREATE_CONFDIR -configsetsDir $SOLR_TIP/server/solr/configsets
     exit $?
   else
-    run_tool create_core -name $CREATE_NAME -solrUrl http://localhost:$CREATE_PORT/solr \
-      -config $CREATE_CONFIGSET -configsetsDir $SOLR_TIP/server/solr/configsets
+    run_tool $SCRIPT_CMD -name $CREATE_NAME -shards $CREATE_NUM_SHARDS -replicationFactor $CREATE_REPFACT \
+      -confname $CREATE_CONFNAME -confdir $CREATE_CONFDIR \
+      -configsetsDir $SOLR_TIP/server/solr/configsets \
+      -solrUrl $SOLR_URL_SCHEME://localhost:$CREATE_PORT/solr
     exit $?
   fi
 fi
 
+# delete a core or collection
+if [[ "$SCRIPT_CMD" == "delete" ]]; then
+
+  if [ $# -gt 0 ]; then
+    while true; do
+      case $1 in
+          -c|-core|-collection)
+              if [[ -z "$2" || "${2:0:1}" == "-" ]]; then
+                print_usage "$SCRIPT_CMD" "name is required when using the $1 option!"
+                exit 1
+              fi
+              DELETE_NAME=$2
+              shift 2
+          ;;
+          -p|-port)
+              if [[ -z "$2" || "${2:0:1}" == "-" ]]; then
+                print_usage "$SCRIPT_CMD" "Solr port is required when using the $1 option!"
+                exit 1
+              fi
+              DELETE_PORT="$2"
+              shift 2
+          ;;
+          -deleteConfig)
+              if [[ -z "$2" || "${2:0:1}" == "-" ]]; then
+                print_usage "$SCRIPT_CMD" "true|false is required when using the $1 option!"
+                exit 1
+              fi
+              DELETE_CONFIG="$2"
+              shift 2
+          ;;
+          -help|-usage)
+              print_usage "$SCRIPT_CMD"
+              exit 0
+          ;;
+          --)
+              shift
+              break
+          ;;
+          *)
+              if [ "$1" != "" ]; then
+                print_usage "$SCRIPT_CMD" "Unrecognized or misplaced argument: $1!"
+                exit 1
+              else
+                break # out-of-args, stop looping
+              fi
+          ;;
+      esac
+    done
+  fi
+
+  if [ -z "$DELETE_NAME" ]; then
+    echo "Name (-c) argument is required!"
+    print_usage "$SCRIPT_CMD"
+    exit 1
+  fi
+
+  # If not defined, use the collection name for the name of the configuration in ZooKeeper
+  if [ -z "$DELETE_CONFIG" ]; then
+    DELETE_CONFIG=true
+  fi
+
+  if [ -z "$DELETE_PORT" ]; then
+    for ID in `ps auxww | grep java | grep start.jar | awk '{print $2}' | sort -r`
+      do
+        port=`jetty_port "$ID"`
+        if [ "$port" != "" ]; then
+          DELETE_PORT=$port
+          break
+        fi
+    done
+  fi
+
+  if [ -z "$DELETE_PORT" ]; then
+    echo "Failed to determine the port of a local Solr instance, cannot delete $DELETE_NAME!"
+    exit 1
+  fi
+
+  run_tool delete -name $DELETE_NAME -deleteConfig $DELETE_CONFIG \
+    -solrUrl $SOLR_URL_SCHEME://localhost:$DELETE_PORT/solr
+  exit $?
+fi
 
 # verify the command given is supported
 if [ "$SCRIPT_CMD" != "stop" ] && [ "$SCRIPT_CMD" != "start" ] && [ "$SCRIPT_CMD" != "restart" ] && [ "$SCRIPT_CMD" != "status" ]; then
@@ -798,7 +971,7 @@
                   fi
                                   
                   # check to see if something is already bound to that port
-                  if [ "$hasLsof" != "" ]; then
+                  if hash lsof 2>/dev/null ; then  # hash returns true if lsof is on the path
                     PORT_IN_USE=`lsof -Pni:$CLOUD_PORT`
                     if [ "$PORT_IN_USE" != "" ]; then
                       read -e -p "Oops! Looks like port $CLOUD_PORT is already being used by another process. Please choose a different port. " USER_INPUT
@@ -844,6 +1017,7 @@
             mkdir -p $SOLR_HOME
             if [ ! -f "$SOLR_HOME/solr.xml" ]; then
               cp $DEFAULT_SERVER_DIR/solr/solr.xml $SOLR_HOME/solr.xml
+              cp $DEFAULT_SERVER_DIR/solr/zoo.cfg $SOLR_HOME/zoo.cfg
             fi
             EXAMPLE_CONFIGSET=sample_techproducts_configs
             shift
@@ -857,6 +1031,7 @@
             mkdir -p $SOLR_HOME
             if [ ! -f "$SOLR_HOME/solr.xml" ]; then
               cp $DEFAULT_SERVER_DIR/solr/solr.xml $SOLR_HOME/solr.xml
+              cp $DEFAULT_SERVER_DIR/solr/zoo.cfg $SOLR_HOME/zoo.cfg
             fi
             EXAMPLE_CONFIGSET=data_driven_schema_configs
             shift
@@ -897,8 +1072,10 @@
   else
     # not stopping all and don't have a port, but if we can find the pid file for the default port 8983, then use that
     none_stopped=true
-    if [ -e "$SOLR_PID_DIR/solr-8983.pid" ]; then
-      PID=`cat $SOLR_PID_DIR/solr-8983.pid`
+    numSolrs=`find $SOLR_PID_DIR -name "solr-*.pid" -type f | wc -l | tr -d ' '`
+    if [ $numSolrs -eq 1 ]; then
+      # only do this if there is only 1 node running, otherwise they must provide the -p or -all
+      PID=`find $SOLR_PID_DIR -name "solr-*.pid" -type f -exec cat {} \;`
       CHECK_PID=`ps auxww | awk '{print $2}' | grep $PID | sort -r | tr -d ' '`
       if [ "$CHECK_PID" != "" ]; then
         port=`jetty_port "$CHECK_PID"`
@@ -906,11 +1083,15 @@
           stop_solr "$SOLR_SERVER_DIR" "$port" "$STOP_KEY" "$CHECK_PID"
           none_stopped=false
         fi
-        rm -f $SOLR_PID_DIR/solr-8983.pid
       fi
     fi
+
     if $none_stopped; then
-      echo -e "\nMust either specify a port using -p or -all to stop all Solr nodes on this host.\nUse the status command to see if any Solr nodes are running."
+      if [ $numSolrs -gt 0 ]; then
+        echo -e "\nFound $numSolrs Solr nodes running! Must either specify a port using -p or -all to stop all Solr nodes on this host.\n"
+      else
+        echo -e "\nNo Solr nodes found to stop.\n"
+      fi
       exit 1
     fi
   fi
@@ -1042,7 +1223,7 @@
     
 fi
 
-# These are useful for attaching remove profilers like VisualVM/JConsole
+# These are useful for attaching remote profilers like VisualVM/JConsole
 if [ "$ENABLE_REMOTE_JMX_OPTS" == "true" ]; then
 
   if [ -z "$RMI_PORT" ]; then
@@ -1096,6 +1277,16 @@
     fi
   fi
 
+  # If SSL-related system props are set, add them to SOLR_OPTS
+  if [ -n "$SOLR_SSL_OPTS" ]; then
+    # If using SSL and jetty.ssl.port not set explicitly, use the jetty.port
+    SSL_PORT_PROP="-Djetty.ssl.port=$SOLR_PORT"
+    if [ -n "$SOLR_SSL_PORT" ]; then
+      SSL_PORT_PROP="-Djetty.ssl.port=$SOLR_SSL_PORT"
+    fi
+    SOLR_OPTS="$SOLR_OPTS $SOLR_SSL_OPTS $SSL_PORT_PROP"
+  fi
+
   if $verbose ; then
     echo -e "\nStarting Solr using the following settings:"
     echo -e "    JAVA            = $JAVA"
@@ -1138,6 +1329,7 @@
 
   SOLR_START_OPTS="-server -Xss256k $SOLR_JAVA_MEM $GC_TUNE $GC_LOG_OPTS $REMOTE_JMX_OPTS \
  $CLOUD_MODE_OPTS \
+-Djetty.home=$SOLR_SERVER_DIR \
 -DSTOP.PORT=$stop_port -DSTOP.KEY=$STOP_KEY \
 $SOLR_HOST_ARG -Djetty.port=$SOLR_PORT \
 -Dsolr.solr.home=$SOLR_HOME \
@@ -1160,7 +1352,7 @@
     nohup $JAVA $SOLR_START_OPTS $SOLR_ADDL_ARGS -XX:OnOutOfMemoryError="$SOLR_TIP/bin/oom_solr.sh $SOLR_PORT $SOLR_LOGS_DIR" -jar start.jar 1>$SOLR_LOGS_DIR/solr-$SOLR_PORT-console.log 2>&1 & echo $! > $SOLR_PID_DIR/solr-$SOLR_PORT.pid
   
     # no lsof on cygwin though
-    if [ "$hasLsof" != "" ]; then
+    if hash lsof 2>/dev/null ; then  # hash returns true if lsof is on the path
       echo -n "Waiting to see Solr listening on port $SOLR_PORT"
       # Launch in a subshell to show the spinner
       (loops=0
@@ -1203,26 +1395,19 @@
       EXAMPLE_NAME=$EXAMPLE
     fi
 
-    if [ "$SOLR_MODE" == "solrcloud" ]; then
-      run_tool create_collection -name $EXAMPLE_NAME -shards 1 -replicationFactor 1 \
-        -config $EXAMPLE_CONFIGSET -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl http://localhost:$SOLR_PORT/solr
-      if [ $? -ne 0 ]; then
-        exit 1
-      fi
-    else
-      run_tool create_core -name $EXAMPLE_NAME -solrUrl http://localhost:$SOLR_PORT/solr \
-        -config $EXAMPLE_CONFIGSET -configsetsDir $SOLR_TIP/server/solr/configsets
-      if [ $? -ne 0 ]; then
-        exit 1
-      fi
+    run_tool create -name $EXAMPLE_NAME -shards 1 -replicationFactor 1 \
+      -confname $EXAMPLE_NAME -confdir $EXAMPLE_CONFIGSET \
+      -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl $SOLR_URL_SCHEME://localhost:$SOLR_PORT/solr
+    if [ $? -ne 0 ]; then
+      exit 1
     fi
 
     if [ "$EXAMPLE" == "techproducts" ]; then
       echo "Indexing tech product example docs from $SOLR_TIP/example/exampledocs"
-      "$JAVA" -Durl=http://localhost:$SOLR_PORT/solr/$EXAMPLE/update -jar $SOLR_TIP/example/exampledocs/post.jar $SOLR_TIP/example/exampledocs/*.xml
+      "$JAVA" $SOLR_SSL_OPTS -Durl=$SOLR_URL_SCHEME://localhost:$SOLR_PORT/solr/$EXAMPLE/update -jar $SOLR_TIP/example/exampledocs/post.jar $SOLR_TIP/example/exampledocs/*.xml
     fi
 
-    echo -e "\nSolr $EXAMPLE example launched successfully. Direct your Web browser to http://localhost:$SOLR_PORT/solr to visit the Solr Admin UI\n"
+    echo -e "\nSolr $EXAMPLE example launched successfully. Direct your Web browser to $SOLR_URL_SCHEME://localhost:$SOLR_PORT/solr to visit the Solr Admin UI\n"
   fi
 else
   #
@@ -1245,7 +1430,7 @@
   fi
 
   echo -e "\nStarting up SolrCloud node1 on port ${CLOUD_PORTS[0]} using command:\n"
-  echo -e "solr start -cloud -s $SOLR_HOME -p $SOLR_PORT $DASHZ $DASHM $DASHA\n\n"
+  echo -e "solr start -cloud -s example/cloud/node1/solr -p $SOLR_PORT $DASHZ $DASHM $DASHA\n\n"
     
   # can't launch this node in the foreground else we can't run anymore commands
   launch_solr "false" "$ADDITIONAL_CMD_OPTS"
@@ -1262,7 +1447,7 @@
     ndx=$[$s+1]
     next_port=${CLOUD_PORTS[$s]}
     echo -e "\n\nStarting node$ndx on port $next_port using command:\n"
-    echo -e "solr start -cloud -s $SOLR_TIP/example/cloud/node$ndx/solr -p $next_port -z $zk_host $DASHM $DASHA \n\n"
+    echo -e "solr start -cloud -s example/cloud/node$ndx/solr -p $next_port -z $zk_host $DASHM $DASHA \n\n"
     # call this script again with correct args for next node    
     $SOLR_TIP/bin/solr start -cloud -s $SOLR_TIP/example/cloud/node$ndx/solr -p $next_port -z $zk_host $DASHM $DASHA
   done
@@ -1321,7 +1506,7 @@
         CLOUD_CONFIG=data_driven_schema_configs
       fi
 
-      # validate the configset arg
+      # validate the confdir arg
       if [[ ! -d "$SOLR_TIP/server/solr/configsets/$CLOUD_CONFIG" && ! -d "$CLOUD_CONFIG" ]]; then
         echo -e "\nOops! Specified configuration $CLOUD_CONFIG not found!"
         read -e -p "Choose one of: basic_configs, data_driven_schema_configs, or sample_techproducts_configs [data_driven_schema_configs] " USER_INPUT
@@ -1334,8 +1519,10 @@
   fi
    
   run_tool create_collection -name $CLOUD_COLLECTION -shards $CLOUD_NUM_SHARDS -replicationFactor $CLOUD_REPFACT \
-    -config $CLOUD_CONFIG -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl http://localhost:$SOLR_PORT/solr
-  echo -e "\n\nSolrCloud example running, please visit http://localhost:$SOLR_PORT/solr \n\n"
+    -confname $CLOUD_COLLECTION -confdir $CLOUD_CONFIG \
+    -configsetsDir $SOLR_TIP/server/solr/configsets -solrUrl $SOLR_URL_SCHEME://localhost:$SOLR_PORT/solr
+
+  echo -e "\n\nSolrCloud example running, please visit $SOLR_URL_SCHEME://localhost:$SOLR_PORT/solr \n\n"
 fi
 
 exit $?
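
For context, the unified 'create' path above can be exercised as follows once a
node is up. This is a minimal sketch, assuming the bash script accepts the same
-c/-d/-shards/-replicationFactor flags documented in solr.cmd's create_usage
below; 'mycoll' is a placeholder name:

    # start the interactive SolrCloud example, then create a two-shard collection
    bin/solr start -e cloud -noprompt
    bin/solr create -c mycoll -d data_driven_schema_configs -shards 2 -replicationFactor 2

Because SOLR_URL_SCHEME is derived from SOLR_SSL_OPTS, the same commands use
https:// automatically once SSL is configured in solr.in.sh.
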
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 53413db..fe8b77d 100644
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -36,6 +36,11 @@
 IF "%SOLR_INCLUDE%"=="" set "SOLR_INCLUDE=%SOLR_TIP%\bin\solr.in.cmd"

 IF EXIST "%SOLR_INCLUDE%" CALL "%SOLR_INCLUDE%"

 

+REM URL scheme for contacting Solr

+set SOLR_URL_SCHEME=http

+IF DEFINED SOLR_SSL_OPTS set SOLR_URL_SCHEME=https

+IF NOT DEFINED SOLR_SSL_OPTS set SOLR_SSL_OPTS=

+

 REM Verify Java is available

 IF DEFINED SOLR_JAVA_HOME set "JAVA_HOME=%SOLR_JAVA_HOME%"

 REM Try to detect JAVA_HOME from the registry

@@ -76,6 +81,11 @@
   SHIFT

   goto parse_healthcheck_args

 )

+IF "%1"=="create" (

+  set SCRIPT_CMD=create

+  SHIFT

+  goto parse_create_args

+)

 IF "%1"=="create_core" (

   set SCRIPT_CMD=create_core

   SHIFT

@@ -86,6 +96,11 @@
   SHIFT

   goto parse_create_args

 )

+IF "%1"=="delete" (

+  set SCRIPT_CMD=delete

+  SHIFT

+  goto parse_delete_args

+)

 goto parse_args

 

 :usage

@@ -98,14 +113,16 @@
 IF "%SCRIPT_CMD%"=="restart" goto start_usage

 IF "%SCRIPT_CMD%"=="stop" goto stop_usage

 IF "%SCRIPT_CMD%"=="healthcheck" goto healthcheck_usage

+IF "%SCRIPT_CMD%"=="create" goto create_usage

 IF "%SCRIPT_CMD%"=="create_core" goto create_core_usage

 IF "%SCRIPT_CMD%"=="create_collection" goto create_collection_usage

+IF "%SCRIPT_CMD%"=="delete" goto delete_usage

 goto done

 

 :script_usage

 @echo.

 @echo Usage: solr COMMAND OPTIONS

-@echo        where COMMAND is one of: start, stop, restart, healthcheck, create_core, create_collection

+@echo        where COMMAND is one of: start, stop, restart, healthcheck, create, create_core, create_collection, delete

 @echo.

 @echo   Standalone server example (start Solr running in the background on port 8984):

 @echo.

@@ -188,42 +205,107 @@
 @echo.

 goto done

 

+:create_usage

+echo.

+echo Usage: solr create [-c name] [-d confdir] [-n confname] [-shards #] [-replicationFactor #] [-p port]

+echo.

+echo   Create a core or collection depending on whether Solr is running in standalone (core) or SolrCloud

+echo   mode (collection). In other words, this action detects which mode Solr is running in, and then takes

+echo   the appropriate action (either create_core or create_collection). For detailed usage instructions, do:

+echo.

+echo     bin\solr create_core -help

+echo.

+echo        or

+echo.

+echo     bin\solr create_collection -help

+echo.

+goto done

+

+:delete_usage

+echo.

+echo Usage: solr delete [-c name] [-deleteConfig boolean] [-p port]

+echo.

+echo  Deletes a core or collection depending on whether Solr is running in standalone (core) or SolrCloud

+echo  mode (collection). If you're deleting a collection in SolrCloud mode, the default behavior is to also

+echo  delete the configuration directory from ZooKeeper so long as it is not being used by another collection.

+echo  You can override this behavior by passing -deleteConfig false when running this command.

+echo.

+echo   -c name     Name of the core / collection to delete

+echo.

+echo   -deleteConfig boolean Delete the configuration directory from ZooKeeper; default is true

+echo.

+echo   -p port     Port of a local Solr instance where you want to delete the core / collection

+echo                 If not specified, the script will search the local system for a running

+echo                 Solr instance and will use the port of the first server it finds.

+echo.

+goto done

+

 :create_core_usage

 echo.

-echo Usage: solr create_core [-n name] [-c configset]

+echo Usage: solr create_core [-c name] [-d confdir] [-p port]

 echo.

-echo   -n name       Name of core to create

+echo   -c name     Name of core to create

 echo.

-echo   -c configset  Name of configuration directory to use, valid options are:

+echo   -d confdir  Configuration directory to copy when creating the new core, built-in options are:

+echo.

 echo       basic_configs: Minimal Solr configuration

 echo       data_driven_schema_configs: Managed schema with field-guessing support enabled

 echo       sample_techproducts_configs: Example configuration with many optional features enabled to

 echo          demonstrate the full power of Solr

+echo.

 echo       If not specified, default is: data_driven_schema_configs

 echo.

+echo       Alternatively, you can pass the path to your own configuration directory instead of using

+echo       one of the built-in configurations, such as: bin\solr create_core -c mycore -d c:/tmp/myconfig

+echo.

+echo   -p port     Port of a local Solr instance where you want to create the new core

+echo                 If not specified, the script will search the local system for a running

+echo                 Solr instance and will use the port of the first server it finds.

+echo.

 goto done

 

 :create_collection_usage

 echo.

-echo Usage: solr create_collection [-n name] [-c configset] [-shards #] [-replicationFactor #]

+echo Usage: solr create_collection [-c name] [-d confdir] [-n confname] [-shards #] [-replicationFactor #] [-p port]

 echo.

-echo   -n name               Name of collection to create

+echo   -c name               Name of collection to create

 echo.

-echo   -c configset          Name of configuration directory to use, valid options are:

+echo   -d confdir            Configuration directory to copy when creating the new collection, built-in options are:

+echo.

 echo       basic_configs: Minimal Solr configuration

 echo       data_driven_schema_configs: Managed schema with field-guessing support enabled

 echo       sample_techproducts_configs: Example configuration with many optional features enabled to

 echo          demonstrate the full power of Solr

+echo.

 echo       If not specified, default is: data_driven_schema_configs

 echo.

+echo       Alternatively, you can pass the path to your own configuration directory instead of using

+echo       one of the built-in configurations, such as: bin\solr create_collection -c mycoll -d c:/tmp/myconfig

+echo.

+echo       By default the script will upload the specified confdir directory into ZooKeeper using the same

+echo         name as the collection (-c) option. Alternatively, if you want to reuse an existing directory

+echo         or create a confdir in ZooKeeper that can be shared by multiple collections, use the -n option

+echo.

+echo   -n configName         Name of the configuration directory in ZooKeeper; by default, the configuration

+echo                             will be uploaded to ZooKeeper using the collection name (-c), but if you want

+echo                             to use an existing directory or override the name of the configuration in

+echo                             ZooKeeper, then use the -n option.

+echo.

 echo   -shards #             Number of shards to split the collection into

 echo.

 echo   -replicationFactor #  Number of copies of each document in the collection

 echo.

+echo   -p port               Port of a local Solr instance where you want to create the new collection

+echo                           If not specified, the script will search the local system for a running

+echo                           Solr instance and will use the port of the first server it finds.

+echo.

 goto done

 

 REM Really basic command-line arg parsing

 :parse_args

+

+set "arg=%~1"

+set "firstTwo=%arg:~0,2%"

 IF "%SCRIPT_CMD%"=="" set SCRIPT_CMD=start

 IF [%1]==[] goto process_script_cmd

 IF "%1"=="-help" goto usage

@@ -255,6 +337,7 @@
 IF "%1"=="-k" goto set_stop_key

 IF "%1"=="-key" goto set_stop_key

 IF "%1"=="-all" goto set_stop_all

+IF "%firstTwo%"=="-D" goto set_passthru

 IF NOT "%1"=="" goto invalid_cmd_line

 

 :set_script_cmd

@@ -440,6 +523,17 @@
 SHIFT

 goto parse_args

 

+:set_passthru

+set "PASSTHRU=%~1=%~2"

+IF NOT "%SOLR_OPTS%"=="" (

+  set "SOLR_OPTS=%SOLR_OPTS% %PASSTHRU%"

+) ELSE (

+  set "SOLR_OPTS=%PASSTHRU%"

+)

+SHIFT

+SHIFT

+goto parse_args

+

 :set_noprompt

 set NO_USER_PROMPT=1

 SHIFT

@@ -475,6 +569,9 @@
   IF NOT EXIST "!SOLR_HOME!\solr.xml" (

     copy "%DEFAULT_SERVER_DIR%\solr\solr.xml" "!SOLR_HOME!\solr.xml"

   )

+  IF NOT EXIST "!SOLR_HOME!\zoo.cfg" (

+    copy "%DEFAULT_SERVER_DIR%\solr\zoo.cfg" "!SOLR_HOME!\zoo.cfg"

+  )

 ) ELSE IF "%EXAMPLE%"=="cloud" (

   set SOLR_MODE=solrcloud

   goto cloud_example_start

@@ -486,6 +583,9 @@
   IF NOT EXIST "!SOLR_HOME!\solr.xml" (

     copy "%DEFAULT_SERVER_DIR%\solr\solr.xml" "!SOLR_HOME!\solr.xml"

   )

+  IF NOT EXIST "!SOLR_HOME!\zoo.cfg" (

+    copy "%DEFAULT_SERVER_DIR%\solr\zoo.cfg" "!SOLR_HOME!\zoo.cfg"

+  )

 ) ELSE (

   @echo.

   @echo 'Unrecognized example %EXAMPLE%!'

@@ -528,36 +628,72 @@
 IF "%SCRIPT_CMD%"=="stop" (

   IF "%SOLR_PORT%"=="" (

     IF "%STOP_ALL%"=="1" (

-      for /f "usebackq" %%i in (`dir /b %SOLR_TIP%\bin ^| findstr /i "^solr-.*\.port$"`) do (

+      set found_it=0

+      for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do (

         set SOME_SOLR_PORT=

-        For /F "Delims=" %%J In (%SOLR_TIP%\bin\%%i) do set SOME_SOLR_PORT=%%~J

+        For /F "Delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J

         if NOT "!SOME_SOLR_PORT!"=="" (

-          for /f "tokens=2,5" %%j in ('netstat -aon ^| find /i "listening" ^| find ":!SOME_SOLR_PORT!"') do (

-            @echo Stopping Solr running on port !SOME_SOLR_PORT!

-            set /A STOP_PORT=!SOME_SOLR_PORT! - 1000

-            "%JAVA%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop

-            del %SOLR_TIP%\bin\solr-!SOME_SOLR_PORT!.port

-            timeout /T 5

-            REM Kill it if it is still running after the graceful shutdown

-            For /f "tokens=5" %%M in ('netstat -nao ^| find /i "listening" ^| find ":!SOME_SOLR_PORT!"') do (taskkill /f /PID %%M)

+          for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":!SOME_SOLR_PORT! "') do (

+            @REM j is the ip:port and k is the pid

+            IF NOT "%%k"=="0" (

+              @REM split the ip:port var by colon to see if the ip is 0.0.0.0

+              for /f "delims=: tokens=1,2" %%x IN ("%%j") do (

+                @REM x is the ip

+                IF "%%x"=="0.0.0.0" (

+                  set found_it=1

+                  @echo Stopping Solr process %%k running on port !SOME_SOLR_PORT!

+                  set /A STOP_PORT=!SOME_SOLR_PORT! - 1000

+                  "%JAVA%" %SOLR_SSL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop

+                  del "%SOLR_TIP%"\bin\solr-!SOME_SOLR_PORT!.port

+                  timeout /T 5

+                  REM Kill it if it is still running after the graceful shutdown

+                  For /f "tokens=2,5" %%M in ('netstat -nao ^| find "TCP " ^| find ":!SOME_SOLR_PORT! "') do (

+                    IF "%%N"=="%%k" (

+                      for /f "delims=: tokens=1,2" %%a IN ("%%M") do (

+                        IF "%%a"=="0.0.0.0" (

+                          @echo Forcefully killing process %%N

+                          taskkill /f /PID %%N

+                        )

+                      )

+                    )

+                  )

+                )

+              )

+            )

           )

         )

       )

+      if "!found_it!"=="0" echo No Solr nodes found to stop.

     ) ELSE (

       set SCRIPT_ERROR=Must specify the port when trying to stop Solr, or use -all to stop all running nodes on this host.

       goto err

     )

   ) ELSE (

     set found_it=0

-    For /f "tokens=5" %%M in ('netstat -nao ^| find /i "listening" ^| find ":%SOLR_PORT%"') do (

-      set found_it=1

-      @echo Stopping Solr running on port %SOLR_PORT%

-      set /A STOP_PORT=%SOLR_PORT% - 1000

-      "%JAVA%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop

-      del %SOLR_TIP%\bin\solr-%SOLR_PORT%.port

-      timeout /T 5

-      REM Kill it if it is still running after the graceful shutdown

-      For /f "tokens=5" %%j in ('netstat -nao ^| find /i "listening" ^| find ":%SOLR_PORT%"') do (taskkill /f /PID %%j)

+    For /f "tokens=2,5" %%M in ('netstat -nao ^| find "TCP " ^| find ":%SOLR_PORT% "') do (

+      IF NOT "%%N"=="0" (

+        for /f "delims=: tokens=1,2" %%x IN ("%%M") do (

+          IF "%%x"=="0.0.0.0" (

+            set found_it=1

+            @echo Stopping Solr process %%N running on port %SOLR_PORT%

+            set /A STOP_PORT=%SOLR_PORT% - 1000

+            "%JAVA%" %SOLR_SSL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop

+            del "%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port

+            timeout /T 5

+            REM Kill it if it is still running after the graceful shutdown

+            For /f "tokens=2,5" %%j in ('netstat -nao ^| find "TCP " ^| find ":%SOLR_PORT% "') do (

+              IF "%%N"=="%%k" (

+                for /f "delims=: tokens=1,2" %%a IN ("%%j") do (

+                  IF "%%a"=="0.0.0.0" (

+                    @echo Forcefully killing process %%N

+                    taskkill /f /PID %%N

+                  )

+                )

+              )

+            )

+          )

+        )

+      )

     )

     if "!found_it!"=="0" echo No Solr found running on port %SOLR_PORT%

   )

@@ -569,9 +705,15 @@
 

 IF "%SCRIPT_CMD%"=="start" (

   REM see if Solr is already running using netstat

-  For /f "tokens=5" %%j in ('netstat -aon ^| find /i "listening" ^| find ":%SOLR_PORT%"') do (

-    set "SCRIPT_ERROR=Process %%j is already listening on port %SOLR_PORT%. If this is Solr, please stop it first before starting (or use restart). If this is not Solr, then please choose a different port using -p PORT"

-    goto err

+  For /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":%SOLR_PORT% "') do (

+    IF NOT "%%k"=="0" (

+      for /f "delims=: tokens=1,2" %%x IN ("%%j") do (

+        IF "%%x"=="0.0.0.0" (

+          set "SCRIPT_ERROR=Process %%k is already listening on port %SOLR_PORT%. If this is Solr, please stop it first before starting (or use restart). If this is not Solr, then please choose a different port using -p PORT"

+          goto err

+        )

+      )

+    )

   )

 )

 

@@ -691,7 +833,13 @@
 IF NOT "%SOLR_ADDL_ARGS%"=="" set "START_OPTS=%START_OPTS% %SOLR_ADDL_ARGS%"

 IF NOT "%SOLR_HOST_ARG%"=="" set "START_OPTS=%START_OPTS% %SOLR_HOST_ARG%"

 IF NOT "%SOLR_OPTS%"=="" set "START_OPTS=%START_OPTS% %SOLR_OPTS%"

-IF NOT "%LOG4J_CONFIG%"=="" set "START_OPTS=%START_OPTS% -Dlog4j.configuration=%LOG4J_CONFIG%"

+IF NOT "%SOLR_SSL_OPTS%"=="" (

+  set "SSL_PORT_PROP=-Djetty.ssl.port=%SOLR_PORT%"

+  IF DEFINED SOLR_SSL_PORT set "SSL_PORT_PROP=-Djetty.ssl.port=%SOLR_SSL_PORT%"

+  set "START_OPTS=%START_OPTS% %SOLR_SSL_OPTS% !SSL_PORT_PROP!"

+)

+

+IF NOT DEFINED LOG4J_CONFIG set "LOG4J_CONFIG=file:%SOLR_SERVER_DIR%\resources\log4j.properties"

 

 cd "%SOLR_SERVER_DIR%"

 

@@ -699,19 +847,23 @@
   mkdir "!SOLR_LOGS_DIR!"

 )

 

+IF NOT EXIST "%SOLR_SERVER_DIR%\tmp" (

+  mkdir "%SOLR_SERVER_DIR%\tmp"

+)

+

 @echo.

 @echo Starting Solr on port %SOLR_PORT% from %SOLR_SERVER_DIR%

 @echo.

 IF "%FG%"=="1" (

   REM run solr in the foreground

   title "Solr-%SOLR_PORT%"

-  echo %SOLR_PORT%>%SOLR_TIP%\bin\solr-%SOLR_PORT%.port

-  "%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^

-    -Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" -jar start.jar

+  echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port

+  "%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^

+    -Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" -Djetty.home="%SOLR_SERVER_DIR%" -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar

 ) ELSE (

-  START /B "Solr-%SOLR_PORT%" "%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^

-    -Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" -jar start.jar > "!SOLR_LOGS_DIR!\solr-%SOLR_PORT%-console.log"

-  echo %SOLR_PORT%>%SOLR_TIP%\bin\solr-%SOLR_PORT%.port

+  START /B "Solr-%SOLR_PORT%" /D "%SOLR_SERVER_DIR%" "%JAVA%" -server -Xss256k %SOLR_JAVA_MEM% %START_OPTS% -Dlog4j.configuration="%LOG4J_CONFIG%" -DSTOP.PORT=!STOP_PORT! -DSTOP.KEY=%STOP_KEY% ^

+    -Djetty.port=%SOLR_PORT% -Dsolr.solr.home="%SOLR_HOME%" -Dsolr.install.dir="%SOLR_TIP%" -Djetty.home="%SOLR_SERVER_DIR%" -Djava.io.tmpdir="%SOLR_SERVER_DIR%\tmp" -jar start.jar > "!SOLR_LOGS_DIR!\solr-%SOLR_PORT%-console.log"

+  echo %SOLR_PORT%>"%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port

 )

 

 

@@ -728,29 +880,29 @@
 IF NOT "!CREATE_EXAMPLE_CONFIG!"=="" (

   timeout /T 10

   IF "%SOLR_MODE%"=="solrcloud" (

-    "%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+    "%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

       -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

       org.apache.solr.util.SolrCLI create_collection -name !EXAMPLE_NAME! -shards 1 -replicationFactor 1 ^

-      -config !CREATE_EXAMPLE_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets" -solrUrl http://localhost:%SOLR_PORT%/solr

+      -confdir !CREATE_EXAMPLE_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets" -solrUrl !SOLR_URL_SCHEME!://localhost:%SOLR_PORT%/solr

   ) ELSE (

-    "%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+    "%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

       -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

-      org.apache.solr.util.SolrCLI create_core -name !EXAMPLE_NAME! -solrUrl http://localhost:%SOLR_PORT%/solr ^

-      -config !CREATE_EXAMPLE_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets"

+      org.apache.solr.util.SolrCLI create_core -name !EXAMPLE_NAME! -solrUrl !SOLR_URL_SCHEME!://localhost:%SOLR_PORT%/solr ^

+      -confdir !CREATE_EXAMPLE_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets"

   )

 )

 

 IF "%EXAMPLE%"=="techproducts" (

   @echo.

   @echo Indexing tech product example docs from %SOLR_TIP%\example\exampledocs

-  "%JAVA%" -Durl=http://localhost:%SOLR_PORT%/solr/%EXAMPLE%/update -jar %SOLR_TIP%/example/exampledocs/post.jar %SOLR_TIP%/example/exampledocs/*.xml

+  "%JAVA%" %SOLR_SSL_OPTS% -Durl=!SOLR_URL_SCHEME!://localhost:%SOLR_PORT%/solr/%EXAMPLE%/update -jar "%SOLR_TIP%/example/exampledocs/post.jar" "%SOLR_TIP%/example/exampledocs/*.xml"

 )

 

 @echo.

 IF NOT "%EXAMPLE%"=="" (

   @echo Solr %EXAMPLE% example launched successfully.

 )

-@echo Direct your Web browser to http://localhost:%SOLR_PORT%/solr to visit the Solr Admin UI

+@echo Direct your Web browser to !SOLR_URL_SCHEME!://localhost:%SOLR_PORT%/solr to visit the Solr Admin UI

 @echo.

 

 goto done

@@ -842,19 +994,19 @@
       set "DASHZ="

     )

     @echo Starting node1 on port !NODE_PORT! using command:

-    @echo solr -cloud -p !NODE_PORT! !DASHZ! !DASHM! -s %CLOUD_EXAMPLE_DIR%\node1\solr

-    START "Solr-!NODE_PORT!" "%SDIR%\solr" -f -cloud -p !NODE_PORT! !DASHZ! !DASHM! -s %CLOUD_EXAMPLE_DIR%\node1\solr

+    @echo solr -cloud -p !NODE_PORT! -s example\node1\solr !DASHZ! !DASHM!

+    START "Solr-!NODE_PORT!" /D "%SDIR%" solr -f -cloud -p !NODE_PORT! !DASHZ! !DASHM! -s "%CLOUD_EXAMPLE_DIR%\node1\solr"

     set NODE1_PORT=!NODE_PORT!

-    echo !NODE_PORT!>%SOLR_TIP%\bin\solr-!NODE_PORT!.port

+    echo !NODE_PORT!>"%SOLR_TIP%"\bin\solr-!NODE_PORT!.port

   ) ELSE (

     IF "!ZK_HOST!"=="" (

       set /A ZK_PORT=!NODE1_PORT!+1000

       set "ZK_HOST=localhost:!ZK_PORT!"

     )

     @echo Starting node%%x on port !NODE_PORT! using command:

-    @echo solr -cloud -p !NODE_PORT! -z !ZK_HOST! !DASHM! -s %CLOUD_EXAMPLE_DIR%\node%%x\solr

-    START "Solr-!NODE_PORT!" "%SDIR%\solr" -f -cloud -p !NODE_PORT! -z !ZK_HOST! !DASHM! -s %CLOUD_EXAMPLE_DIR%\node%%x\solr

-    echo !NODE_PORT!>%SOLR_TIP%\bin\solr-!NODE_PORT!.port

+    @echo solr -cloud -p !NODE_PORT! -s example\node%%x\solr -z !ZK_HOST! !DASHM!

+    START "Solr-!NODE_PORT!" /D "%SDIR%" solr -f -cloud -p !NODE_PORT! -z !ZK_HOST! !DASHM! -s "%CLOUD_EXAMPLE_DIR%\node%%x\solr"

+    echo !NODE_PORT!>"%SOLR_TIP%"\bin\solr-!NODE_PORT!.port

   )

 

   timeout /T 10

@@ -899,13 +1051,13 @@
 goto create_collection

 

 :create_collection

-"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+"%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

   -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

   org.apache.solr.util.SolrCLI create_collection -name !CLOUD_COLLECTION! -shards !CLOUD_NUM_SHARDS! -replicationFactor !CLOUD_REPFACT! ^

-  -config !CLOUD_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets" -zkHost %zk_host%

+  -confdir !CLOUD_CONFIG! -configsetsDir "%SOLR_SERVER_DIR%\solr\configsets" -zkHost %zk_host%

 

 echo.

-echo SolrCloud example is running, please visit http://localhost:%NODE1_PORT%/solr"

+echo SolrCloud example is running, please visit !SOLR_URL_SCHEME!://localhost:%NODE1_PORT%/solr"

 echo.

 

 REM End of interactive cloud example

@@ -915,19 +1067,23 @@
 :get_info

 REM Find all Java processes, correlate with those listening on a port

 REM and then try to contact via that port using the status tool

-for /f "usebackq" %%i in (`dir /b %SOLR_TIP%\bin ^| findstr /i "^solr-.*\.port$"`) do (

+for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do (

   set SOME_SOLR_PORT=

-  For /F "Delims=" %%J In (%SOLR_TIP%\bin\%%i) do set SOME_SOLR_PORT=%%~J

+  For /F "Delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J

   if NOT "!SOME_SOLR_PORT!"=="" (

-    for /f "tokens=2,5" %%j in ('netstat -aon ^| find /i "listening" ^| find /i "!SOME_SOLR_PORT!"') do (

-      for /f "delims=: tokens=1,2" %%x IN ("%%j") do (

-        @echo.

-        set has_info=1

-        echo Found Solr process %%k running on port !SOME_SOLR_PORT!

-        "%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

-          -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

-          org.apache.solr.util.SolrCLI status -solr http://localhost:!SOME_SOLR_PORT!/solr

-        @echo.

+    for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":!SOME_SOLR_PORT! "') do (

+      IF NOT "%%k"=="0" (

+        for /f "delims=: tokens=1,2" %%x IN ("%%j") do (

+          if "%%x"=="0.0.0.0" (

+            @echo.

+            set has_info=1

+            echo Found Solr process %%k running on port !SOME_SOLR_PORT!

+            "%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+              -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

+              org.apache.solr.util.SolrCLI status -solr !SOLR_URL_SCHEME!://localhost:!SOME_SOLR_PORT!/solr

+            @echo.

+          )

+        )

       )

     )

   )

@@ -962,33 +1118,45 @@
 :run_healthcheck

 IF NOT DEFINED HEALTHCHECK_COLLECTION goto healthcheck_usage

 IF NOT DEFINED HEALTHCHECK_ZK_HOST set "HEALTHCHECK_ZK_HOST=localhost:9983"

-"%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+"%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

   -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

   org.apache.solr.util.SolrCLI healthcheck -collection !HEALTHCHECK_COLLECTION! -zkHost !HEALTHCHECK_ZK_HOST!

 goto done

 

 :parse_create_args

 IF [%1]==[] goto run_create

-IF "%1"=="-c" goto set_create_config

-IF "%1"=="-configset" goto set_create_config

-IF "%1"=="-n" goto set_create_name

-IF "%1"=="-name" goto set_create_name

+IF "%1"=="-c" goto set_create_name

+IF "%1"=="-core" goto set_create_name

+IF "%1"=="-collection" goto set_create_name

+IF "%1"=="-d" goto set_create_confdir

+IF "%1"=="-confdir" goto set_create_confdir

+IF "%1"=="-n" goto set_create_confname

+IF "%1"=="-confname" goto set_create_confname

+IF "%1"=="-s" goto set_create_shards

 IF "%1"=="-shards" goto set_create_shards

+IF "%1"=="-rf" goto set_create_rf

 IF "%1"=="-replicationFactor" goto set_create_rf

 IF "%1"=="-p" goto set_create_port

+IF "%1"=="-port" goto set_create_port

 IF "%1"=="-help" goto usage

 IF "%1"=="-usage" goto usage

 IF "%1"=="/?" goto usage

 goto run_create

 

-:set_create_configset

-set CREATE_CONFIGSET=%~2

+:set_create_name

+set CREATE_NAME=%~2

 SHIFT

 SHIFT

 goto parse_create_args

 

-:set_create_name

-set CREATE_NAME=%~2

+:set_create_confdir

+set CREATE_CONFDIR=%~2

+SHIFT

+SHIFT

+goto parse_create_args

+

+:set_create_confname

+set CREATE_CONFNAME=%~2

 SHIFT

 SHIFT

 goto parse_create_args

@@ -1013,21 +1181,22 @@
 

 :run_create

 IF "!CREATE_NAME!"=="" (

-  set "SCRIPT_ERROR=Name (-n) is a required parameter for %SCRIPT_CMD%"

+  set "SCRIPT_ERROR=Name (-c) is a required parameter for %SCRIPT_CMD%"

   goto invalid_cmd_line

 )

-IF "!CREATE_CONFIGSET!"=="" set CREATE_CONFIGSET=data_driven_schema_configs

+IF "!CREATE_CONFDIR!"=="" set CREATE_CONFDIR=data_driven_schema_configs

 IF "!CREATE_NUM_SHARDS!"=="" set CREATE_NUM_SHARDS=1

 IF "!CREATE_REPFACT!"=="" set CREATE_REPFACT=1

+IF "!CREATE_CONFNAME!"=="" set CREATE_CONFNAME=!CREATE_NAME!

 

 REM Find a port that Solr is running on

 if "!CREATE_PORT!"=="" (

-  for /f "usebackq" %%i in (`dir /b %SOLR_TIP%\bin ^| findstr /i "^solr-.*\.port$"`) do (

+  for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do (

     set SOME_SOLR_PORT=

-    For /F "Delims=" %%J In (%SOLR_TIP%\bin\%%i) do set SOME_SOLR_PORT=%%~J

+    For /F "Delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J

     if NOT "!SOME_SOLR_PORT!"=="" (

-      for /f "tokens=2,5" %%j in ('netstat -aon ^| find /i "listening" ^| find /i "!SOME_SOLR_PORT!"') do (

-        set CREATE_PORT=!SOME_SOLR_PORT!

+      for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":!SOME_SOLR_PORT! "') do (

+        IF NOT "%%k"=="0" set CREATE_PORT=!SOME_SOLR_PORT!

       )

     )

   )

@@ -1037,21 +1206,86 @@
   goto err

 )

 

-@echo Found Solr node running on port !CREATE_PORT!

-

 if "%SCRIPT_CMD%"=="create_core" (

-  "%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+  "%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

     -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

-    org.apache.solr.util.SolrCLI create_core -name !CREATE_NAME!  -solrUrl http://localhost:!CREATE_PORT!/solr ^

-    -config !CREATE_CONFIGSET! -configsetsDir "%SOLR_TIP%\server\solr\configsets"

+    org.apache.solr.util.SolrCLI create_core -name !CREATE_NAME! -solrUrl !SOLR_URL_SCHEME!://localhost:!CREATE_PORT!/solr ^

+    -confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets"

 ) else (

-  "%JAVA%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

-    -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

-    org.apache.solr.util.SolrCLI create_collection -name !CREATE_NAME! -shards !CREATE_NUM_SHARDS! -replicationFactor !CREATE_REPFACT! ^

-    -config !CREATE_CONFIGSET! -configsetsDir "%SOLR_TIP%\server\solr\configsets" -solrUrl http://localhost:!CREATE_PORT!/solr

+  "%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+  -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

+  org.apache.solr.util.SolrCLI create -name !CREATE_NAME! -shards !CREATE_NUM_SHARDS! -replicationFactor !CREATE_REPFACT! ^

+  -confname !CREATE_CONFNAME! -confdir !CREATE_CONFDIR! -configsetsDir "%SOLR_TIP%\server\solr\configsets" -solrUrl !SOLR_URL_SCHEME!://localhost:!CREATE_PORT!/solr

 )

+

 goto done

 

+:parse_delete_args

+IF [%1]==[] goto run_delete

+IF "%1"=="-c" goto set_delete_name

+IF "%1"=="-core" goto set_delete_name

+IF "%1"=="-collection" goto set_delete_name

+IF "%1"=="-p" goto set_delete_port

+IF "%1"=="-port" goto set_delete_port

+IF "%1"=="-deleteConfig" goto set_delete_config

+IF "%1"=="-help" goto usage

+IF "%1"=="-usage" goto usage

+IF "%1"=="/?" goto usage

+goto run_delete

+

+:set_delete_name

+set DELETE_NAME=%~2

+SHIFT

+SHIFT

+goto parse_delete_args

+

+:set_delete_port

+set DELETE_PORT=%~2

+SHIFT

+SHIFT

+goto parse_delete_args

+

+:set_delete_config

+set DELETE_CONFIG=%~2

+SHIFT

+SHIFT

+goto parse_delete_args

+

+:run_delete

+IF "!DELETE_NAME!"=="" (

+  set "SCRIPT_ERROR=Name (-c) is a required parameter for %SCRIPT_CMD%"

+  goto invalid_cmd_line

+)

+

+REM Find a port that Solr is running on

+if "!DELETE_PORT!"=="" (

+  for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do (

+    set SOME_SOLR_PORT=

+    For /F "Delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J

+    if NOT "!SOME_SOLR_PORT!"=="" (

+      for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":!SOME_SOLR_PORT! "') do (

+        IF NOT "%%k"=="0" set DELETE_PORT=!SOME_SOLR_PORT!

+      )

+    )

+  )

+)

+if "!DELETE_PORT!"=="" (

+  set "SCRIPT_ERROR=Could not find a running Solr instance on this host! Please use the -p option to specify the port."

+  goto err

+)

+

+if "!DELETE_CONFIG!"=="" (

+  set DELETE_CONFIG=true

+)

+

+"%JAVA%" %SOLR_SSL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" -Dlog4j.configuration="file:%DEFAULT_SERVER_DIR%\scripts\cloud-scripts\log4j.properties" ^

+-classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^

+org.apache.solr.util.SolrCLI delete -name !DELETE_NAME! -deleteConfig !DELETE_CONFIG! ^

+-solrUrl !SOLR_URL_SCHEME!://localhost:!DELETE_PORT!/solr

+

+goto done

+

+

 :invalid_cmd_line

 @echo.

 IF "!SCRIPT_ERROR!"=="" (

@@ -1070,6 +1304,8 @@
   goto stop_usage

 ) ELSE IF "%FIRST_ARG%"=="healthcheck" (

   goto healthcheck_usage

+) ELSE IF "%FIRST_ARG%"=="create" (

+  goto create_usage

 ) ELSE IF "%FIRST_ARG%"=="create_core" (

   goto create_core_usage

 ) ELSE IF "%FIRST_ARG%"=="create_collection" (
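
The :set_passthru block above forwards any -Dname=value argument into
SOLR_OPTS (cmd.exe splits arguments on '=', which is why PASSTHRU is
reassembled from %~1 and %~2), and delete is now a first-class command. A
minimal sketch of both on Windows; 'mycoll' and the property are placeholders:

    REM forward an arbitrary system property to the Solr JVM
    bin\solr.cmd start -p 8983 -Dsolr.lock.type=native

    REM delete a core/collection; in SolrCloud mode its config is also removed
    REM from ZooKeeper unless -deleteConfig false is passed
    bin\solr.cmd delete -c mycoll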

diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd
index d8ab1ba..905284e 100644
--- a/solr/bin/solr.in.cmd
+++ b/solr/bin/solr.in.cmd
@@ -79,3 +79,11 @@
 

 REM Sets the port Solr binds to, default is 8983

 REM set SOLR_PORT=8983

+

+REM Uncomment to set SSL-related system properties

+REM Be sure to update the paths to the correct keystore for your environment

+REM set SOLR_SSL_OPTS=-Djavax.net.ssl.keyStore=etc/solr-ssl.keystore.jks -Djavax.net.ssl.keyStorePassword=secret -Djavax.net.ssl.trustStore=etc/solr-ssl.keystore.jks -Djavax.net.ssl.trustStorePassword=secret

+

+REM Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set

+REM and you are using SSL, then the start script will use SOLR_PORT for the SSL port

+REM set SOLR_SSL_PORT=

diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index 9f4114b..ccaea12 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -91,3 +91,13 @@
 # Sets the port Solr binds to, default is 8983
 #SOLR_PORT=8983
 
+# Uncomment to set SSL-related system properties
+# Be sure to update the paths to the correct keystore for your environment
+#SOLR_SSL_OPTS="-Djavax.net.ssl.keyStore=etc/solr-ssl.keystore.jks \
+#-Djavax.net.ssl.keyStorePassword=secret \
+#-Djavax.net.ssl.trustStore=etc/solr-ssl.keystore.jks \
+#-Djavax.net.ssl.trustStorePassword=secret"
+
+# Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set
+# and you are using SSL, then the start script will use SOLR_PORT for the SSL port
+#SOLR_SSL_PORT=
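
Taken together with the SOLR_URL_SCHEME detection added to both start scripts,
enabling SSL is a two-step sketch (the keystore paths and passwords are the
placeholder values from the commented template above):

    # solr.in.sh: uncomment and adjust
    SOLR_SSL_OPTS="-Djavax.net.ssl.keyStore=etc/solr-ssl.keystore.jks \
    -Djavax.net.ssl.keyStorePassword=secret \
    -Djavax.net.ssl.trustStore=etc/solr-ssl.keystore.jks \
    -Djavax.net.ssl.trustStorePassword=secret"

    # then start as usual; the script appends -Djetty.ssl.port=$SOLR_PORT
    # (or $SOLR_SSL_PORT if set) and reports https:// URLs
    bin/solr start
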
diff --git a/solr/build.xml b/solr/build.xml
index 852b7b2..d077946 100644
--- a/solr/build.xml
+++ b/solr/build.xml
@@ -20,8 +20,8 @@
   
   <target name="usage" description="Prints out instructions">
     <echo message="Welcome to the Solr project!" />
-    <echo message="Use 'ant example' to create a runnable example configuration." />
-    <echo message="Use 'ant run-example' to create and run the example." />
+    <echo message="Use 'ant server' to create the Solr server." />
+    <echo message="Use 'bin/solr' to run the Solr after it is created." />
     <echo message="And for developers:"/>
     <echo message="Use 'ant clean' to clean compiled files." />
     <echo message="Use 'ant compile' to compile the source code." />
@@ -38,9 +38,9 @@
   <!-- ========================================================================= -->
   <!-- ============================== USER TASKS =============================== -->
   <!-- ========================================================================= -->
- 
-  <target name="example" description="Creates a runnable example configuration."
-          depends="dist-contrib,build-war">
+
+  <target name="server" depends="dist-contrib,server-war"
+          description="Creates a Solr server">
     <jar destfile="${example}/exampledocs/post.jar"
          basedir="${dest}/solr-core/classes/java"
          includes="org/apache/solr/util/SimplePostTool*.class">
@@ -51,11 +51,12 @@
     <delete includeemptydirs="true">
       <fileset dir="${server.dir}/solr-webapp" includes="**/*"/>
     </delete>
-    <echo>See ${example}/README.txt for how to run the Solr example configuration.</echo>
+    <echo>See ${common-solr.dir}/README.txt for how to run the Solr server.</echo>
   </target>
   
-  <target name="run-example" depends="example"
+  <target name="run-example" depends="server"
           description="Run Solr interactively, via Jetty.  -Dexample.debug=true to enable JVM debugger">
+    <property name="example.solr.home" location="${server.dir}/solr"/>
     <property name="example.debug.suspend" value="n"/>
     <property name="example.jetty.port" value="8983"/>
     <condition property="example.jvm.line" value="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=${example.debug.suspend},address=5005">
@@ -63,9 +64,12 @@
     </condition>
     <property name="example.jvm.line" value=""/>
     <property name="example.heap.size" value="512M"/>
-    <exec executable="${common-solr.dir}/bin/solr" failonerror="true">
-      <arg line="-e techproducts -p ${example.jetty.port} -m ${example.heap.size} -d ${server.dir} -a '${example.jvm.line}'"/>
-    </exec>
+    <java jar="${server.dir}/start.jar" fork="true" dir="${server.dir}" maxmemory="${example.heap.size}">
+      <jvmarg line="${example.jvm.line}"/>
+      <sysproperty key="solr.solr.home" file="${example.solr.home}"/>
+      <sysproperty key="jetty.port" value="${example.jetty.port}"/>
+      <sysproperty key="jetty.home" value="${server.dir}"/>
+    </java>
   </target>
  
   <!-- setup proxy for download tasks -->
@@ -151,7 +155,7 @@
       so we pass ourself (${ant.file}) here. The list of module build.xmls is given
       via string parameter, that must be splitted by the XSL at '|'.
     --> 
-    <xslt in="${ant.file}" out="${javadoc.dir}/index.html" style="site/xsl/index.xsl" force="true">
+    <xslt in="${ant.file}" out="${javadoc.dir}/index.html" style="site/index.xsl" force="true">
       <outputproperty name="method" value="html"/>
       <outputproperty name="version" value="4.0"/>
       <outputproperty name="encoding" value="UTF-8"/>
@@ -162,12 +166,12 @@
     </xslt>
     
     <pegdown todir="${javadoc.dir}">
-      <fileset dir="." includes="SYSTEM_REQUIREMENTS.txt"/>
-      <globmapper from="*.txt" to="*.html"/>
+      <fileset dir="site" includes="**/*.mdtext"/>
+      <globmapper from="*.mdtext" to="*.html"/>
     </pegdown>
 
     <copy todir="${javadoc.dir}">
-      <fileset dir="site/html" includes="**/*"/>
+      <fileset dir="site/assets" />
     </copy>
   </target>
 
@@ -308,7 +312,7 @@
       </fileset>
       <fileset dir="server">
         <include name="**/data/**/*" />
-        <include name="solr/zoo_data" />
+        <include name="solr/zoo_data/" />
         <include name="start.jar" />
         <include name="logs/*" />
         <include name="webapps/**/*" />
@@ -337,7 +341,7 @@
   
   <target name="dist"
           description="Creates the Solr distribution files."
-          depends="dist-solrj, dist-core, dist-test-framework, dist-contrib, build-war" />
+          depends="dist-solrj, dist-core, dist-test-framework, dist-contrib" />
  
   <target name="dist-test-framework" depends="init-dist"
           description="Creates the Solr test-framework JAR.">
@@ -351,9 +355,9 @@
     <contrib-crawl target="dist" failonerror="true" />
   </target>
   
-  <target name="build-war"
+  <target name="server-war"
           description="Creates the Solr WAR Distribution file.">
-    <ant dir="webapp" target="dist" inheritall="false">
+    <ant dir="webapp" target="server-war" inheritall="false">
       <propertyset refid="uptodate.and.compiled.properties"/>
     </ant>
   </target>
@@ -450,7 +454,7 @@
       <param name="called.from.create-package" value="true"/>
       <target name="init-dist"/>
       <target name="dist"/>
-      <target name="example"/>
+      <target name="server"/>
       <target name="documentation"/>
     </antcall>
     <mkdir dir="${dest}/${fullnamever}"/>
@@ -478,14 +482,15 @@
                   excludes="licenses/README.committers.txt **/data/ **/logs/* 
                             **/classes/ **/*.sh **/ivy.xml **/build.xml
                             **/bin/ **/*.iml **/*.ipr **/*.iws **/pom.xml 
-                            **/*pom.xml.template" />
+                            **/*pom.xml.template server/etc/test/" />
       <tarfileset dir="${dest}/contrib-lucene-libs-to-package"
                   prefix="${fullnamever}"
                   includes="**" />
       <tarfileset dir="."
                   filemode="755"
                   prefix="${fullnamever}"
-                  includes="bin/** server/**/*.sh example/**/*.sh example/**/bin/" />
+                  includes="bin/** server/**/*.sh example/**/*.sh example/**/bin/"
+                  excludes="server/etc/test/**" />
       <tarfileset dir="."
                   prefix="${fullnamever}"
                   includes="dist/*.jar
@@ -567,7 +572,7 @@
        <!-- NOTE: must currently exclude deprecated-list due to a javadocs bug (as of 1.7.0_09)
             javadocs generates invalid XML if you deprecate a method that takes a parameter
             with a generic type -->
-      <fileset dir="build/docs" includes="**/*.html" excludes="**/deprecated-list.html"/>
+      <fileset dir="build/docs" includes="**/*.html" excludes="**/deprecated-list.html,quickstart.html"/>
     </jtidy-macro>
     <echo message="Checking for broken links..."/>
     <check-broken-links dir="${javadoc.dir}"/>
@@ -575,7 +580,7 @@
     <!-- TODO: add missing docs for all classes and bump this to level=class -->
     <check-missing-javadocs dir="${javadoc.dir}" level="package"/>
   </target>
- 
+
   <target name="-ecj-javadoc-lint" depends="compile,compile-test,jar-test-framework,-ecj-javadoc-lint-unsupported,-ecj-resolve" if="ecj-javadoc-lint.supported">
     <subant target="-ecj-javadoc-lint" failonerror="true" inheritall="false">
       <propertyset refid="uptodate.and.compiled.properties"/>
@@ -738,4 +743,19 @@
     <ant dir="test-framework" target="-append-module-dependencies-properties" inheritAll="false"/>
     <contrib-crawl target="-append-module-dependencies-properties"/>
   </target>
+
+  <target name="example" depends="server">
+    <!-- no description so -p doesn't list it -->
+    <echo>
+
+    ! ! ! NOTICE NOTICE NOTICE ! ! !
+
+    'ant example' is no longer recommended
+
+    Use 'ant server' instead
+
+    'ant example' is going to be removed at some point
+
+    </echo>
+  </target>
 </project>
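
With 'example' reduced to a deprecation shim and 'build-war' renamed to
'server-war', the intended developer workflow is roughly the following sketch,
based on the targets above:

    cd solr
    ant server                       # builds the runnable server (replaces 'ant example')
    bin/solr start -e techproducts   # run an example via the start script
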
diff --git a/solr/cloud-dev/cli-test-solrcloud-start.sh b/solr/cloud-dev/cli-test-solrcloud-start.sh
index d2776c0..1634ab7 100755
--- a/solr/cloud-dev/cli-test-solrcloud-start.sh
+++ b/solr/cloud-dev/cli-test-solrcloud-start.sh
@@ -16,7 +16,7 @@
 rm -r -f server/solr/data
 rm -f server/server.log
 
-ant example dist
+ant server dist
 
 cp -r -f server server2
 cp -r -f server server3
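
The remaining cloud-dev helpers below get the same one-line migration; any
local wrapper scripts should switch accordingly:

    # old
    ant example dist
    # new
    ant server dist
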
diff --git a/solr/cloud-dev/example1.sh b/solr/cloud-dev/example1.sh
index cb1cbf3..418642d 100755
--- a/solr/cloud-dev/example1.sh
+++ b/solr/cloud-dev/example1.sh
@@ -10,7 +10,7 @@
 rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
-ant example dist
+ant server dist
 
 cp -r -f example example2
 
diff --git a/solr/cloud-dev/example2.sh b/solr/cloud-dev/example2.sh
index fcd57b0..3c9f232 100755
--- a/solr/cloud-dev/example2.sh
+++ b/solr/cloud-dev/example2.sh
@@ -12,7 +12,7 @@
 rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
-ant example dist
+ant server dist
 
 cp -r -f example example2
 cp -r -f example example3
diff --git a/solr/cloud-dev/example3.sh b/solr/cloud-dev/example3.sh
index 0f76657..404db01 100755
--- a/solr/cloud-dev/example3.sh
+++ b/solr/cloud-dev/example3.sh
@@ -12,7 +12,7 @@
 rm -r -f example/solr/collection1/data
 rm -f example/example.log
 
-ant example dist
+ant server dist
 
 cp -r -f example example2
 cp -r -f example example3
diff --git a/solr/cloud-dev/functions.sh b/solr/cloud-dev/functions.sh
index 3b2da92..e7ab517 100755
--- a/solr/cloud-dev/functions.sh
+++ b/solr/cloud-dev/functions.sh
@@ -11,7 +11,7 @@
 	rm -r -f build
 	rm -r -f server/solr/zoo_data
 	rm -f server/server.log
-	ant example dist
+	ant server dist
 }
 
 setports() {
diff --git a/solr/cloud-dev/solrcloud-extzk-start.sh b/solr/cloud-dev/solrcloud-extzk-start.sh
index 67d1064..419e0e1 100755
--- a/solr/cloud-dev/solrcloud-extzk-start.sh
+++ b/solr/cloud-dev/solrcloud-extzk-start.sh
@@ -18,7 +18,7 @@
 rm -r -f server/solr/collection1/data
 rm -f server/server.log
 
-ant example dist
+ant server dist
 
 cp -r -f server server2
 cp -r -f server server3
diff --git a/solr/cloud-dev/solrcloud-multi-start.sh b/solr/cloud-dev/solrcloud-multi-start.sh
index ebea0ab..937f32e 100755
--- a/solr/cloud-dev/solrcloud-multi-start.sh
+++ b/solr/cloud-dev/solrcloud-multi-start.sh
@@ -18,7 +18,7 @@
 rm -r -f server/solr/collection1/data
 rm -f server/server.log
 
-ant example dist
+ant server dist
 
 cp -r -f server server2
 cp -r -f server server3
diff --git a/solr/cloud-dev/solrcloud-start.sh b/solr/cloud-dev/solrcloud-start.sh
index 3904fcf..e0f2aa7 100755
--- a/solr/cloud-dev/solrcloud-start.sh
+++ b/solr/cloud-dev/solrcloud-start.sh
@@ -36,7 +36,7 @@
 rm -f server/server.log
 
 ant -f ../build.xml clean
-ant example dist
+ant server dist
 
 rm -r server/solr-webapp/*
 unzip server/webapps/solr.war -d server/solr-webapp/webapp
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/ValueAccumulator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/ValueAccumulator.java
index 1507953..8ed37aa 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/ValueAccumulator.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/ValueAccumulator.java
@@ -38,9 +38,4 @@
     // NOP
   }
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return true;
-  }
-
 }
diff --git a/solr/contrib/analytics/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml b/solr/contrib/analytics/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
index 055f3d7..7514aa4 100644
--- a/solr/contrib/analytics/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
+++ b/solr/contrib/analytics/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
@@ -35,7 +35,6 @@
   <ramBufferSizeMB>${solr.tests.ramBufferSizeMB}</ramBufferSizeMB>
 
   <mergeScheduler class="${solr.tests.mergeScheduler}" />
-  <nrtMode>${solr.tests.nrtMode:true}</nrtMode>
 
   <writeLockTimeout>1000</writeLockTimeout>
   <commitLockTimeout>10000</commitLockTimeout>
diff --git a/solr/contrib/clustering/src/test-files/clustering/solr/collection1/core.properties b/solr/contrib/clustering/src/test-files/clustering/solr/collection1/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/contrib/clustering/src/test-files/clustering/solr/collection1/core.properties
diff --git a/solr/contrib/clustering/src/test-files/clustering/solr/solr.xml b/solr/contrib/clustering/src/test-files/clustering/solr/solr.xml
index 98a31e6..f39bd64 100644
--- a/solr/contrib/clustering/src/test-files/clustering/solr/solr.xml
+++ b/solr/contrib/clustering/src/test-files/clustering/solr/solr.xml
@@ -20,14 +20,10 @@
  solr.xml mimicking the old default solr.xml
 -->
 
-<solr persistent="false">
-  <cores adminPath="/admin/cores" defaultCoreName="collection1"
-         host="${host:}" hostPort="${hostPort:}" hostContext="${hostContext:}"
-         zkClientTimeout="${zkClientTimeout:15000}">
-    <core name="collection1" shard="${shard:}" collection="${collection:collection1}" instanceDir="collection1"/>
-    
+<solr>
+
     <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
      <str name="urlScheme">${urlScheme:}</str>
      </shardHandlerFactory>
-  </cores>
+
 </solr>
diff --git a/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/DistributedClusteringComponentTest.java b/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/DistributedClusteringComponentTest.java
index da654c7..62b4b48 100644
--- a/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/DistributedClusteringComponentTest.java
+++ b/solr/contrib/clustering/src/test/org/apache/solr/handler/clustering/DistributedClusteringComponentTest.java
@@ -20,6 +20,7 @@
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.common.params.CommonParams;
+import org.junit.Test;
 
 @SuppressSSL
 public class DistributedClusteringComponentTest extends
@@ -30,8 +31,8 @@
     return getFile("clustering/solr/collection1").getParent();
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     del("*:*");
     int numberOfDocs = 0;
     for (String[] doc : AbstractClusteringTestCase.DOCUMENTS) {
@@ -47,7 +48,7 @@
         CommonParams.Q, "*:*",
         CommonParams.SORT, id + " desc",
         ClusteringParams.USE_SEARCH_RESULTS, "true");
-    // destroy is not needed because tearDown method of base class does it.
+    // destroy is not needed because distribTearDown method of base class does it.
     //destroyServers();
   }
 
diff --git a/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java
index 13026e3..b617957 100644
--- a/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java
+++ b/solr/contrib/dataimporthandler-extras/src/test/org/apache/solr/handler/dataimport/TestTikaEntityProcessor.java
@@ -30,6 +30,7 @@
 import javax.xml.transform.stream.StreamResult;
 import java.io.StringReader;
 import java.io.StringWriter;
+import java.util.Locale;
 
 /**Testcase for TikaEntityProcessor
  *
@@ -73,6 +74,8 @@
 
   @BeforeClass
   public static void beforeClass() throws Exception {
+    assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
+        new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
     initCore("dataimport-solrconfig.xml", "dataimport-schema-no-unique-key.xml", getFile("dihextras/solr").getAbsolutePath());
   }
 
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java
deleted file mode 100644
index 56c464c..0000000
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/CachedSqlEntityProcessor.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-/**
- * This class enables caching of data obtained from the DB to avoid too many sql
- * queries
- * <p/>
- * <p>
- * Refer to <a
- * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache
- * .org/solr/DataImportHandler</a> for more details.
- * </p>
- * <p/>
- * <b>This API is experimental and subject to change</b>
- * 
- * @since solr 1.3
- * @deprecated - Use SqlEntityProcessor with cacheImpl parameter.
- */
-@Deprecated
-public class CachedSqlEntityProcessor extends SqlEntityProcessor {
-    @Override
-    protected void initCache(Context context) {
-      cacheSupport = new DIHCacheSupport(context, "SortedMapBackedCache");
-    }
-
-}
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
index dfb7736..eaee297 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
@@ -492,15 +492,29 @@
             getDebugLogger().log(DIHLogLevels.ENTITY_OUT, epw.getEntity().getName(), arow);
           }
           importStatistics.rowsCount.incrementAndGet();
+          
+          DocWrapper childDoc = null;
           if (doc != null) {
-            handleSpecialCommands(arow, doc);
-            addFields(epw.getEntity(), doc, arow, vr);
+            if (epw.getEntity().isChild()) {
+              childDoc = new DocWrapper();
+              handleSpecialCommands(arow, childDoc);
+              addFields(epw.getEntity(), childDoc, arow, vr);
+              doc.addChildDocument(childDoc);
+            } else {
+              handleSpecialCommands(arow, doc);
+              addFields(epw.getEntity(), doc, arow, vr);
+            }
           }
           if (epw.getEntity().getChildren() != null) {
             vr.addNamespace(epw.getEntity().getName(), arow);
             for (EntityProcessorWrapper child : epw.getChildren()) {
-              buildDocument(vr, doc,
+              if (childDoc != null) {
+                buildDocument(vr, childDoc,
                   child.getEntity().isDocRoot() ? pk : null, child, false, ctx, entitiesToDestroy);
+              } else {
+                buildDocument(vr, doc,
+                    child.getEntity().isDocRoot() ? pk : null, child, false, ctx, entitiesToDestroy);
+              }
             }
             vr.removeNamespace(epw.getEntity().getName());
           }
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HttpDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HttpDataSource.java
deleted file mode 100644
index 04633f3..0000000
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/HttpDataSource.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.dataimport;
-
-/**
- * <p>
- * A data source implementation which can be used to read character files using
- * HTTP.
- * </p>
- * <p/>
- * <p>
- * Refer to <a
- * href="http://wiki.apache.org/solr/DataImportHandler">http://wiki.apache.org/solr/DataImportHandler</a>
- * for more details.
- * </p>
- * <p/>
- * <b>This API is experimental and may change in the future.</b>
- *
- * @since solr 1.3
- * @deprecated use {@link org.apache.solr.handler.dataimport.URLDataSource} instead
- */
-@Deprecated
-public class HttpDataSource extends URLDataSource {
-  
-}
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
index a0aab81..cabd7a0 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
@@ -18,8 +18,8 @@
  */
 
 import org.apache.http.client.HttpClient;
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -31,6 +31,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Collection;
@@ -71,6 +72,17 @@
   private String[] fields;
   private String requestHandler;// 'qt' param
   private int timeout = TIMEOUT_SECS;
+  
+  @Override
+  public void destroy() {
+    try {
+      solrClient.close();
+    } catch (IOException e) {
+      // close() failures are ignored; the underlying HttpClient is closed in finally
+    } finally {
+      HttpClientUtil.close(((HttpSolrClient) solrClient).getHttpClient());
+    }
+  }
 
   /**
    * Factory method that returns a {@link HttpClient} instance used for interfacing with a source Solr service.
@@ -94,7 +106,6 @@
             "SolrEntityProcessor: parameter 'url' is required");
       }
 
-      // TODO: we should close this client!
       HttpClient client = getHttpClient();
       URL url = new URL(serverPath);
       // (wt="javabin|xml") default is javabin
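
With destroy() now closing the SolrClient, the processor owns the client for its whole lifetime and callers are expected to call destroy() when done. A minimal usage sketch, assuming the init/nextRow contract of EntityProcessor and mirroring the try/finally pattern added to the unit tests later in this patch:

    SolrEntityProcessor processor = new SolrEntityProcessor();
    processor.init(context); // opens the HttpSolrClient for the configured 'url'
    try {
      Map<String, Object> row;
      while ((row = processor.nextRow()) != null) {
        // consume rows
      }
    } finally {
      processor.destroy(); // closes the SolrClient, then its underlying HttpClient
    }
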
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/ConfigNameConstants.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/ConfigNameConstants.java
index d959e75..a34e31d 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/ConfigNameConstants.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/ConfigNameConstants.java
@@ -16,12 +16,12 @@
  */
 package org.apache.solr.handler.dataimport.config;
 
+import org.apache.solr.handler.dataimport.SolrWriter;
+
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.solr.handler.dataimport.SolrWriter;
-
 public class ConfigNameConstants {
   public static final String SCRIPT = "script";
 
@@ -31,15 +31,13 @@
   
   public static final String PROPERTY_WRITER = "propertyWriter";
 
-  /**
-   * @deprecated use IMPORTER_NS_SHORT instead
-   */
-  @Deprecated
   public static final String IMPORTER_NS = "dataimporter";
 
   public static final String IMPORTER_NS_SHORT = "dih";
 
   public static final String ROOT_ENTITY = "rootEntity";
+  
+  public static final String CHILD = "child";
 
   public static final String FUNCTION = "function";
 
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/Entity.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/Entity.java
index 6d66064..e7c4f58 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/Entity.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/Entity.java
@@ -41,6 +41,7 @@
   private final String processorName;
   private final Entity parentEntity;
   private final boolean docRoot;
+  private final boolean child;
   private final List<Entity> children;
   private final List<EntityField> fields;
   private final Map<String,Set<EntityField>> colNameVsField;
@@ -77,6 +78,9 @@
       docRoot = false;
     }
     
+    String childValue = ConfigParseUtil.getStringAttribute(element, ConfigNameConstants.CHILD, null);
+    child = "true".equals(childValue);
+    
     Map<String,String> modAttributes = ConfigParseUtil
         .getAllAttributes(element);
     modAttributes.put(ConfigNameConstants.DATA_SRC, this.dataSourceName);
@@ -219,4 +223,8 @@
   public List<Map<String,String>> getAllFieldsList() {
     return allFieldAttributes;
   }
+
+  public boolean isChild() {
+    return child;
+  }
 }
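
With the new child attribute parsed above, a data-config can mark a nested entity as a block-join child. A minimal sketch of such a config, written as a Java string in the same style as the test configs later in this patch (entity and table names are illustrative):

    String config =
        "<dataConfig><dataSource type='MockDataSource' /><document>" +
        "  <entity name='parent' query='select * from PARENT'>" +
        "    <entity child='true' name='kid' query=\"select * from CHILD where parent_id='${parent.id}'\" />" +
        "  </entity>" +
        "</document></dataConfig>";
    // Entity.isChild() returns true for the nested 'kid' entity above
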
diff --git a/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-schema.xml b/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-schema.xml
index e6bcf81..7b10481 100644
--- a/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-schema.xml
+++ b/solr/contrib/dataimporthandler/src/test-files/dih/solr/collection1/conf/dataimport-schema.xml
@@ -43,6 +43,7 @@
     <field name="DO_NOT_INDEX" type="ignored" />
 
     <field name="_version_" type="tlong" indexed="true" stored="true" multiValued="false"/>
+    <field name="_root_" type="string" indexed="true" stored="true" multiValued="false"/>
        
     <dynamicField name="*_i"       type="tint"    indexed="true"  stored="true"/>
     <dynamicField name="*_s"       type="string"  indexed="true"  stored="true"/>
diff --git a/solr/contrib/dataimporthandler/src/test-files/dih/solr/solr.xml b/solr/contrib/dataimporthandler/src/test-files/dih/solr/solr.xml
index 4088f52..330eef1 100644
--- a/solr/contrib/dataimporthandler/src/test-files/dih/solr/solr.xml
+++ b/solr/contrib/dataimporthandler/src/test-files/dih/solr/solr.xml
@@ -20,13 +20,8 @@
  solr.xml mimicking the old default solr.xml
 -->
 
-<solr persistent="false">
-  <cores adminPath="/admin/cores" defaultCoreName="collection1"
-         host="${host:}" hostPort="${hostPort:}" hostContext="${hostContext:}"
-         zkClientTimeout="${zkClientTimeout:15000}">
-    <core name="collection1" shard="${shard:}" collection="${collection:collection1}" instanceDir="collection1"/>
+<solr>
     <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
       <str name="urlScheme">${urlScheme:}</str>
     </shardHandlerFactory>
-  </cores>
 </solr>
\ No newline at end of file
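
With the <cores> element gone, this solr.xml relies on core discovery: Solr finds cores by scanning the home directory for core.properties files, and an empty file defaults the core name to its directory name. The tests below create exactly such a marker file, e.g.:

    // mark collection1 as a discoverable core (an empty core.properties is sufficient)
    Files.createFile(homeDir.toPath().resolve("collection1/core.properties"));
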
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java
index c9afeb7..e4eff3c 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/AbstractSqlEntityProcessorTestCase.java
@@ -1,5 +1,9 @@
 package org.apache.solr.handler.dataimport;
 
+import junit.framework.Assert;
+import org.junit.After;
+import org.junit.Before;
+
 import java.io.File;
 import java.nio.file.Files;
 import java.sql.Connection;
@@ -17,11 +21,6 @@
 import java.util.Map;
 import java.util.Set;
 
-import junit.framework.Assert;
-
-import org.junit.After;
-import org.junit.Before;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -634,8 +633,7 @@
           + "newColumnName=''countryAdded_s'' newColumnValue=''country_added'' "
           : "");
       if (countryCached) {
-        sb.append(random().nextBoolean() ? "processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' "
-            : "processor=''CachedSqlEntityProcessor'' ");
+        sb.append("processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' ");
         if (useSimpleCaches) {
           sb.append("query=''SELECT CODE, COUNTRY_NAME FROM COUNTRIES WHERE DELETED != 'Y' AND CODE='${People.COUNTRY_CODE}' ''>\n");
         } else {
@@ -671,8 +669,7 @@
           + "newColumnName=''sportsAdded_s'' newColumnValue=''sport_added'' "
           : "");
       if (sportsCached) {
-        sb.append(random().nextBoolean() ? "processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' "
-            : "processor=''CachedSqlEntityProcessor'' ");
+        sb.append("processor=''SqlEntityProcessor'' cacheImpl=''SortedMapBackedCache'' ");
         if (useSimpleCaches) {
           sb.append("query=''SELECT ID, SPORT_NAME FROM PEOPLE_SPORTS WHERE DELETED != 'Y' AND PERSON_ID=${People.ID} ORDER BY ID'' ");
         } else {
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java
index 946b73c..ec4e2f7 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestContentStreamDataSource.java
@@ -16,9 +16,6 @@
  */
 package org.apache.solr.handler.dataimport;
 
-import java.io.File;
-import java.util.List;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -32,6 +29,10 @@
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.File;
+import java.nio.file.Files;
+import java.util.List;
+
 /**
  * Test for ContentStreamDataSource
  *
@@ -67,17 +68,17 @@
     params.set("command", "full-import");
     params.set("clean", "false");
     req.setParams(params);
-    HttpSolrClient solrServer = new HttpSolrClient(buildUrl(jetty.getLocalPort(), "/solr"));
-    solrServer.request(req);
-    ModifiableSolrParams qparams = new ModifiableSolrParams();
-    qparams.add("q", "*:*");
-    QueryResponse qres = solrServer.query(qparams);
-    SolrDocumentList results = qres.getResults();
-    assertEquals(2, results.getNumFound());
-    SolrDocument doc = results.get(0);
-    assertEquals("1", doc.getFieldValue("id"));
-    assertEquals("Hello C1", ((List)doc.getFieldValue("desc")).get(0));
-    solrServer.shutdown();
+    try (HttpSolrClient solrClient = new HttpSolrClient(buildUrl(jetty.getLocalPort(), "/solr/collection1"))) {
+      solrClient.request(req);
+      ModifiableSolrParams qparams = new ModifiableSolrParams();
+      qparams.add("q", "*:*");
+      QueryResponse qres = solrClient.query(qparams);
+      SolrDocumentList results = qres.getResults();
+      assertEquals(2, results.getNumFound());
+      SolrDocument doc = results.get(0);
+      assertEquals("1", doc.getFieldValue("id"));
+      assertEquals("Hello C1", ((List) doc.getFieldValue("desc")).get(0));
+    }
   }
 
   @Test
@@ -87,22 +88,22 @@
         "clean", "false", UpdateParams.COMMIT, "false", 
         UpdateParams.COMMIT_WITHIN, "1000");
     req.setParams(params);
-    HttpSolrClient solrServer = new HttpSolrClient(buildUrl(jetty.getLocalPort(), "/solr"));
-    solrServer.request(req);
-    Thread.sleep(100);
-    ModifiableSolrParams queryAll = params("q", "*");
-    QueryResponse qres = solrServer.query(queryAll);
-    SolrDocumentList results = qres.getResults();
-    assertEquals(0, results.getNumFound());
-    Thread.sleep(1000);
-    for (int i = 0; i < 10; i++) {
-      qres = solrServer.query(queryAll);
-      results = qres.getResults();
-      if (2 == results.getNumFound()) {
-        solrServer.shutdown();
-        return;
+    try (HttpSolrClient solrServer = new HttpSolrClient(buildUrl(jetty.getLocalPort(), "/solr/collection1"))) {
+      solrServer.request(req);
+      Thread.sleep(100);
+      ModifiableSolrParams queryAll = params("q", "*");
+      QueryResponse qres = solrServer.query(queryAll);
+      SolrDocumentList results = qres.getResults();
+      assertEquals(0, results.getNumFound());
+      Thread.sleep(1000);
+      for (int i = 0; i < 10; i++) {
+        qres = solrServer.query(queryAll);
+        results = qres.getResults();
+        if (2 == results.getNumFound()) {
+          return;
+        }
+        Thread.sleep(500);
       }
-      Thread.sleep(500);
     }
     fail("Commit should have occured but it did not");
   }
@@ -165,6 +166,8 @@
       FileUtils.copyFile(getFile(getSchemaFile()), f);
       f = new File(confDir, "data-config.xml");
       FileUtils.copyFile(getFile(CONF_DIR + "dataconfig-contentstream.xml"), f);
+
+      Files.createFile(homeDir.toPath().resolve("collection1/core.properties"));
     }
 
   }
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
new file mode 100644
index 0000000..31602e8
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
@@ -0,0 +1,470 @@
+package org.apache.solr.handler.dataimport;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.lucene.index.StoredDocument;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter;
+import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.search.join.ToParentBlockJoinQuery;
+import org.apache.solr.handler.dataimport.config.ConfigNameConstants;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test for DocBuilder using the test harness. 
+ * <b> Documents are hierarchical in this test, i.e. each document has nested child documents.</b>
+ */
+public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCase {
+
+  private static final String FIELD_ID = "id";
+  private int id = 0; //unique id
+  private SolrQueryRequest req;
+  
+  /**
+   * Holds the data related to the randomly created index.
+   * It is used for making assertions.
+   */
+  private static class ContextHolder {
+    /** Overall number of documents **/
+    int counter = 0;
+    
+    /**
+     * Each Hierarchy object represents nested documents with a parent at the root of the hierarchy
+     */
+    List<Hierarchy> hierarchies = new ArrayList<Hierarchy>();
+  }
+  
+  /**
+   * Represents a hierarchical document structure
+   */
+  private static class Hierarchy {
+    
+    /**
+     * Type of element, e.g. parent, child, grandchild.
+     */
+    String elementType;
+    
+    /**
+     * Fields of the current element
+     */
+    Map<String, Object> elementData = new HashMap<String,Object>();
+    
+    /**
+     * Hierarchies of nested elements/documents.
+     */
+    List<Hierarchy> elements = new ArrayList<Hierarchy>();
+  }
+  
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("dataimport-solrconfig.xml", "dataimport-schema.xml");    
+  }
+  
+  @Before
+  public void before() {
+    req = req("*:*"); // don't really care about query
+    MockDataSource.clearCache();
+  }
+  
+  @After
+  public void after() {
+    req.close();
+    MockDataSource.clearCache();
+  }
+
+  @Test
+  public void testThreeLevelHierarchy() throws Exception {
+    int parentsNum = 3; //fixed for simplicity of test
+    int childrenNum = 0;
+    int grandChildrenNum = 0;
+    
+    final String parentType = "parent";
+    final String childType = "child";
+    final String grandChildType = "grand_child";
+
+    List<String> parentIds = createDataIterator("select * from PARENT", parentType, parentType, parentsNum);
+    Collections.shuffle(parentIds, random());
+    String parentId1 = parentIds.get(0);
+    String parentId2 = parentIds.get(1);
+    
+    //parent 1 children
+    int firstParentChildrenNum = 3; //fixed for simplicity of test
+    String select = "select * from CHILD where parent_id='" + parentId1 + "'";
+    List<String> childrenIds = createDataIterator(select, childType, "child of first parent", firstParentChildrenNum);
+    List<String> firstParentChildrenIds = new ArrayList<String>(childrenIds);
+    childrenNum += childrenIds.size();
+    
+    // grandchildren of the first parent's first child
+    String childId = childrenIds.get(0);
+    String description = "grandchild of first parent, child of child " + childId;
+    select = "select * from GRANDCHILD where parent_id='" + childId + "'";
+    List<String> grandChildrenIds = createDataIterator(select, grandChildType, description, atLeast(2));
+    grandChildrenNum += grandChildrenIds.size();
+    
+    // grandchildren of the first parent's second child
+    childId = childrenIds.get(1);
+    description = "grandchild of first parent, child of child " + childId;
+    select = "select * from GRANDCHILD where parent_id='" + childId + "'";
+    List<String> grandChildrenIds2 = createDataIterator(select, grandChildType, description, atLeast(2));
+    grandChildrenNum += grandChildrenIds2.size();
+    
+    grandChildrenIds.addAll(grandChildrenIds2);
+    
+    // the third child of the first parent has no grandchildren
+    
+    // parent 2 children (no grandchildren)
+    select = "select * from CHILD where parent_id='" + parentId2 + "'";
+    childrenIds = createDataIterator(select, childType, "child of second parent", atLeast(2));
+    childrenNum += childrenIds.size();
+    
+    // parent 3 has no children or grandchildren
+    
+    int totalDocsNum = parentsNum + childrenNum + grandChildrenNum;
+    
+    runFullImport(threeLevelHierarchyConfig);
+    
+    assertTrue("Update request processor processAdd was not called", TestUpdateRequestProcessor.processAddCalled);
+    assertTrue("Update request processor processCommit was not callled", TestUpdateRequestProcessor.processCommitCalled);
+    assertTrue("Update request processor finish was not called", TestUpdateRequestProcessor.finishCalled);
+    
+    // very simple asserts to check that we at least have the correct number of docs indexed
+    assertQ(req("*:*"), "//*[@numFound='" + totalDocsNum + "']");
+    assertQ(req("type_s:parent"), "//*[@numFound='" + parentsNum + "']");
+    assertQ(req("type_s:child"), "//*[@numFound='" + childrenNum + "']");
+    assertQ(req("type_s:grand_child"), "//*[@numFound='" + grandChildrenNum + "']");
+
+    // let's check BlockJoin
+    // get first parent by any grandchild
+    String randomGrandChildId = grandChildrenIds.get(random().nextInt(grandChildrenIds.size()));
+    Query query = createToParentQuery(parentType, FIELD_ID, randomGrandChildId);
+    assertSearch(query, FIELD_ID, parentId1);
+
+    // get first parent by any child
+    String randomChildId = firstParentChildrenIds.get(random().nextInt(firstParentChildrenIds.size()));
+    query = createToParentQuery(parentType, FIELD_ID, randomChildId);
+    assertSearch(query, FIELD_ID, parentId1);
+    
+    // get parent by child by grandchild
+    randomGrandChildId = grandChildrenIds.get(random().nextInt(grandChildrenIds.size()));
+    ToParentBlockJoinQuery childBlockJoinQuery = createToParentQuery(childType, FIELD_ID, randomGrandChildId);
+    ToParentBlockJoinQuery blockJoinQuery = new ToParentBlockJoinQuery(childBlockJoinQuery, createParentFilter(parentType), ScoreMode.Avg);
+    assertSearch(blockJoinQuery, FIELD_ID, parentId1);
+  }
+
+  @Test
+  public void testRandomDepthHierarchy() throws Exception {
+    final String parentType = "parent";
+    
+    // Be aware that hierarchies grow exponentially, so
+    // values bigger than 6 may lead to significant memory usage
+    // and cause an OOME
+    int parentsNum = 2 + random().nextInt(3);
+    int depth = 2 + random().nextInt(3);
+    
+    ContextHolder holder = new ContextHolder();
+    
+    String config = createRandomizedConfig(depth, parentType, parentsNum, holder);
+    runFullImport(config);
+    
+    assertTrue("Update request processor processAdd was not called", TestUpdateRequestProcessor.processAddCalled);
+    assertTrue("Update request processor processCommit was not callled", TestUpdateRequestProcessor.processCommitCalled);
+    assertTrue("Update request processor finish was not called", TestUpdateRequestProcessor.finishCalled);
+    
+    assertQ(req("type_s:" + parentType), "//*[@numFound='" + parentsNum + "']");
+    assertQ(req("-type_s:"+ parentType), "//*[@numFound='" + (holder.counter - parentsNum) + "']");
+    
+    // let's check BlockJoin
+    Hierarchy randomHierarchy = holder.hierarchies.get(random().nextInt(holder.hierarchies.size()));
+       
+    Query deepestQuery = createBlockJoinQuery(randomHierarchy);
+    assertSearch(deepestQuery, FIELD_ID, (String) randomHierarchy.elementData.get(FIELD_ID));
+  }
+  
+  private Query createBlockJoinQuery(Hierarchy hierarchy) {
+    List<Hierarchy> elements = hierarchy.elements;
+    if (elements.isEmpty()) {
+      BooleanQuery childQuery = new BooleanQuery();
+      childQuery.add(new TermQuery(new Term(FIELD_ID, (String) hierarchy.elementData.get(FIELD_ID))), Occur.MUST);
+      return childQuery;
+    }
+    
+    Query childQuery = createBlockJoinQuery(elements.get(random().nextInt(elements.size())));
+    return createToParentQuery(hierarchy.elementType, childQuery);
+  }
+
+  private ToParentBlockJoinQuery createToParentQuery(String parentType, String childField, String childFieldValue) {
+    BooleanQuery childQuery = new BooleanQuery();
+    childQuery.add(new TermQuery(new Term(childField, childFieldValue)), Occur.MUST);
+    ToParentBlockJoinQuery result = createToParentQuery(parentType, childQuery);
+    
+    return result;
+  }
+  
+  private ToParentBlockJoinQuery createToParentQuery(String parentType, Query childQuery) {
+    ToParentBlockJoinQuery blockJoinQuery = new ToParentBlockJoinQuery(childQuery, createParentFilter(parentType), ScoreMode.Avg);
+    
+    return blockJoinQuery;
+  }
+  
+  private void assertSearch(Query query, String field, String... values) throws IOException {
+    /* The search limit is doubled to catch the case where, for some reason, more docs match than expected */
+    SolrIndexSearcher searcher = req.getSearcher();
+    TopDocs result = searcher.search(query, values.length * 2);
+    assertEquals(values.length, result.totalHits);
+    List<String> actualValues = new ArrayList<String>();
+    for (int index = 0; index < values.length; ++index) {
+      StoredDocument doc = searcher.doc(result.scoreDocs[index].doc);
+      actualValues.add(doc.get(field));
+    }
+    
+    for (String expectedValue: values) {
+      boolean removed = actualValues.remove(expectedValue);
+      if (!removed) {
+        fail("Search result does not contain expected values");
+      }
+    }
+  }
+  
+  @SuppressWarnings("unchecked")
+  private List<String> createDataIterator(String query, String type, String description, int count) {
+    List<Map<String, Object>> data = new ArrayList<Map<String, Object>>();
+    List<String> ids = new ArrayList<String>(count);
+    for (int index = 0; index < count; ++index) {
+      String docId = nextId();
+      ids.add(docId);
+      Map<String, Object> doc = createMap(FIELD_ID, docId, "desc", docId + " " + description, "type_s", type);
+      data.add(doc);
+    }
+    Collections.shuffle(data, random());
+    MockDataSource.setIterator(query, data.iterator());
+    
+    return ids;
+  }
+  
+  /**
+   * Creates a randomized configuration of the specified depth. A simple configuration example:
+   * 
+   * <pre>
+   * 
+   * &lt;dataConfig>
+   *   &lt;dataSource type="MockDataSource" />
+   *   &lt;document>
+   *     &lt;entity name="parent" query="SELECT * FROM parent">
+   *       &lt;field column="id" />
+   *       &lt;field column="desc" />
+   *       &lt;field column="type_s" />
+   *       &lt;entity child="true" name="parentChild0" query="select * from parentChild0 where parentChild0_parent_id='${parent.id}'">
+   *         &lt;field column="id" />
+   *         &lt;field column="desc" />
+   *         &lt;field column="type_s" />
+   *         &lt;entity child="true" name="parentChild0Child0" query="select * from parentChild0Child0 where parentChild0Child0_parent_id='${parentChild0.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *         &lt;entity child="true" name="parentChild0Child1" query="select * from parentChild0Child1 where parentChild0Child1_parent_id='${parentChild0.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *       &lt;/entity>
+   *       &lt;entity child="true" name="parentChild1" query="select * from parentChild1 where parentChild1_parent_id='${parent.id}'">
+   *         &lt;field column="id" />
+   *         &lt;field column="desc" />
+   *         &lt;field column="type_s" />
+   *         &lt;entity child="true" name="parentChild1Child0" query="select * from parentChild1Child0 where parentChild1Child0_parent_id='${parentChild1.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *         &lt;entity child="true" name="parentChild1Child1" query="select * from parentChild1Child1 where parentChild1Child1_parent_id='${parentChild1.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *       &lt;/entity>
+   *     &lt;/entity>
+   *   &lt;/document>
+   * &lt;/dataConfig>
+   * 
+   * </pre>
+   * 
+   * Internally configures MockDataSource.
+   **/
+  private String createRandomizedConfig(int depth, String parentType, int parentsNum, ContextHolder holder) {
+    List<Hierarchy> parentData = createMockedIterator(parentType, "SELECT * FROM " + parentType, parentsNum, holder);
+    
+    holder.hierarchies = parentData;
+    
+    String children = createChildren(parentType, 0, depth, parentData, holder);
+    
+    String rootFields = createFieldsList(FIELD_ID, "desc", "type_s");
+    String rootEntity = MessageFormat.format(rootEntityTemplate, parentType, "SELECT * FROM " + parentType, rootFields, children);
+
+    String config = MessageFormat.format(dataConfigTemplate, rootEntity);
+    return config;
+  }
+  
+  @SuppressWarnings("unchecked")
+  private List<Hierarchy> createMockedIterator(String type, String query, int amount, ContextHolder holder) {
+    List<Hierarchy> hierarchies = new ArrayList<Hierarchy>();
+    List<Map<String, Object>> data = new ArrayList<Map<String, Object>>();
+    for (int index = 0; index < amount; ++index) {
+      holder.counter++;      
+      String idStr = String.valueOf(holder.counter);
+      Map<String, Object> element = createMap(FIELD_ID, idStr, "desc", type + "_" + holder.counter, "type_s", type);
+      data.add(element);
+      
+      Hierarchy hierarchy = new Hierarchy();
+      hierarchy.elementType = type;
+      hierarchy.elementData = element;
+      hierarchies.add(hierarchy);
+    }
+    
+    MockDataSource.setIterator(query, data.iterator());
+    
+    return hierarchies;
+  }
+  
+  private List<Hierarchy> createMockedIterator(String type, List<Hierarchy> parentData, ContextHolder holder) {
+    List<Hierarchy> result = new ArrayList<Hierarchy>();
+    for (Hierarchy parentHierarchy: parentData) {
+      Map<String, Object> data = parentHierarchy.elementData;
+      String id = (String) data.get(FIELD_ID);
+      String select = String.format(Locale.ROOT, "select * from %s where %s='%s'", type, type + "_parent_id", id);
+      
+      // Number of actual child documents
+      int childrenNum = 1 + random().nextInt(3);
+      List<Hierarchy> childHierarchies = createMockedIterator(type, select, childrenNum, holder);
+      parentHierarchy.elements.addAll(childHierarchies);
+      result.addAll(childHierarchies);
+    }
+    return result;
+  }
+
+  private String createChildren(String parentName, int currentLevel, int maxLevel,
+      List<Hierarchy> parentData, ContextHolder holder) {
+    
+    if (currentLevel == maxLevel) { //recursion base
+      return "";
+    }
+    
+    // number of different child types per parent, e.g. parentChild0, parentChild1;
+    // see #createMockedIterator for the actual number of children of each type
+    int childrenNumber = 2 + random().nextInt(3);
+    StringBuilder builder = new StringBuilder();
+    for (int childIndex = 0; childIndex < childrenNumber; ++childIndex) {
+      String childName = parentName + "Child" + childIndex;
+      String fields = createFieldsList(FIELD_ID, "desc", "type_s");
+      String select = String.format(Locale.ROOT, "select * from %s where %s='%s'", childName, childName + "_parent_id", "${" + parentName + ".id}");
+      
+      //for each child entity create several iterators
+      List<Hierarchy> childData = createMockedIterator(childName, parentData, holder);
+      
+      String subChildren = createChildren(childName, currentLevel + 1, maxLevel, childData, holder);
+      String child = MessageFormat.format(childEntityTemplate, childName, select, fields, subChildren);
+      builder.append(child);
+      builder.append('\n');
+    }
+    
+    return builder.toString();
+  }
+  
+  private String createFieldsList(String... fields) {
+    StringBuilder builder = new StringBuilder();
+    for (String field: fields) {
+      String text = String.format(Locale.ROOT, "<field column='%s' />", field);
+      builder.append(text);
+      builder.append('\n');
+    }
+    return builder.toString();
+  }
+
+  private final String threeLevelHierarchyConfig = "<dataConfig>\n" +
+      "  <dataSource type='MockDataSource' />\n" +
+      "  <document>\n" +
+      "    <entity name='PARENT' query='select * from PARENT'>\n" +
+      "      <field column='id' />\n" +
+      "      <field column='desc' />\n" +
+      "      <field column='type_s' />\n" +
+      "      <entity child='true' name='CHILD' query=\"select * from CHILD where parent_id='${PARENT.id}'\">\n" +
+      "        <field column='id' />\n" +
+      "        <field column='desc' />\n" +
+      "        <field column='type_s' />\n" +
+      "          <entity child='true' name='GRANDCHILD' query=\"select * from GRANDCHILD where parent_id='${CHILD.id}'\">\n" +
+      "            <field column='id' />\n" +
+      "            <field column='desc' />\n" +
+      "            <field column='type_s' />\n" +
+      "          </entity>\n" +
+      "      </entity>\n" +
+      "    </entity>\n" +
+      "  </document>\n" +
+      "</dataConfig>";
+  
+  /** {0} is rootEntity block **/
+  private final String dataConfigTemplate = "<dataConfig><dataSource type=\"MockDataSource\" />\n<document>\n {0}</document></dataConfig>";
+  
+  /**
+   * {0} - entityName,
+   * {1} - select query,
+   * {2} - fieldsList,
+   * {3} - childEntitiesList
+   **/
+  private final String rootEntityTemplate = "<entity name=\"{0}\" query=\"{1}\">\n{2} {3}\n</entity>\n";
+  
+  /**
+   * {0} - entityName,
+   * {1} - select query,
+   * {2} - fieldsList,
+   * {3} - childEntitiesList
+   **/
+  private final String childEntityTemplate = "<entity " + ConfigNameConstants.CHILD + "=\"true\" name=\"{0}\" query=\"{1}\">\n {2} {3} </entity>\n";
+  
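+  // Marks parent documents (type_s == parent type) in a cached bit set; ToParentBlockJoinQuery
+  // maps each matching child hit to the nearest following parent bit in index order.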
+  private BitDocIdSetFilter createParentFilter(String type) {
+    BooleanQuery parentQuery = new BooleanQuery();
+    parentQuery.add(new TermQuery(new Term("type_s", type)), Occur.MUST);
+    return new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(parentQuery));
+  }
+  
+  private String nextId() {
+    ++id;
+    return String.valueOf(id);
+  }
+  
+}
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
index 82ceaab..fdb2d3c 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
@@ -17,14 +17,6 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -38,6 +30,15 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
 /**
  * End-to-end test of SolrEntityProcessor. "Real" test using embedded Solr
  */
@@ -94,7 +95,7 @@
   }
   
   private String getSourceUrl() {
-    return buildUrl(jetty.getLocalPort(), "/solr");
+    return buildUrl(jetty.getLocalPort(), "/solr/collection1");
   }
   
   //TODO: fix this test to close its directories
@@ -279,15 +280,12 @@
       }
       sidl.add(sd);
     }
-    
-    HttpSolrClient solrServer = new HttpSolrClient(getSourceUrl());
-    try {
+
+    try (HttpSolrClient solrServer = new HttpSolrClient(getSourceUrl())) {
       solrServer.setConnectionTimeout(15000);
       solrServer.setSoTimeout(30000);
       solrServer.add(sidl);
       solrServer.commit(true, true);
-    } finally {
-      solrServer.shutdown();
     }
   }
   
@@ -332,6 +330,8 @@
       FileUtils.copyFile(getFile(getSchemaFile()), f);
       f = new File(confDir, "data-config.xml");
       FileUtils.copyFile(getFile(SOURCE_CONF_DIR + "dataconfig-contentstream.xml"), f);
+
+      Files.createFile(confDir.toPath().resolve("../core.properties"));
     }
 
     public void tearDown() throws Exception {
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
index 04f3490..687f7fc 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
@@ -32,9 +32,12 @@
     List<Doc> docs = generateUniqueDocs(2);
 
     MockSolrEntityProcessor processor = createAndInit(docs);
-
-    assertExpectedDocs(docs, processor);
-    assertEquals(1, processor.getQueryCount());
+    try {
+      assertExpectedDocs(docs, processor);
+      assertEquals(1, processor.getQueryCount());
+    } finally {
+      processor.destroy();
+    }
   }
 
   private MockSolrEntityProcessor createAndInit(List<Doc> docs) {
@@ -46,8 +49,12 @@
 
     int rowsNum = 10;
     MockSolrEntityProcessor processor = createAndInit(docs, rowsNum);
-    assertExpectedDocs(docs, processor);
-    assertEquals(5, processor.getQueryCount());
+    try {
+      assertExpectedDocs(docs, processor);
+      assertEquals(5, processor.getQueryCount());
+    } finally {
+      processor.destroy();
+    }
   }
 
   private MockSolrEntityProcessor createAndInit(List<Doc> docs, int rowsNum) {
@@ -67,15 +74,19 @@
     docs.add(testDoc);
 
     MockSolrEntityProcessor processor = createAndInit(docs);
-    Map<String, Object> next = processor.nextRow();
-    assertNotNull(next);
-
-    @SuppressWarnings("unchecked")
-    List<Comparable> multiField = (List<Comparable>) next.get("description");
-    assertEquals(testDoc.getValues("description").size(), multiField.size());
-    assertEquals(testDoc.getValues("description"), multiField);
-    assertEquals(1, processor.getQueryCount());
-    assertNull(processor.nextRow());
+    try {
+      Map<String, Object> next = processor.nextRow();
+      assertNotNull(next);
+  
+      @SuppressWarnings("unchecked")
+      List<Comparable> multiField = (List<Comparable>) next.get("description");
+      assertEquals(testDoc.getValues("description").size(), multiField.size());
+      assertEquals(testDoc.getValues("description"), multiField);
+      assertEquals(1, processor.getQueryCount());
+      assertNull(processor.nextRow());
+    } finally {
+      processor.destroy();
+    }
   }
 
   private List<Doc> generateUniqueDocs(int numDocs) {
diff --git a/solr/contrib/extraction/ivy.xml b/solr/contrib/extraction/ivy.xml
index 353c126..e5f632a 100644
--- a/solr/contrib/extraction/ivy.xml
+++ b/solr/contrib/extraction/ivy.xml
@@ -27,6 +27,7 @@
     <dependency org="org.apache.tika" name="tika-core" rev="${/org.apache.tika/tika-core}" conf="compile"/>
     <dependency org="org.apache.tika" name="tika-parsers" rev="${/org.apache.tika/tika-parsers}" conf="compile"/>
     <dependency org="org.apache.tika" name="tika-xmp" rev="${/org.apache.tika/tika-xmp}" conf="compile"/>
+    <dependency org="org.apache.tika" name="tika-java7" rev="${/org.apache.tika/tika-java7}" conf="compile"/>
     <!-- Tika dependencies - see http://tika.apache.org/1.3/gettingstarted.html#Using_Tika_as_a_Maven_dependency -->
     <!-- When upgrading Tika, upgrade dependencies versions and add any new ones
          (except slf4j-api, commons-codec, commons-logging, commons-httpclient, geronimo-stax-api_1.0_spec, jcip-annotations, xml-apis, asm)
diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java
index 74406cb..29fad26 100644
--- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java
+++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java
@@ -45,6 +45,7 @@
 import org.apache.tika.parser.ParseContext;
 import org.apache.tika.parser.Parser;
 import org.apache.tika.parser.PasswordProvider;
+import org.apache.tika.parser.html.HtmlMapper;
 import org.apache.tika.sax.XHTMLContentHandler;
 import org.apache.tika.sax.xpath.Matcher;
 import org.apache.tika.sax.xpath.MatchingContentHandler;
@@ -199,6 +200,7 @@
         try{
           //potentially use a wrapper handler for parsing, but we still need the SolrContentHandler for getting the document.
           ParseContext context = new ParseContext();//TODO: should we design a way to pass in parse context?
+          context.set(HtmlMapper.class, MostlyPassthroughHtmlMapper.INSTANCE);
 
           // Password handling
           RegexRulesPasswordProvider epp = new RegexRulesPasswordProvider();
@@ -250,4 +252,34 @@
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Stream type of " + streamType + " didn't match any known parsers.  Please supply the " + ExtractingParams.STREAM_TYPE + " parameter.");
     }
   }
-}
+
+  public static class MostlyPassthroughHtmlMapper implements HtmlMapper {
+    public static final HtmlMapper INSTANCE = new MostlyPassthroughHtmlMapper();
+
+    /** 
+     * Keep all elements and their content.
+     *  
+     * Apparently &lt;SCRIPT&gt; and &lt;STYLE&gt; elements are blocked elsewhere.
+     */
+    @Override
+    public boolean isDiscardElement(String name) {     
+      return false;
+    }
+
+    /** Lowercases the attribute name */
+    @Override
+    public String mapSafeAttribute(String elementName, String attributeName) {
+      return attributeName.toLowerCase(Locale.ENGLISH);
+    }
+
+    /**
+     * Lowercases the element name, but returns null for &lt;BR&gt;,
+     * which suppresses the start-element event for &lt;BR&gt; tags.
+     */
+    @Override
+    public String mapSafeElement(String name) {
+      String lowerName = name.toLowerCase(Locale.ROOT);
+      return lowerName.equals("br") ? null : lowerName;
+    }
+  }
+}
\ No newline at end of file
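
The mapper is plugged into Tika through the ParseContext set earlier in this file, which lets elements Tika would normally treat as unsafe (such as <div>) reach the SolrContentHandler; the new capture tests below depend on this. The registration, as a minimal sketch using only calls visible in this patch:

    ParseContext context = new ParseContext();
    context.set(HtmlMapper.class, MostlyPassthroughHtmlMapper.INSTANCE); // pass all HTML elements through
    // parser.parse(inputStream, parsingHandler, metadata, context); as in ExtractingDocumentLoader
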
diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java
index 234dc16..b460849 100644
--- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java
+++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingRequestHandler.java
@@ -94,9 +94,7 @@
     if (config == null) {
       try {
         config = getDefaultConfig(core.getResourceLoader().getClassLoader());
-      } catch (MimeTypeException e) {
-        throw new SolrException(ErrorCode.SERVER_ERROR, e);
-      } catch (IOException e) {
+      } catch (MimeTypeException | IOException e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
       }
     }
diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
index 4484bc5..fe1ecf5 100644
--- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
+++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/SolrContentHandler.java
@@ -280,7 +280,7 @@
       }
     } else {
       for (int i = 0; i < attributes.getLength(); i++) {
-        bldrStack.getLast().append(attributes.getValue(i)).append(' ');
+        bldrStack.getLast().append(' ').append(attributes.getValue(i));
       }
     }
     bldrStack.getLast().append(' ');
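
Moving the separator in front of each attribute value keeps captured values from running into the text already in the buffer, while the trailing append(' ') after the loop still terminates the element. A sketch of the effect on hypothetical buffer contents:

    // before: "sometext" + "val " + ' '  ->  "sometextval  "   (glued, double trailing space)
    // after:  "sometext" + " val" + ' '  ->  "sometext val "   (separated, single trailing space)
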
diff --git a/solr/contrib/extraction/src/test-files/extraction/open-document.odt b/solr/contrib/extraction/src/test-files/extraction/open-document.odt
new file mode 100644
index 0000000..57f4369
--- /dev/null
+++ b/solr/contrib/extraction/src/test-files/extraction/open-document.odt
Binary files differ
diff --git a/solr/contrib/extraction/src/test-files/extraction/simple.html b/solr/contrib/extraction/src/test-files/extraction/simple.html
index 656b656..3c807fb 100644
--- a/solr/contrib/extraction/src/test-files/extraction/simple.html
+++ b/solr/contrib/extraction/src/test-files/extraction/simple.html
@@ -1,6 +1,9 @@
 <html>
 <head>
   <title>Welcome to Solr</title>
+  <style type="text/css">
+    body { font-family: serif; }
+  </style>
 </head>
 <body>
 <p>
@@ -10,4 +13,7 @@
 <div>Here is some text in a div</div>
 <div>This has a <a href="http://www.apache.org">link</a>.</div>
 </body>
+<script>
+  document.getElementById("div").blur();
+</script>
 </html>
diff --git a/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java b/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
index 89533bd..4104d4b 100644
--- a/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
+++ b/solr/contrib/extraction/src/test/org/apache/solr/handler/extraction/ExtractingRequestHandlerTest.java
@@ -18,6 +18,8 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
+
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.ContentStream;
@@ -41,6 +43,8 @@
 
   @BeforeClass
   public static void beforeClass() throws Exception {
+    assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
+        new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
     initCore("solrconfig.xml", "schema.xml", getFile("extraction/solr").getAbsolutePath());
   }
 
@@ -107,6 +111,8 @@
     //assertQ(req("+id:simple2 +t_content_type:[* TO *]"), "//*[@numFound='1']");
     assertQ(req("+id:simple2 +t_href:[* TO *]"), "//*[@numFound='1']");
     assertQ(req("+id:simple2 +t_abcxyz:[* TO *]"), "//*[@numFound='1']");
+    assertQ(req("+id:simple2 +t_content:serif"), "//*[@numFound='0']"); // make sure <style> content is excluded
+    assertQ(req("+id:simple2 +t_content:blur"), "//*[@numFound='0']"); // make sure <script> content is excluded
 
     // load again in the exact same way, but boost one field
     loadLocal("extraction/simple.html",
@@ -123,16 +129,6 @@
     assertQ(req("t_href:http"), "//doc[1]/str[.='simple3']");
     assertQ(req("+id:simple3 +t_content_type:[* TO *]"), "//*[@numFound='1']");//test lowercase and then uprefix
 
-    // test capture
-     loadLocal("extraction/simple.html",
-      "literal.id","simple4",
-      "uprefix", "t_",
-      "capture","p",     // capture only what is in the title element
-      "commit", "true"
-    );
-    assertQ(req("+id:simple4 +t_content:Solr"), "//*[@numFound='1']");
-    assertQ(req("+id:simple4 +t_p:\"here is some text\""), "//*[@numFound='1']");
-
     loadLocal("extraction/version_control.xml", "fmap.created", "extractedDate", "fmap.producer", "extractedProducer",
             "fmap.creator", "extractedCreator", "fmap.Keywords", "extractedKeywords",
             "fmap.Author", "extractedAuthor",
@@ -181,8 +177,45 @@
             , "//*/arr[@name='stream_name']/str[.='tiny.txt.gz']"
             );
 
+    // compressed file (OpenDocument .odt is a zip archive)
+    loadLocal("extraction/open-document.odt", 
+              "uprefix", "ignored_",
+              "fmap.content", "extractedContent",
+              "literal.id", "open-document");
+    assertU(commit());
+    assertQ(req("extractedContent:\"Práctica sobre GnuPG\"")
+            , "//*[@numFound='1']"
+            , "//*/arr[@name='stream_name']/str[.='open-document.odt']"
+            );
   }
 
+  @Test
+  public void testCapture() throws Exception {
+    loadLocal("extraction/simple.html",
+        "literal.id","capture1",
+        "uprefix","t_",
+        "capture","div",
+        "fmap.div", "foo_t",
+        "commit", "true"
+    );
+    assertQ(req("+id:capture1 +t_content:Solr"), "//*[@numFound='1']");
+    assertQ(req("+id:capture1 +foo_t:\"here is some text in a div\""), "//*[@numFound='1']");
+
+    loadLocal("extraction/simple.html",
+        "literal.id", "capture2",
+        "captureAttr", "true",
+        "defaultField", "text",
+        "fmap.div", "div_t",
+        "fmap.a", "anchor_t",
+        "capture", "div",
+        "capture", "a",
+        "commit", "true"
+    );
+    assertQ(req("+id:capture2 +text:Solr"), "//*[@numFound='1']");
+    assertQ(req("+id:capture2 +div_t:\"here is some text in a div\""), "//*[@numFound='1']");
+    assertQ(req("+id:capture2 +anchor_t:http\\://www.apache.org"), "//*[@numFound='1']");
+    assertQ(req("+id:capture2 +anchor_t:link"), "//*[@numFound='1']");
+  }
 
   @Test
   public void testDefaultField() throws Exception {
@@ -462,14 +495,25 @@
     ExtractingRequestHandler handler = (ExtractingRequestHandler) h.getCore().getRequestHandler("/update/extract");
     assertTrue("handler is null and it shouldn't be", handler != null);
     SolrQueryResponse rsp = loadLocal("extraction/example.html",
-            ExtractingParams.XPATH_EXPRESSION, "/xhtml:html/xhtml:body/xhtml:a/descendant:node()",
+            ExtractingParams.XPATH_EXPRESSION, "/xhtml:html/xhtml:body/xhtml:a/descendant::node()",
             ExtractingParams.EXTRACT_ONLY, "true"
     );
     assertTrue("rsp is null and it shouldn't be", rsp != null);
     NamedList list = rsp.getValues();
     String val = (String) list.get("example.html");
-    val = val.trim();
-    assertTrue(val + " is not equal to " + "linkNews", val.equals("linkNews") == true);//there are two <a> tags, and they get collapesd
+    assertEquals("News", val.trim()); //there is only one matching <a> tag
+
+    loadLocal("extraction/example.html",
+        "literal.id", "example1",
+        "captureAttr", "true",
+        "defaultField", "text",
+        "capture", "div",
+        "fmap.div", "foo_t",
+        "boost.foo_t", "3",
+        "xpath", "/xhtml:html/xhtml:body/xhtml:div//node()",
+        "commit", "true"
+    );
+    assertQ(req("+id:example1 +foo_t:\"here is some text in a div\""), "//*[@numFound='1']");
   }
 
   /** test arabic PDF extraction is functional */
diff --git a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java
index 34701d5..dba832c 100644
--- a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java
+++ b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/BatchWriter.java
@@ -16,6 +16,15 @@
  */
 package org.apache.solr.hadoop;
 
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
+import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.common.SolrInputDocument;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -26,15 +35,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.TaskID;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
-import org.apache.solr.client.solrj.response.UpdateResponse;
-import org.apache.solr.common.SolrInputDocument;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Enables adding batches of documents to an EmbeddedSolrServer.
  */
@@ -209,7 +209,7 @@
     context.setStatus("Committing Solr Phase 2");
     solr.commit(true, false);
     context.setStatus("Shutting down Solr");
-    solr.shutdown();
+    solr.close();
   }
 
   /**
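
shutdown() becomes close() here because SolrClient is Closeable in this branch, which is also what enables the try-with-resources conversions elsewhere in this patch. A minimal caller sketch (the URL is illustrative):

    try (SolrClient solr = new HttpSolrClient("http://localhost:8983/solr/collection1")) {
      solr.commit(true, false);
    } // close() replaces the old shutdown() call
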
diff --git a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java
index b7d26eb..1b337b3 100644
--- a/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java
+++ b/solr/contrib/map-reduce/src/java/org/apache/solr/hadoop/GoLive.java
@@ -16,6 +16,15 @@
  */
 package org.apache.solr.hadoop;
 
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.hadoop.MapReduceIndexerTool.Options;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -31,15 +40,6 @@
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.request.CoreAdminRequest;
-import org.apache.solr.hadoop.MapReduceIndexerTool.Options;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * The optional (parallel) GoLive phase merges the output shards of the previous
  * phase into a set of live customer facing Solr servers, typically a SolrCloud.
@@ -91,23 +91,14 @@
             public Request call() {
               Request req = new Request();
               LOG.info("Live merge " + dir.getPath() + " into " + mergeUrl);
-              final HttpSolrClient server = new HttpSolrClient(mergeUrl);
-              try {
+              try (final HttpSolrClient client = new HttpSolrClient(mergeUrl)) {
                 CoreAdminRequest.MergeIndexes mergeRequest = new CoreAdminRequest.MergeIndexes();
                 mergeRequest.setCoreName(name);
                 mergeRequest.setIndexDirs(Arrays.asList(dir.getPath().toString() + "/data/index"));
-                try {
-                  mergeRequest.process(server);
-                  req.success = true;
-                } catch (SolrServerException e) {
-                  req.e = e;
-                  return req;
-                } catch (IOException e) {
-                  req.e = e;
-                  return req;
-                }
-              } finally {
-                server.shutdown();
+                mergeRequest.process(client);
+                req.success = true;
+              } catch (SolrServerException | IOException e) {
+                req.e = e;
               }
               return req;
             }
@@ -149,17 +140,17 @@
       try {
         LOG.info("Committing live merge...");
         if (options.zkHost != null) {
-          CloudSolrClient server = new CloudSolrClient(options.zkHost);
-          server.setDefaultCollection(options.collection);
-          server.commit();
-          server.shutdown();
+          try (CloudSolrClient server = new CloudSolrClient(options.zkHost)) {
+            server.setDefaultCollection(options.collection);
+            server.commit();
+          }
         } else {
           for (List<String> urls : options.shardUrls) {
             for (String url : urls) {
               // TODO: we should do these concurrently
-              HttpSolrClient server = new HttpSolrClient(url);
-              server.commit();
-              server.shutdown();
+              try (HttpSolrClient server = new HttpSolrClient(url)) {
+                server.commit();
+              }
             }
           }
         }
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
index 0c8b3a0..4250fc1 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
@@ -16,15 +16,13 @@
  */
 package org.apache.solr.hadoop;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.lang.reflect.Array;
-import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
-
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -49,13 +47,14 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
 
 @ThreadLeakAction({Action.WARN})
 @ThreadLeakLingering(linger = 0)
@@ -118,8 +117,6 @@
         Boolean.parseBoolean(System.getProperty("tests.disableHdfs", "false")));
     
     assumeFalse("FIXME: This test does not work with Windows because of native library requirements", Constants.WINDOWS);
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
     
     AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
     FileUtils.copyDirectory(MINIMR_CONF_DIR, solrHomeDirectory);
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java
index c43586c..c642c11 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineGoLiveMiniMRTest.java
@@ -16,23 +16,13 @@
  */
 package org.apache.solr.hadoop;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.UnsupportedEncodingException;
-import java.io.Writer;
-import java.lang.reflect.Array;
-import java.net.URI;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -47,9 +37,9 @@
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrQuery.ORDER;
-import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -71,19 +61,26 @@
 import org.apache.solr.hadoop.hack.MiniMRClientCluster;
 import org.apache.solr.hadoop.hack.MiniMRClientClusterFactory;
 import org.apache.solr.morphlines.solr.AbstractSolrMorphlineTestBase;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.UnsupportedEncodingException;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.net.URI;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 @ThreadLeakAction({Action.WARN})
 @ThreadLeakLingering(linger = 0)
@@ -120,10 +117,9 @@
     this.inputAvroFile1 = "sample-statuses-20120521-100919.avro";
     this.inputAvroFile2 = "sample-statuses-20120906-141433.avro";
     this.inputAvroFile3 = "sample-statuses-20120906-141433-medium.avro";
-    
-    fixShardCount = true;
+
     sliceCount = TEST_NIGHTLY ? 7 : 3;
-    shardCount = TEST_NIGHTLY ? 7 : 3;
+    fixShardCount(TEST_NIGHTLY ? 7 : 3);
   }
   
   @BeforeClass
@@ -141,8 +137,6 @@
         Boolean.parseBoolean(System.getProperty("tests.disableHdfs", "false")));
     
     assumeFalse("FIXME: This test does not work with Windows because of native library requirements", Constants.WINDOWS);
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
     
     AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
     FileUtils.copyDirectory(MINIMR_INSTANCE_DIR, AbstractZkTestCase.SOLRHOME);
@@ -199,9 +193,8 @@
   }
   
   @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("host", "127.0.0.1");
     System.setProperty("numShards", Integer.toString(sliceCount));
     URI uri = dfsCluster.getFileSystem().getUri();
@@ -210,9 +203,8 @@
   }
   
   @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("host");
     System.clearProperty("numShards");
     System.clearProperty("solr.hdfs.home");
@@ -245,12 +237,6 @@
   }
   
   @Test
-  @Override
-  public void testDistribSearch() throws Exception {
-    super.testDistribSearch();
-  }
-  
-  @Test
   public void testBuildShardUrls() throws Exception {
     // 2x3
     Integer numShards = 2;
@@ -352,9 +338,9 @@
     };
     return concat(head, args); 
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     
     waitForRecoveriesToFinish(false);
     
@@ -383,143 +369,144 @@
     MapReduceIndexerTool tool;
     int res;
     QueryResponse results;
-    HttpSolrClient server = new HttpSolrClient(cloudJettys.get(0).url);
     String[] args = new String[]{};
-
-    args = new String[] {
-        "--solr-home-dir=" + MINIMR_CONF_DIR.getAbsolutePath(),
-        "--output-dir=" + outDir.toString(),
-        "--log4j=" + getFile("log4j.properties").getAbsolutePath(),
-        "--mappers=3",
-        random().nextBoolean() ? "--input-list=" + INPATH.toString() : dataDir.toString(),  
-        "--go-live-threads", Integer.toString(random().nextInt(15) + 1),
-        "--verbose",
-        "--go-live"
-    };
-    args = prependInitialArgs(args);
     List<String> argList = new ArrayList<>();
-    getShardUrlArgs(argList);
-    args = concat(args, argList.toArray(new String[0]));
-    
-    if (true) {
-      tool = new MapReduceIndexerTool();
-      res = ToolRunner.run(jobConf, tool, args);
-      assertEquals(0, res);
-      assertTrue(tool.job.isComplete());
-      assertTrue(tool.job.isSuccessful());
-      results = server.query(new SolrQuery("*:*"));
-      assertEquals(20, results.getResults().getNumFound());
-    }    
-    
-    fs.delete(inDir, true);   
-    fs.delete(outDir, true);  
-    fs.delete(dataDir, true); 
-    assertTrue(fs.mkdirs(inDir));
-    INPATH = upAvroFile(fs, inDir, DATADIR, dataDir, inputAvroFile2);
 
-    args = new String[] {
-        "--solr-home-dir=" + MINIMR_CONF_DIR.getAbsolutePath(),
-        "--output-dir=" + outDir.toString(),
-        "--mappers=3",
-        "--verbose",
-        "--go-live",
-        random().nextBoolean() ? "--input-list=" + INPATH.toString() : dataDir.toString(), 
-        "--go-live-threads", Integer.toString(random().nextInt(15) + 1)
-    };
-    args = prependInitialArgs(args);
-    argList = new ArrayList<>();
-    getShardUrlArgs(argList);
-    args = concat(args, argList.toArray(new String[0]));
-    
-    if (true) {
-      tool = new MapReduceIndexerTool();
-      res = ToolRunner.run(jobConf, tool, args);
-      assertEquals(0, res);
-      assertTrue(tool.job.isComplete());
-      assertTrue(tool.job.isSuccessful());      
-      results = server.query(new SolrQuery("*:*"));
-      
-      assertEquals(22, results.getResults().getNumFound());
-    }    
-    
-    // try using zookeeper
-    String collection = "collection1";
-    if (random().nextBoolean()) {
-      // sometimes, use an alias
-      createAlias("updatealias", "collection1");
-      collection = "updatealias";
-    }
-    
-    fs.delete(inDir, true);   
-    fs.delete(outDir, true);  
-    fs.delete(dataDir, true);    
-    INPATH = upAvroFile(fs, inDir, DATADIR, dataDir, inputAvroFile3);
+    try (HttpSolrClient server = new HttpSolrClient(cloudJettys.get(0).url)) {
 
-    cloudClient.deleteByQuery("*:*");
-    cloudClient.commit();
-    assertEquals(0, cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound());      
+      args = new String[]{
+          "--solr-home-dir=" + MINIMR_CONF_DIR.getAbsolutePath(),
+          "--output-dir=" + outDir.toString(),
+          "--log4j=" + getFile("log4j.properties").getAbsolutePath(),
+          "--mappers=3",
+          random().nextBoolean() ? "--input-list=" + INPATH.toString() : dataDir.toString(),
+          "--go-live-threads", Integer.toString(random().nextInt(15) + 1),
+          "--verbose",
+          "--go-live"
+      };
+      args = prependInitialArgs(args);
+      getShardUrlArgs(argList);
+      args = concat(args, argList.toArray(new String[0]));
 
-    args = new String[] {
-        "--output-dir=" + outDir.toString(),
-        "--mappers=3",
-        "--reducers=12",
-        "--fanout=2",
-        "--verbose",
-        "--go-live",
-        random().nextBoolean() ? "--input-list=" + INPATH.toString() : dataDir.toString(), 
-        "--zk-host", zkServer.getZkAddress(), 
-        "--collection", collection
-    };
-    args = prependInitialArgs(args);
+      if (true) {
+        tool = new MapReduceIndexerTool();
+        res = ToolRunner.run(jobConf, tool, args);
+        assertEquals(0, res);
+        assertTrue(tool.job.isComplete());
+        assertTrue(tool.job.isSuccessful());
+        results = server.query(new SolrQuery("*:*"));
+        assertEquals(20, results.getResults().getNumFound());
+      }
 
-    if (true) {
-      tool = new MapReduceIndexerTool();
-      res = ToolRunner.run(jobConf, tool, args);
-      assertEquals(0, res);
-      assertTrue(tool.job.isComplete());
-      assertTrue(tool.job.isSuccessful());
-      
-      SolrDocumentList resultDocs = executeSolrQuery(cloudClient, "*:*");      
-      assertEquals(RECORD_COUNT, resultDocs.getNumFound());
-      assertEquals(RECORD_COUNT, resultDocs.size());
-      
-      // perform updates
-      for (int i = 0; i < RECORD_COUNT; i++) {
+      fs.delete(inDir, true);
+      fs.delete(outDir, true);
+      fs.delete(dataDir, true);
+      assertTrue(fs.mkdirs(inDir));
+      INPATH = upAvroFile(fs, inDir, DATADIR, dataDir, inputAvroFile2);
+
+      args = new String[]{
+          "--solr-home-dir=" + MINIMR_CONF_DIR.getAbsolutePath(),
+          "--output-dir=" + outDir.toString(),
+          "--mappers=3",
+          "--verbose",
+          "--go-live",
+          random().nextBoolean() ? "--input-list=" + INPATH.toString() : dataDir.toString(),
+          "--go-live-threads", Integer.toString(random().nextInt(15) + 1)
+      };
+      args = prependInitialArgs(args);
+
+      getShardUrlArgs(argList);
+      args = concat(args, argList.toArray(new String[0]));
+
+      if (true) {
+        tool = new MapReduceIndexerTool();
+        res = ToolRunner.run(jobConf, tool, args);
+        assertEquals(0, res);
+        assertTrue(tool.job.isComplete());
+        assertTrue(tool.job.isSuccessful());
+        results = server.query(new SolrQuery("*:*"));
+
+        assertEquals(22, results.getResults().getNumFound());
+      }
+
+      // try using zookeeper
+      String collection = "collection1";
+      if (random().nextBoolean()) {
+        // sometimes, use an alias
+        createAlias("updatealias", "collection1");
+        collection = "updatealias";
+      }
+
+      fs.delete(inDir, true);
+      fs.delete(outDir, true);
+      fs.delete(dataDir, true);
+      INPATH = upAvroFile(fs, inDir, DATADIR, dataDir, inputAvroFile3);
+
+      cloudClient.deleteByQuery("*:*");
+      cloudClient.commit();
+      assertEquals(0, cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+
+      args = new String[]{
+          "--output-dir=" + outDir.toString(),
+          "--mappers=3",
+          "--reducers=12",
+          "--fanout=2",
+          "--verbose",
+          "--go-live",
+          random().nextBoolean() ? "--input-list=" + INPATH.toString() : dataDir.toString(),
+          "--zk-host", zkServer.getZkAddress(),
+          "--collection", collection
+      };
+      args = prependInitialArgs(args);
+
+      if (true) {
+        tool = new MapReduceIndexerTool();
+        res = ToolRunner.run(jobConf, tool, args);
+        assertEquals(0, res);
+        assertTrue(tool.job.isComplete());
+        assertTrue(tool.job.isSuccessful());
+
+        SolrDocumentList resultDocs = executeSolrQuery(cloudClient, "*:*");
+        assertEquals(RECORD_COUNT, resultDocs.getNumFound());
+        assertEquals(RECORD_COUNT, resultDocs.size());
+
+        // perform updates
+        for (int i = 0; i < RECORD_COUNT; i++) {
           SolrDocument doc = resultDocs.get(i);
           SolrInputDocument update = new SolrInputDocument();
           for (Map.Entry<String, Object> entry : doc.entrySet()) {
-              update.setField(entry.getKey(), entry.getValue());
+            update.setField(entry.getKey(), entry.getValue());
           }
           update.setField("user_screen_name", "Nadja" + i);
           update.removeField("_version_");
           cloudClient.add(update);
-      }
-      cloudClient.commit();
-      
-      // verify updates
-      SolrDocumentList resultDocs2 = executeSolrQuery(cloudClient, "*:*");   
-      assertEquals(RECORD_COUNT, resultDocs2.getNumFound());
-      assertEquals(RECORD_COUNT, resultDocs2.size());
-      for (int i = 0; i < RECORD_COUNT; i++) {
+        }
+        cloudClient.commit();
+
+        // verify updates
+        SolrDocumentList resultDocs2 = executeSolrQuery(cloudClient, "*:*");
+        assertEquals(RECORD_COUNT, resultDocs2.getNumFound());
+        assertEquals(RECORD_COUNT, resultDocs2.size());
+        for (int i = 0; i < RECORD_COUNT; i++) {
           SolrDocument doc = resultDocs.get(i);
           SolrDocument doc2 = resultDocs2.get(i);
           assertEquals(doc.getFirstValue("id"), doc2.getFirstValue("id"));
           assertEquals("Nadja" + i, doc2.getFirstValue("user_screen_name"));
           assertEquals(doc.getFirstValue("text"), doc2.getFirstValue("text"));
-          
+
           // perform delete
-          cloudClient.deleteById((String)doc.getFirstValue("id"));
+          cloudClient.deleteById((String) doc.getFirstValue("id"));
+        }
+        cloudClient.commit();
+
+        // verify deletes
+        assertEquals(0, executeSolrQuery(cloudClient, "*:*").size());
       }
+
+      cloudClient.deleteByQuery("*:*");
       cloudClient.commit();
-      
-      // verify deletes
-      assertEquals(0, executeSolrQuery(cloudClient, "*:*").size());
-    }    
-    
-    cloudClient.deleteByQuery("*:*");
-    cloudClient.commit();
-    assertEquals(0, cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound());      
-    server.shutdown();
+      assertEquals(0, cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+    }
     
     // try using zookeeper with replication
     String replicatedCollection = "replicated_collection";
@@ -693,7 +680,7 @@
   }
 
   private void getShardUrlArgs(List<String> args) {
-    for (int i = 0; i < shardCount; i++) {
+    for (int i = 0; i < getShardCount(); i++) {
       args.add("--shard-url");
       args.add(cloudJettys.get(i).url);
     }
@@ -706,25 +693,24 @@
   }
 
   private void checkConsistency(String replicatedCollection)
-      throws SolrServerException {
+      throws Exception {
     Collection<Slice> slices = cloudClient.getZkStateReader().getClusterState()
         .getSlices(replicatedCollection);
     for (Slice slice : slices) {
       Collection<Replica> replicas = slice.getReplicas();
       long found = -1;
       for (Replica replica : replicas) {
-        HttpSolrClient client = new HttpSolrClient(
-            new ZkCoreNodeProps(replica).getCoreUrl());
-        SolrQuery query = new SolrQuery("*:*");
-        query.set("distrib", false);
-        QueryResponse replicaResults = client.query(query);
-        long count = replicaResults.getResults().getNumFound();
-        if (found != -1) {
-          assertEquals(slice.getName() + " is inconsistent "
-              + new ZkCoreNodeProps(replica).getCoreUrl(), found, count);
+        try (HttpSolrClient client = new HttpSolrClient(new ZkCoreNodeProps(replica).getCoreUrl())) {
+          SolrQuery query = new SolrQuery("*:*");
+          query.set("distrib", false);
+          QueryResponse replicaResults = client.query(query);
+          long count = replicaResults.getResults().getNumFound();
+          if (found != -1) {
+            assertEquals(slice.getName() + " is inconsistent "
+                + new ZkCoreNodeProps(replica).getCoreUrl(), found, count);
+          }
+          found = count;
         }
-        found = count;
-        client.shutdown();
       }
     }
   }
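
A minimal sketch of the per-replica consistency check that checkConsistency now performs with try-with-resources (class name and core URLs are placeholders for new ZkCoreNodeProps(replica).getCoreUrl()): querying each core with distrib=false returns only that replica's local count, so matching counts across a slice's replicas indicate a consistent slice.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

public class ReplicaConsistencyExample {
  public static void main(String[] args) throws Exception {
    String[] coreUrls = {
        "http://127.0.0.1:8983/solr/collection1_shard1_replica1",
        "http://127.0.0.1:7574/solr/collection1_shard1_replica2"
    };
    long found = -1;
    for (String url : coreUrls) {
      try (HttpSolrClient client = new HttpSolrClient(url)) {
        SolrQuery query = new SolrQuery("*:*");
        query.set("distrib", false); // ask only this replica, not the whole collection
        long count = client.query(query).getResults().getNumFound();
        if (found != -1 && found != count) {
          throw new AssertionError("slice is inconsistent at " + url);
        }
        found = count;
      }
    }
  }
}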
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineMapperTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineMapperTest.java
index fed109f..3aa08a5 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineMapperTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineMapperTest.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.hadoop;
 
+import java.net.URLEncoder;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -34,8 +35,6 @@
   @BeforeClass
   public static void beforeClass() {
     assumeFalse("Does not work on Windows, because it uses UNIX shell commands or POSIX paths", Constants.WINDOWS);
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
   }
   
   @Test
@@ -46,7 +45,9 @@
     Configuration config = mapDriver.getConfiguration();
     setupHadoopConfig(config);
 
-    mapDriver.withInput(new LongWritable(0L), new Text("hdfs://localhost/" + DOCUMENTS_DIR + "/sample-statuses-20120906-141433.avro"));
+    mapDriver.withInput(new LongWritable(0L), new Text("hdfs://localhost/" +
+        URLEncoder.encode(DOCUMENTS_DIR, "UTF-8").replace("+", "%20") +
+        "/sample-statuses-20120906-141433.avro"));
 
     SolrInputDocument sid = new SolrInputDocument();
     sid.addField("id", "uniqueid1");
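
A minimal sketch of the encoding step introduced above (the directory value is a placeholder standing in for DOCUMENTS_DIR): URLEncoder produces form encoding, which turns a space into '+', so the extra replace("+", "%20") is needed for the value to remain valid inside a URI path.

import java.net.URLEncoder;

public class HdfsPathEncodeExample {
  public static void main(String[] args) throws Exception {
    String dir = "test documents"; // placeholder directory containing a space
    String encoded = URLEncoder.encode(dir, "UTF-8").replace("+", "%20");
    // prints: hdfs://localhost/test%20documents/sample-statuses-20120906-141433.avro
    System.out.println("hdfs://localhost/" + encoded + "/sample-statuses-20120906-141433.avro");
  }
}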
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java
index 663ff2a..89a5110 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineReducerTest.java
@@ -48,8 +48,6 @@
   @BeforeClass
   public static void beforeClass2() {
     assumeFalse("Does not work on Windows, because it uses UNIX shell commands or POSIX paths", Constants.WINDOWS);
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
     
     System.setProperty("verifyPartitionAssignment", "false");
   }
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/UtilsForTests.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/UtilsForTests.java
index 17f3ae9..bc5148f 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/UtilsForTests.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/UtilsForTests.java
@@ -16,11 +16,6 @@
  */
 package org.apache.solr.hadoop;
 
-import static org.junit.Assert.assertEquals;
-
-import java.io.File;
-import java.io.IOException;
-
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -29,6 +24,11 @@
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.client.solrj.response.QueryResponse;
 
+import java.io.File;
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+
 
 public class UtilsForTests {
   
@@ -40,17 +40,13 @@
     for (FileStatus dir : fs.listStatus(outDir)) { // for each shard
       if (dir.getPath().getName().startsWith("part") && dir.isDirectory()) {
         actualShards++;
-        EmbeddedSolrServer solr = SolrRecordWriter.createEmbeddedSolrServer(
-            new Path(solrHomeDir.getAbsolutePath()), fs, dir.getPath());
-        
-        try {
+        try (EmbeddedSolrServer solr
+                 = SolrRecordWriter.createEmbeddedSolrServer(new Path(solrHomeDir.getAbsolutePath()), fs, dir.getPath())) {
           SolrQuery query = new SolrQuery();
           query.setQuery("*:*");
           QueryResponse resp = solr.query(query);
           long numDocs = resp.getResults().getNumFound();
           actualDocs += numDocs;
-        } finally {
-          solr.shutdown();
         }
       }
     }
diff --git a/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java b/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
index 23e9bf6..188b379 100644
--- a/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
+++ b/solr/contrib/morphlines-cell/src/java/org/apache/solr/morphlines/cell/SolrCellBuilder.java
@@ -245,11 +245,7 @@
 
         try {
           parser.parse(inputStream, parsingHandler, metadata, parseContext);
-        } catch (IOException e) {
-          throw new MorphlineRuntimeException("Cannot parse", e);
-        } catch (SAXException e) {
-          throw new MorphlineRuntimeException("Cannot parse", e);
-        } catch (TikaException e) {
+        } catch (IOException | TikaException | SAXException e) {
           throw new MorphlineRuntimeException("Cannot parse", e);
         }
       } finally {
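
A minimal standalone sketch of the Java 7 multi-catch applied in this hunk and in the morphlines-core hunks below (the exception types and class are illustrative): when several catch blocks share an identical body, a single clause listing the alternatives replaces them, and the caught variable is implicitly final with the nearest common supertype.

import java.io.IOException;

public class MultiCatchExample {
  public static void main(String[] args) {
    try {
      mayFail(args.length > 0);
    } catch (IOException | InterruptedException e) {
      // one handler for both alternatives, mirroring the collapsed catch blocks above
      System.out.println("caught: " + e);
    }
  }

  static void mayFail(boolean io) throws IOException, InterruptedException {
    if (io) throw new IOException("simulated I/O failure");
    throw new InterruptedException("simulated interruption");
  }
}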
diff --git a/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java b/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
index 984d88d..aead56c 100644
--- a/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
+++ b/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
@@ -35,7 +35,6 @@
 import org.junit.Test;
 import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 
-@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-6489")
 public class SolrCellMorphlineTest extends AbstractSolrMorphlineTestBase {
 
   private Map<String,Integer> expectedRecords = new HashMap<>();
@@ -140,6 +139,7 @@
   }
   
   @Test
+  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-6489")
   public void testSolrCellJPGCompressed() throws Exception {
     morphline = createMorphline("test-morphlines" + File.separator + "solrCellJPGCompressed");    
     String path = RESOURCES_DIR + File.separator + "test-documents" + File.separator;
@@ -163,6 +163,7 @@
   }  
 
   @Test
+  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-6489")
   public void testSolrCellDocumentTypes() throws Exception {
     AbstractSolrMorphlineTestBase.setupMorphline(tempDir, "test-morphlines/solrCellDocumentTypes", false);
     
diff --git a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java
index 0b31f54..2c2660b 100644
--- a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java
+++ b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/LoadSolrBuilder.java
@@ -85,35 +85,27 @@
         if (event == Notifications.LifecycleEvent.BEGIN_TRANSACTION) {
           try {
             loader.beginTransaction();
-          } catch (SolrServerException e) {
-            throw new MorphlineRuntimeException(e);
-          } catch (IOException e) {
+          } catch (SolrServerException | IOException e) {
             throw new MorphlineRuntimeException(e);
           }
         } else if (event == Notifications.LifecycleEvent.COMMIT_TRANSACTION) {
           try {
             loader.commitTransaction();
-          } catch (SolrServerException e) {
-            throw new MorphlineRuntimeException(e);
-          } catch (IOException e) {
+          } catch (SolrServerException | IOException e) {
             throw new MorphlineRuntimeException(e);
           }
         }
         else if (event == Notifications.LifecycleEvent.ROLLBACK_TRANSACTION) {
           try {
             loader.rollbackTransaction();
-          } catch (SolrServerException e) {
-            throw new MorphlineRuntimeException(e);
-          } catch (IOException e) {
+          } catch (SolrServerException | IOException e) {
             throw new MorphlineRuntimeException(e);
           }
         }
         else if (event == Notifications.LifecycleEvent.SHUTDOWN) {
           try {
             loader.shutdown();
-          } catch (SolrServerException e) {
-            throw new MorphlineRuntimeException(e);
-          } catch (IOException e) {
+          } catch (SolrServerException | IOException e) {
             throw new MorphlineRuntimeException(e);
           }
         }
@@ -127,9 +119,7 @@
       SolrInputDocument doc = convert(record);
       try {
         loader.load(doc);
-      } catch (IOException e) {
-        throw new MorphlineRuntimeException(e);
-      } catch (SolrServerException e) {
+      } catch (IOException | SolrServerException e) {
         throw new MorphlineRuntimeException(e);
       } finally {
         timerContext.stop();
diff --git a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrClientDocumentLoader.java b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrClientDocumentLoader.java
index d9c8cc3..c52863f 100644
--- a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrClientDocumentLoader.java
+++ b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrClientDocumentLoader.java
@@ -105,9 +105,9 @@
   }
 
   @Override
-  public void shutdown() {
+  public void shutdown() throws IOException {
     LOGGER.trace("shutdown");
-    client.shutdown();
+    client.close();
   }
 
   @Override
diff --git a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java
index e4f65c4..10a80ff 100644
--- a/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java
+++ b/solr/contrib/morphlines-core/src/java/org/apache/solr/morphlines/solr/SolrLocator.java
@@ -140,11 +140,7 @@
           downloadedSolrHomeDir = Files.createTempDir();
           downloadedSolrHomeDir = zki.downloadConfigDir(zkClient, configName, downloadedSolrHomeDir);
           mySolrHomeDir = downloadedSolrHomeDir.getAbsolutePath();
-        } catch (KeeperException e) {
-          throw new MorphlineCompilationException("Cannot download schema.xml from ZooKeeper", config, e);
-        } catch (InterruptedException e) {
-          throw new MorphlineCompilationException("Cannot download schema.xml from ZooKeeper", config, e);
-        } catch (IOException e) {
+        } catch (KeeperException | InterruptedException | IOException e) {
           throw new MorphlineCompilationException("Cannot download schema.xml from ZooKeeper", config, e);
         } finally {
           zkClient.close();
@@ -161,11 +157,7 @@
         IndexSchema schema = new IndexSchema(solrConfig, "schema.xml", is);
         validateSchema(schema);
         return schema;
-      } catch (ParserConfigurationException e) {
-        throw new MorphlineRuntimeException(e);
-      } catch (IOException e) {
-        throw new MorphlineRuntimeException(e);
-      } catch (SAXException e) {
+      } catch (ParserConfigurationException | IOException | SAXException e) {
         throw new MorphlineRuntimeException(e);
       }
     } finally {
diff --git a/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/schema.xml b/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/schema.xml
index aa7fe8f..5a865e1 100644
--- a/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/schema.xml
+++ b/solr/contrib/morphlines-core/src/test-files/solr/minimr/conf/schema.xml
@@ -579,7 +579,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
         Parameters:
diff --git a/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/schema.xml b/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/schema.xml
index 5e28827..68816c2 100644
--- a/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/schema.xml
+++ b/solr/contrib/morphlines-core/src/test-files/solr/mrunit/conf/schema.xml
@@ -579,7 +579,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
         Parameters:
diff --git a/solr/contrib/morphlines-core/src/test-files/solr/solr.xml b/solr/contrib/morphlines-core/src/test-files/solr/solr.xml
index 3c6317e..5c8756e 100644
--- a/solr/contrib/morphlines-core/src/test-files/solr/solr.xml
+++ b/solr/contrib/morphlines-core/src/test-files/solr/solr.xml
@@ -16,18 +16,25 @@
  limitations under the License.
 -->
 
-<!--
- All (relative) paths are relative to the installation path
-  
-  persistent: Save changes made via the API to this file
-  sharedLib: path to a lib directory that will be shared across all cores
--->
-<solr persistent="${solr.xml.persist:false}">
+<solr>
+
+  <solrcloud>
+    <int name="hostPort">${hostPort:8983}</int>
+    <str name="hostContext">${hostContext:solr}</str>
+    <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+    <str name="host">127.0.0.1</str>
+  </solrcloud>
+
+  <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+    <str name="urlScheme">${urlScheme:}</str>
+    <int name="socketTimeout">${socketTimeout:120000}</int>
+    <int name="connTimeout">${connTimeout:15000}</int>
+  </shardHandlerFactory>
 
   <!--
   adminPath: RequestHandler path to manage cores.  
     If 'null' (or absent), cores will not be manageable via request handler
-  -->
+
   <cores adminPath="/admin/cores" defaultCoreName="collection1" host="127.0.0.1" hostPort="${hostPort:8983}" 
          hostContext="${hostContext:solr}" zkClientTimeout="${solr.zkclienttimeout:30000}" numShards="${numShards:3}" shareSchema="${shareSchema:false}" 
          genericCoreNodeNames="${genericCoreNodeNames:true}" leaderVoteWait="0"
@@ -40,5 +47,6 @@
       <int name="connTimeout">${connTimeout:15000}</int>
     </shardHandlerFactory>
   </cores>
+  -->
   
 </solr>
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
index 336f5de..6c10597 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
@@ -16,25 +16,14 @@
  */
 package org.apache.solr.morphlines.solr;
 
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TimeZone;
-import java.util.concurrent.atomic.AtomicInteger;
-
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.base.Joiner;
+import com.google.common.io.Files;
+import com.typesafe.config.Config;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.impl.XMLResponseParser;
@@ -56,10 +45,20 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.codahale.metrics.MetricRegistry;
-import com.google.common.base.Joiner;
-import com.google.common.io.Files;
-import com.typesafe.config.Config;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicInteger;
 
 public class AbstractSolrMorphlineTestBase extends SolrTestCaseJ4 {
   private static Locale savedLocale;
@@ -123,7 +122,7 @@
       ((HttpSolrClient) solrClient).setParser(new XMLResponseParser());
     } else {
       if (TEST_WITH_EMBEDDED_SOLR_SERVER) {
-        solrClient = new EmbeddedTestSolrServer(h.getCoreContainer(), "");
+        solrClient = new EmbeddedTestSolrServer(h.getCoreContainer(), DEFAULT_TEST_CORENAME);
       } else {
         throw new RuntimeException("Not yet implemented");
         //solrServer = new TestSolrServer(getSolrClient());
@@ -140,7 +139,7 @@
   @After
   public void tearDown() throws Exception {
     collector = null;
-    solrClient.shutdown();
+    solrClient.close();
     solrClient = null;
     super.tearDown();
   }
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
index 13e4796..84514e2 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
@@ -17,11 +17,9 @@
 
 package org.apache.solr.morphlines.solr;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Locale;
-
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.collect.ListMultimap;
+import com.typesafe.config.Config;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -29,10 +27,7 @@
 import org.apache.solr.cloud.AbstractZkTestCase;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.cloud.SolrZkClient;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Test;
 import org.kitesdk.morphline.api.Collector;
 import org.kitesdk.morphline.api.Command;
 import org.kitesdk.morphline.api.MorphlineContext;
@@ -42,9 +37,10 @@
 import org.kitesdk.morphline.base.Notifications;
 import org.kitesdk.morphline.stdlib.PipeBuilder;
 
-import com.codahale.metrics.MetricRegistry;
-import com.google.common.collect.ListMultimap;
-import com.typesafe.config.Config;
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Locale;
 
 public abstract class AbstractSolrMorphlineZkTestBase extends AbstractFullDistribZkTestBase {
   private static File solrHomeDirectory;
@@ -62,9 +58,8 @@
   }
   
   public AbstractSolrMorphlineZkTestBase() {
-    fixShardCount = true;
     sliceCount = 3;
-    shardCount = 3;
+    fixShardCount(3);
   }
   
   @BeforeClass
@@ -77,9 +72,8 @@
   }
   
   @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("host", "127.0.0.1");
     System.setProperty("numShards", Integer.toString(sliceCount));
     uploadConfFiles();
@@ -87,19 +81,12 @@
   }
   
   @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("host");
     System.clearProperty("numShards");
   }
   
-  @Test
-  @Override
-  public void testDistribSearch() throws Exception {
-    super.testDistribSearch();
-  }
-  
   @Override
   protected void commit() throws Exception {
     Notifications.notifyCommitTransaction(morphline);    
@@ -155,7 +142,8 @@
   public JettySolrRunner createJetty(File solrHome, String dataDir,
       String shardList, String solrConfigOverride, String schemaOverride)
       throws Exception {
-    
+
+    writeCoreProperties(solrHome.toPath(), DEFAULT_TEST_CORENAME);
     JettySolrRunner jetty = new JettySolrRunner(solrHome.getAbsolutePath(),
         context, 0, solrConfigOverride, schemaOverride, true, null, sslConfig);
 
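A minimal sketch of the migrated test lifecycle used in this file and in the map-reduce tests above (class name and body are illustrative, not from the patch): distribSetUp()/distribTearDown() replace the JUnit setUp()/tearDown() overrides, fixShardCount(n) replaces the old fixShardCount/shardCount fields, and an ordinary @Test method replaces the inherited doTest()/testDistribSearch() pair.

import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.junit.Test;

public class ExampleDistribZkTest extends AbstractFullDistribZkTestBase {

  public ExampleDistribZkTest() {
    sliceCount = 3;
    fixShardCount(3); // replaces the removed fixShardCount/shardCount fields
  }

  @Override
  public void distribSetUp() throws Exception { // replaces @Before setUp()
    super.distribSetUp();
    System.setProperty("numShards", Integer.toString(sliceCount));
  }

  @Override
  public void distribTearDown() throws Exception { // replaces @After tearDown()
    System.clearProperty("numShards");
    super.distribTearDown();
  }

  @Test
  public void test() throws Exception { // replaces the inherited doTest()
    waitForRecoveriesToFinish(false);
    // ... exercise the cluster ...
  }
}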
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/EmbeddedTestSolrServer.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/EmbeddedTestSolrServer.java
index 3bd5390..9599511 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/EmbeddedTestSolrServer.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/EmbeddedTestSolrServer.java
@@ -34,7 +34,7 @@
   }
 
   @Override
-  public void shutdown() {
+  public void close() {
     ; // NOP
   }
 
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineTest.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineTest.java
index d68ad48..64fa2ec 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineTest.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineTest.java
@@ -16,25 +16,16 @@
  */
 package org.apache.solr.morphlines.solr;
 
-import java.io.File;
-import java.util.Arrays;
-
-import org.apache.lucene.util.Constants;
-import org.junit.BeforeClass;
 import org.junit.Test;
-
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.Fields;
 import org.kitesdk.morphline.base.Notifications;
 
+import java.io.File;
+import java.util.Arrays;
+
 public class SolrMorphlineTest extends AbstractSolrMorphlineTestBase {
 
-  @BeforeClass
-  public static void beforeClass2() {
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
-  }
-  
   @Test
   public void testLoadSolrBasic() throws Exception {
     //System.setProperty("ENV_SOLR_HOME", testSolrHome + "/collection1");
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAliasTest.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAliasTest.java
index be5626c..a9ba3a9 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAliasTest.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAliasTest.java
@@ -16,11 +16,13 @@
  */
 package org.apache.solr.morphlines.solr;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.lucene.util.Constants;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -31,17 +33,14 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.junit.BeforeClass;
+import org.junit.Test;
 import org.kitesdk.morphline.api.Record;
 import org.kitesdk.morphline.base.Fields;
 import org.kitesdk.morphline.base.Notifications;
 
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
 
 @ThreadLeakAction({Action.WARN})
 @ThreadLeakLingering(linger = 0)
@@ -49,15 +48,10 @@
 @ThreadLeakScope(Scope.NONE)
 @Slow
 public class SolrMorphlineZkAliasTest extends AbstractSolrMorphlineZkTestBase {
-  
-  @BeforeClass
-  public static void beforeClass2() {
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
-  }
-  
-  @Override
-  public void doTest() throws Exception {
+

+  @Test
+  public void test() throws Exception {
     
     waitForRecoveriesToFinish(false);
     
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java
index 816e827..7fe1759 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkAvroTest.java
@@ -16,28 +16,6 @@
  */
 package org.apache.solr.morphlines.solr;
 
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.avro.Schema.Field;
-import org.apache.avro.file.DataFileReader;
-import org.apache.avro.file.FileReader;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericDatumReader;
-import org.apache.lucene.util.Constants;
-import org.apache.lucene.util.LuceneTestCase.Slow;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-import org.junit.BeforeClass;
-import org.kitesdk.morphline.api.Record;
-import org.kitesdk.morphline.base.Fields;
-import org.kitesdk.morphline.base.Notifications;
-
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
@@ -48,6 +26,27 @@
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.io.Files;
+import org.apache.avro.Schema.Field;
+import org.apache.avro.file.DataFileReader;
+import org.apache.avro.file.FileReader;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.kitesdk.morphline.api.Record;
+import org.kitesdk.morphline.base.Fields;
+import org.kitesdk.morphline.base.Notifications;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
 
 @ThreadLeakAction({Action.WARN})
 @ThreadLeakLingering(linger = 0)
@@ -55,15 +54,10 @@
 @ThreadLeakScope(Scope.NONE)
 @Slow
 public class SolrMorphlineZkAvroTest extends AbstractSolrMorphlineZkTestBase {
-  
-  @BeforeClass
-  public static void beforeClass2() {
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
-  }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     Joiner joiner = Joiner.on(File.separator);
     File file = new File(joiner.join(RESOURCES_DIR, "test-documents", "sample-statuses-20120906-141433-medium.avro"));
     
@@ -138,7 +132,7 @@
     
     Notifications.notifyRollbackTransaction(morphline);
     Notifications.notifyShutdown(morphline);
-    cloudClient.shutdown();
+    cloudClient.close();
   }
   
   private void assertTweetEquals(GenericData.Record expected, Record actual, int i) {
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkTest.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkTest.java
index 69865a6..feb2d6a 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkTest.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/SolrMorphlineZkTest.java
@@ -16,19 +16,6 @@
  */
 package org.apache.solr.morphlines.solr;
 
-import java.io.File;
-import java.util.Iterator;
-
-import org.apache.lucene.util.Constants;
-import org.apache.lucene.util.LuceneTestCase.Slow;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-import org.junit.BeforeClass;
-import org.kitesdk.morphline.api.Record;
-import org.kitesdk.morphline.base.Fields;
-import org.kitesdk.morphline.base.Notifications;
-
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction.Action;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
@@ -36,6 +23,17 @@
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies.Consequence;
+import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+import org.junit.Test;
+import org.kitesdk.morphline.api.Record;
+import org.kitesdk.morphline.base.Fields;
+import org.kitesdk.morphline.base.Notifications;
+
+import java.io.File;
+import java.util.Iterator;
 
 @ThreadLeakAction({Action.WARN})
 @ThreadLeakLingering(linger = 0)
@@ -43,15 +41,10 @@
 @ThreadLeakScope(Scope.NONE)
 @Slow
 public class SolrMorphlineZkTest extends AbstractSolrMorphlineZkTestBase {
-  
-  @BeforeClass
-  public static void beforeClass2() {
-    assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
-    assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
-  }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     
     waitForRecoveriesToFinish(false);
     
@@ -99,7 +92,7 @@
     
     Notifications.notifyRollbackTransaction(morphline);
     Notifications.notifyShutdown(morphline);
-    cloudClient.shutdown();
+    cloudClient.close();
   }
 
 }
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java b/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java
index 8f033b5..864dfc2 100644
--- a/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/SolrParamResourceLoader.java
@@ -31,6 +31,8 @@
 import java.util.Map;
 
 public class SolrParamResourceLoader extends ResourceLoader {
+  public static final String TEMPLATE_PARAM_PREFIX = VelocityResponseWriter.TEMPLATE + ".";
+
   private Map<String,String> templates = new HashMap<>();
   public SolrParamResourceLoader(SolrQueryRequest request) {
     super();
@@ -44,8 +46,8 @@
     while (names.hasNext()) {
       String name = names.next();
       
-      if (name.startsWith("v.template.")) {
-        templates.put(name.substring(11) + ".vm",params.get(name));
+      if (name.startsWith(TEMPLATE_PARAM_PREFIX)) {
+        templates.put(name.substring(TEMPLATE_PARAM_PREFIX.length()) + VelocityResponseWriter.TEMPLATE_EXTENSION,params.get(name));
       }
     }
   }
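
A minimal standalone sketch of the prefix handling above (the constant values mirror VelocityResponseWriter.TEMPLATE, "v.template", and TEMPLATE_EXTENSION, ".vm", from the hunk below; the class is illustrative): a request parameter such as v.template.results maps to an in-memory template named results.vm.

public class TemplateParamExample {
  static final String TEMPLATE_PARAM_PREFIX = "v.template" + ".";
  static final String TEMPLATE_EXTENSION = ".vm";

  public static void main(String[] args) {
    String paramName = "v.template.results";
    if (paramName.startsWith(TEMPLATE_PARAM_PREFIX)) {
      // strip the prefix, append the extension: prints "results.vm"
      System.out.println(paramName.substring(TEMPLATE_PARAM_PREFIX.length()) + TEMPLATE_EXTENSION);
    }
  }
}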
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityLogger.java b/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityLogger.java
new file mode 100644
index 0000000..4cb824a
--- /dev/null
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/SolrVelocityLogger.java
@@ -0,0 +1,115 @@
+package org.apache.solr.response;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.velocity.runtime.RuntimeServices;
+import org.apache.velocity.runtime.log.LogChute;
+import org.slf4j.Logger;
+
+public class SolrVelocityLogger implements LogChute {
+  private final Logger log;
+
+  public SolrVelocityLogger(Logger log) {
+    this.log = log;
+  }
+
+  @Override
+  public void init(RuntimeServices runtimeServices) throws Exception {
+  }
+
+  @Override
+  public void log(int level, String message) {
+    switch(level) {
+      case LogChute.TRACE_ID:
+        log.trace(message);
+        break;
+
+      case LogChute.DEBUG_ID:
+        log.debug(message);
+        break;
+
+      case LogChute.INFO_ID:
+        log.info(message);
+        break;
+
+      case LogChute.WARN_ID:
+        log.warn(message);
+        break;
+
+      case LogChute.ERROR_ID:
+        log.error(message);
+        break;
+
+      default: // unknown logging level, use warn
+        log.warn(message);
+        break;
+    }
+  }
+
+  @Override
+  public void log(int level, String message, Throwable throwable) {
+    switch(level) {
+      case LogChute.TRACE_ID:
+        log.trace(message, throwable);
+        break;
+
+      case LogChute.DEBUG_ID:
+        log.debug(message, throwable);
+        break;
+
+      case LogChute.INFO_ID:
+        log.info(message, throwable);
+        break;
+
+      case LogChute.WARN_ID:
+        log.warn(message, throwable);
+        break;
+
+      case LogChute.ERROR_ID:
+        log.error(message, throwable);
+        break;
+
+      default: // unknown logging level, use warn
+        log.warn(message, throwable);
+        break;
+    }
+  }
+
+  @Override
+  public boolean isLevelEnabled(int level) {
+    switch(level) {
+      case LogChute.TRACE_ID:
+        return log.isTraceEnabled();
+
+      case LogChute.DEBUG_ID:
+        return log.isDebugEnabled();
+
+      case LogChute.INFO_ID:
+        return log.isInfoEnabled();
+
+      case LogChute.WARN_ID:
+        return log.isWarnEnabled();
+
+      case LogChute.ERROR_ID:
+        return log.isErrorEnabled();
+
+      default:
+        return false;
+    }
+  }
+}
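
A minimal sketch of how a LogChute such as the SolrVelocityLogger above can be attached to a standalone Velocity engine (assumes SolrVelocityLogger is on the classpath; the wiring inside VelocityResponseWriter may differ): Velocity 1.x accepts a LogChute instance under RuntimeConstants.RUNTIME_LOG_LOGSYSTEM, routing engine log output through the wrapped SLF4J logger.

import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.RuntimeConstants;
import org.slf4j.LoggerFactory;

public class VelocityLoggingExample {
  public static void main(String[] args) throws Exception {
    VelocityEngine engine = new VelocityEngine();
    // hand the engine a LogChute instance instead of a logger class name
    engine.setProperty(RuntimeConstants.RUNTIME_LOG_LOGSYSTEM,
        new SolrVelocityLogger(LoggerFactory.getLogger("velocity")));
    engine.init(); // engine messages now flow to the SLF4J logger
  }
}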
diff --git a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
index 892fb67..6828374 100644
--- a/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
+++ b/solr/contrib/velocity/src/java/org/apache/solr/response/VelocityResponseWriter.java
@@ -17,46 +17,187 @@
 
 package org.apache.solr.response;
 
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.StringWriter;
 import java.io.Writer;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Locale;
 import java.util.Properties;
+import java.util.ResourceBundle;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.SolrResponseBase;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.util.plugin.SolrCoreAware;
 import org.apache.velocity.Template;
 import org.apache.velocity.VelocityContext;
 import org.apache.velocity.app.VelocityEngine;
 import org.apache.velocity.runtime.RuntimeConstants;
+import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
+import org.apache.velocity.tools.ConversionUtils;
 import org.apache.velocity.tools.generic.ComparisonDateTool;
+import org.apache.velocity.tools.generic.ContextTool;
 import org.apache.velocity.tools.generic.DisplayTool;
 import org.apache.velocity.tools.generic.EscapeTool;
 import org.apache.velocity.tools.generic.ListTool;
 import org.apache.velocity.tools.generic.MathTool;
 import org.apache.velocity.tools.generic.NumberTool;
+import org.apache.velocity.tools.generic.ResourceTool;
 import org.apache.velocity.tools.generic.SortTool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-public class VelocityResponseWriter implements QueryResponseWriter {
+public class VelocityResponseWriter implements QueryResponseWriter, SolrCoreAware {
+  // init param names, these are _only_ loaded at init time (no per-request control of these)
+  //   - multiple different named writers could be created with different init params
+  public static final String TEMPLATE_BASE_DIR = "template.base.dir";
+  public static final String PARAMS_RESOURCE_LOADER_ENABLED = "params.resource.loader.enabled";
+  public static final String SOLR_RESOURCE_LOADER_ENABLED = "solr.resource.loader.enabled";
+  public static final String PROPERTIES_FILE = "init.properties.file";
 
-  // TODO: maybe pass this Logger to the template for logging from there?
-//  private static final Logger log = LoggerFactory.getLogger(VelocityResponseWriter.class);
+  // request param names
+  public static final String TEMPLATE = "v.template";
+  public static final String LAYOUT = "v.layout";
+  public static final String LAYOUT_ENABLED = "v.layout.enabled";
+  public static final String CONTENT_TYPE = "v.contentType";
+  public static final String JSON = "v.json";
+  public static final String LOCALE = "v.locale";
+
+  public static final String TEMPLATE_EXTENSION = ".vm";
+  public static final String DEFAULT_CONTENT_TYPE = "text/html;charset=UTF-8";
+  public static final String JSON_CONTENT_TYPE = "application/json;charset=UTF-8";
+
+  private File fileResourceLoaderBaseDir;
+  private boolean paramsResourceLoaderEnabled;
+  private boolean solrResourceLoaderEnabled;
+  private String initPropertiesFileName;  // held from init() for use in inform()
+
+  private static final Logger log = LoggerFactory.getLogger(VelocityResponseWriter.class);
+  private static final SolrVelocityLogger velocityLogger = new SolrVelocityLogger(log);
+  private Properties velocityInitProps = new Properties();
+
+  @Override
+  public void init(NamedList args) {
+    fileResourceLoaderBaseDir = null;
+    String templateBaseDir = (String) args.get(TEMPLATE_BASE_DIR);
+
+    if (templateBaseDir != null && !templateBaseDir.isEmpty()) {
+      fileResourceLoaderBaseDir = new File(templateBaseDir).getAbsoluteFile();
+      if (!fileResourceLoaderBaseDir.exists()) { // note the *not* exists condition
+        log.warn(TEMPLATE_BASE_DIR + " specified does not exist: " + fileResourceLoaderBaseDir);
+        fileResourceLoaderBaseDir = null;
+      } else if (!fileResourceLoaderBaseDir.isDirectory()) { // note the *not* a directory condition
+        log.warn(TEMPLATE_BASE_DIR + " specified is not a directory: " + fileResourceLoaderBaseDir);
+        fileResourceLoaderBaseDir = null;
+      }
+    }
+
+    // params resource loader: off by default
+    Boolean prle = args.getBooleanArg(PARAMS_RESOURCE_LOADER_ENABLED);
+    paramsResourceLoaderEnabled = (null == prle ? false : prle);
+
+    // solr resource loader: on by default
+    Boolean srle = args.getBooleanArg(SOLR_RESOURCE_LOADER_ENABLED);
+    solrResourceLoaderEnabled = (null == srle ? true : srle);
+
+    initPropertiesFileName = (String) args.get(PROPERTIES_FILE);
+  }
+
+  @Override
+  public void inform(SolrCore core) {
+    // need to leverage SolrResourceLoader, so load init.properties.file here instead of init()
+    if (initPropertiesFileName != null) {
+      InputStream is = null;
+      try {
+        is = core.getResourceLoader().openResource(initPropertiesFileName);
+        velocityInitProps.load(new InputStreamReader(is, StandardCharsets.UTF_8));
+      } catch (IOException e) {
+        log.warn("Error loading " + PROPERTIES_FILE + " specified property file: " + initPropertiesFileName, e);
+      } finally {
+        if (is != null) {
+          try { is.close(); } catch (IOException ignored) { /* best-effort close */ }
+        }
+      }
+    }
+  }
+
+  @Override
+  public String getContentType(SolrQueryRequest request, SolrQueryResponse response) {
+    String contentType = request.getParams().get(CONTENT_TYPE);
+
+    // Use the v.contentType specified, or either of the default content types depending on the presence of v.json
+    return (contentType != null) ? contentType : ((request.getParams().get(JSON) == null) ? DEFAULT_CONTENT_TYPE : JSON_CONTENT_TYPE);
+  }
 
   @Override
   public void write(Writer writer, SolrQueryRequest request, SolrQueryResponse response) throws IOException {
-    VelocityEngine engine = getEngine(request);  // TODO: have HTTP headers available for configuring engine
+    VelocityEngine engine = createEngine(request);  // TODO: have HTTP headers available for configuring engine
 
     Template template = getTemplate(engine, request);
 
+    VelocityContext context = createContext(request, response);
+    context.put("engine", engine);  // for $engine.resourceExists(...)
+
+    String layoutTemplate = request.getParams().get(LAYOUT);
+    boolean layoutEnabled = request.getParams().getBool(LAYOUT_ENABLED, true) && layoutTemplate != null;
+
+    String jsonWrapper = request.getParams().get(JSON);
+    boolean wrapResponse = layoutEnabled || jsonWrapper != null;
+
+    // create output
+    if (!wrapResponse) {
+      // straight-forward template/context merge to output
+      template.merge(context, writer);
+    } else {
+      // merge to a string buffer, then wrap with layout and finally as JSON
+      StringWriter stringWriter = new StringWriter();
+      template.merge(context, stringWriter);
+
+      if (layoutEnabled) {
+        context.put("content", stringWriter.toString());
+        stringWriter = new StringWriter();
+        try {
+          engine.getTemplate(layoutTemplate + TEMPLATE_EXTENSION).merge(context, stringWriter);
+        } catch (Exception e) {
+          throw new IOException(e.getMessage(), e);
+        }
+      }
+
+      if (jsonWrapper != null) {
+        writer.write(jsonWrapper + "(");
+        writer.write(getJSONWrap(stringWriter.toString()));
+        writer.write(')');
+      } else {  // using a layout, but not JSON wrapping
+        writer.write(stringWriter.toString());
+      }
+    }
+  }
+
+  private VelocityContext createContext(SolrQueryRequest request, SolrQueryResponse response) {
     VelocityContext context = new VelocityContext();
 
     context.put("request", request);
 
+    // Register useful Velocity "tools"
+    context.put("log", log);   // TODO: add test
+    context.put("esc", new EscapeTool());
+    context.put("date", new ComparisonDateTool());
+    context.put("list", new ListTool());
+    context.put("math", new MathTool());
+    context.put("number", new NumberTool());
+    context.put("sort", new SortTool());
+    context.put("display", new DisplayTool());
+    context.put("resource", new SolrVelocityResourceTool(
+        request.getCore().getSolrConfig().getResourceLoader().getClassLoader(),
+        request.getParams().get(LOCALE)));
+
     // Turn the SolrQueryResponse into a SolrResponse.
     // QueryResponse has lots of conveniences suitable for a view
     // Problem is, which SolrResponse class to use?
@@ -65,14 +206,15 @@
     // create a new instance.  But for now the implementation simply
     // uses QueryResponse, and if it chokes in a known way, fall back
     // to bare bones SolrResponseBase.
-    // TODO: Can this writer know what the handler class is?  With echoHandler=true it can get its string name at least
+    // Can this writer know what the handler class is?  With echoHandler=true it can get its string name at least
     SolrResponse rsp = new QueryResponse();
     NamedList<Object> parsedResponse = BinaryResponseWriter.getParsedResponse(request, response);
     try {
       rsp.setResponse(parsedResponse);
 
       // page only injected if QueryResponse works
-      context.put("page", new PageTool(request, response));  // page tool only makes sense for a SearchHandler request... *sigh*
+      context.put("page", new PageTool(request, response));  // page tool only makes sense for a SearchHandler request
+      context.put("debug",((QueryResponse)rsp).getDebugMap());
     } catch (ClassCastException e) {
       // known edge case where QueryResponse's extraction assumes "response" is a SolrDocumentList
       // (AnalysisRequestHandler emits a "response")
@@ -81,81 +223,69 @@
     }
     context.put("response", rsp);
 
-    // Velocity context tools - TODO: make these pluggable
-    context.put("esc", new EscapeTool());
-    context.put("date", new ComparisonDateTool());
-    context.put("list", new ListTool());
-    context.put("math", new MathTool());
-    context.put("number", new NumberTool());
-    context.put("sort", new SortTool());
-    context.put("display", new DisplayTool());
-
-    context.put("engine", engine);  // for $engine.resourceExists(...)
-
-    String layout_template = request.getParams().get("v.layout");
-    String json_wrapper = request.getParams().get("v.json");
-    boolean wrap_response = (layout_template != null) || (json_wrapper != null);
-
-    // create output, optionally wrap it into a json object
-    if (wrap_response) {
-      StringWriter stringWriter = new StringWriter();
-      template.merge(context, stringWriter);
-
-      if (layout_template != null) {
-        context.put("content", stringWriter.toString());
-        stringWriter = new StringWriter();
-        try {
-          engine.getTemplate(layout_template + ".vm").merge(context, stringWriter);
-        } catch (Exception e) {
-          throw new IOException(e.getMessage());
-        }
-      }
-
-      if (json_wrapper != null) {
-        writer.write(request.getParams().get("v.json") + "(");
-        writer.write(getJSONWrap(stringWriter.toString()));
-        writer.write(')');
-      } else {  // using a layout, but not JSON wrapping
-        writer.write(stringWriter.toString());
-      }
-    } else {
-      template.merge(context, writer);
-    }
+    return context;
   }
 
-  private VelocityEngine getEngine(SolrQueryRequest request) {
+  private VelocityEngine createEngine(SolrQueryRequest request) {
     VelocityEngine engine = new VelocityEngine();
 
-    engine.setProperty("params.resource.loader.instance", new SolrParamResourceLoader(request));
-    SolrVelocityResourceLoader resourceLoader =
-        new SolrVelocityResourceLoader(request.getCore().getSolrConfig().getResourceLoader());
-    engine.setProperty("solr.resource.loader.instance", resourceLoader);
-    engine.setProperty(RuntimeConstants.RESOURCE_LOADER, "params,solr");
+    // route all Velocity logging through Solr's logging facility
+    engine.setProperty(RuntimeConstants.RUNTIME_LOG_LOGSYSTEM, velocityLogger);
 
-    // TODO: Externalize Velocity properties
-    String propFile = request.getParams().get("v.properties");
-    try {
-      Properties props = new Properties();
-      // Don't create a separate velocity log file by default.
-      props.put(RuntimeConstants.RUNTIME_LOG, "");
+    // Set some engine properties that improve the experience
+    //   - these could be parameterized in the future, but can already be overridden via
+    //     the init.properties.file setting.  (TODO: add a test that overrides the properties set here)
 
-      if (propFile == null) {
-        engine.init(props);
-      } else {
-        InputStream is = null;
-        try {
-          is = resourceLoader.getResourceStream(propFile);
-          props.load(new InputStreamReader(is, StandardCharsets.UTF_8));
-          engine.init(props);
-        }
-        finally {
-          if (is != null) is.close();
-        }
-      }
+    // load the built-in _macros.vm first, then load VM_global_library.vm for legacy (pre-5.0) support,
+    // and finally allow macros.vm to have the final say and override anything defined in the preceding files.
+    engine.setProperty(RuntimeConstants.VM_LIBRARY, "_macros.vm,VM_global_library.vm,macros.vm");
+
+    // Standard templates autoload by default, but macro libraries do not, so enable autoloading
+    // for macros too, making macro development easier and consistent.
+    engine.setProperty(RuntimeConstants.VM_LIBRARY_AUTORELOAD, "true");
+
+    /*
+      Set up Velocity resource loader(s).
+       Terminology note: "resource loader" is overloaded here. There is Solr's resource loader facility for plugins,
+       and there are Velocity template resource loaders.  The two overlap: one of the Velocity resource
+       loaders loads templates through Solr's resource loader (SolrVelocityResourceLoader).
+
+      The Velocity resource loader order is [params,][file,][solr], intentionally in that order, with each
+      one optional and individually enableable.  By default only the "solr" resource loader is used, loading
+      templates from a velocity/ sub-tree of either the classpath or conf/.
+
+      A common usage would be to enable the file template loader while keeping the solr loader enabled; the Velocity
+      resource loader path would then be "file,solr" (params is disabled by default).  The basic browse templates are
+      built into this plugin, but each can be overridden by placing a same-named template in the directory specified
+      by template.base.dir.
+     */
+    ArrayList<String> loaders = new ArrayList<String>();
+    if (paramsResourceLoaderEnabled) {
+      loaders.add("params");
+      engine.setProperty("params.resource.loader.instance", new SolrParamResourceLoader(request));
     }
-    catch (Exception e) {
-      throw new RuntimeException(e);
+    if (fileResourceLoaderBaseDir != null) {
+      loaders.add("file");
+      engine.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, fileResourceLoaderBaseDir.getAbsolutePath());
     }
+    if (solrResourceLoaderEnabled) {
+      // The solr resource loader serves templates from a velocity/ subtree of <lib>, conf/,
+      // or SolrCloud's configuration tree.  Put another way: other resource loaders are rooted
+      // at the top, whereas this one is rooted at the velocity/ sub-tree.
+      loaders.add("solr");
+      engine.setProperty("solr.resource.loader.instance", new SolrVelocityResourceLoader(request.getCore().getSolrConfig().getResourceLoader()));
+    }
+
+    // Always add the built-in classpath loader.  It is needed for the VM_LIBRARY macro files, which Velocity
+    // requires to exist if specified, and it gives users a nice built-in macros facility that is easy to use
+    // and to extend in custom ways.
+    loaders.add("builtin");
+    engine.setProperty("builtin.resource.loader.instance", new ClasspathResourceLoader());
+
+    engine.setProperty(RuntimeConstants.RESOURCE_LOADER, StringUtils.join(loaders, ','));
+
+    // bring in any custom properties too
+    engine.init(velocityInitProps);
 
     return engine;
   }
@@ -163,18 +293,19 @@
   private Template getTemplate(VelocityEngine engine, SolrQueryRequest request) throws IOException {
     Template template;
 
-    String template_name = request.getParams().get("v.template");
-    String qt = request.getParams().get("qt");
+    String templateName = request.getParams().get(TEMPLATE);
+
+    String qt = request.getParams().get(CommonParams.QT);
     String path = (String) request.getContext().get("path");
-    if (template_name == null && path != null) {
-      template_name = path;
+    if (templateName == null && path != null) {
+      templateName = path;
     }  // TODO: path is never null, so qt won't get picked up; maybe special-case '/select' to use qt, otherwise use path?
-    if (template_name == null && qt != null) {
-      template_name = qt;
+    if (templateName == null && qt != null) {
+      templateName = qt;
     }
-    if (template_name == null) template_name = "index";
+    if (templateName == null) templateName = "index";
     try {
-      template = engine.getTemplate(template_name + ".vm");
+      template = engine.getTemplate(templateName + TEMPLATE_EXTENSION);
     } catch (Exception e) {
       throw new IOException(e.getMessage());
     }
@@ -182,12 +313,7 @@
     return template;
   }
 
-  @Override
-  public String getContentType(SolrQueryRequest request, SolrQueryResponse response) {
-    return request.getParams().get("v.contentType", "text/html;charset=UTF-8");
-  }
-
-  private String getJSONWrap(String xmlResult) {  // TODO: maybe noggit or Solr's JSON utilities can make this cleaner?
+  private String getJSONWrap(String xmlResult) {  // maybe noggit or Solr's JSON utilities can make this cleaner?
     // escape the double quotes and backslashes
     String replace1 = xmlResult.replaceAll("\\\\", "\\\\\\\\");
     replace1 = replace1.replaceAll("\\n", "\\\\n");
@@ -197,7 +323,34 @@
     return "{\"result\":\"" + replaced + "\"}";
   }
 
-  @Override
-  public void init(NamedList args) {
+  // see: http://svn.apache.org/repos/asf/velocity/tools/branches/2.0.x/src/main/java/org/apache/velocity/tools/generic/ResourceTool.java
+  private class SolrVelocityResourceTool extends ResourceTool {
+
+    private final Locale locale;
+    private ClassLoader solrClassLoader;
+
+    public SolrVelocityResourceTool(ClassLoader cl, String localeString) {
+      this.solrClassLoader = cl;
+      Locale l = toLocale(localeString);
+      this.locale = (l == null ? Locale.ROOT : l);
+    }
+
+    @Override
+    protected ResourceBundle getBundle(String baseName, Object loc) {
+      // resource bundles for this tool must be in velocity "package"
+      return ResourceBundle.getBundle("velocity." + baseName, locale, solrClassLoader);
+    }
+
+    // Why did Velocity Tools make this private?  Copied from ResourceTools.java
+    private Locale toLocale(Object obj) {
+      if (obj == null) {
+        return null;
+      }
+      if (obj instanceof Locale) {
+        return (Locale) obj;
+      }
+      String s = String.valueOf(obj);
+      return ConversionUtils.toLocale(s);
+    }
   }
 }
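
The four init params above (template.base.dir, params.resource.loader.enabled, solr.resource.loader.enabled, init.properties.file) are the only knobs read at init()/inform() time; everything else (v.template, v.layout, v.json, v.locale, v.contentType) is per-request. A minimal sketch of initializing the writer directly with a NamedList, in the same style as the tests later in this patch; the template directory argument is a hypothetical placeholder:

import org.apache.solr.common.util.NamedList;
import org.apache.solr.response.VelocityResponseWriter;

public class WriterInitSketch {
  public static VelocityResponseWriter newWriter(String templateDir) {
    VelocityResponseWriter vrw = new VelocityResponseWriter();
    NamedList<String> args = new NamedList<String>();
    args.add(VelocityResponseWriter.TEMPLATE_BASE_DIR, templateDir);          // enables the "file" loader
    args.add(VelocityResponseWriter.PARAMS_RESOURCE_LOADER_ENABLED, "true");  // "params" is off by default
    args.add(VelocityResponseWriter.SOLR_RESOURCE_LOADER_ENABLED, "true");    // "solr" is on by default
    vrw.init(args);
    // createEngine() will then assemble the Velocity loader chain "params,file,solr,builtin"
    return vrw;
  }
}
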
diff --git a/solr/contrib/velocity/src/resources/VM_global_library.vm b/solr/contrib/velocity/src/resources/VM_global_library.vm
new file mode 100644
index 0000000..fd3699b
--- /dev/null
+++ b/solr/contrib/velocity/src/resources/VM_global_library.vm
@@ -0,0 +1,4 @@
+## Legacy support: the writer will load this as a macro library, as it used to.
+## It's best not to have your own file called VM_global_library.vm; put your macros in macros.vm instead.
+## This file is needed for the "builtin" resource loader, as Velocity requires all macro library files to exist;
+## we don't want users to have to keep a macro library file in their template directories.
diff --git a/solr/contrib/velocity/src/resources/_macros.vm b/solr/contrib/velocity/src/resources/_macros.vm
new file mode 100644
index 0000000..c65b0f4
--- /dev/null
+++ b/solr/contrib/velocity/src/resources/_macros.vm
@@ -0,0 +1,32 @@
+#macro(param $key)$request.params.get($key)#end
+
+#macro(url_root)/solr#end
+
+#macro(core_name)$request.core.name#end
+#macro(url_for_solr)#{url_root}#if($request.core.name != "")/$request.core.name#end#end
+#macro(url_for_home)#url_for_solr$request.context.path#end
+
+#macro(q)&q=$!{esc.url($request.params.get('q'))}#end
+
+#macro(fqs $p)#foreach($fq in $p)#if($velocityCount>1)&#{end}fq=$esc.url($fq)#end#end
+
+#macro(debug)#if($debug)&debug=true#end#end
+
+#macro(sort $p)#if($p)#foreach($s in $p)&sort=$esc.url($s)#end#end#end
+
+#macro(lensNoQ)?#if($request.params.getParams('fq') and $list.size($request.params.getParams('fq')) > 0)&#fqs($request.params.getParams('fq'))#end#sort($request.params.getParams('sort'))#debug#end
+#macro(lens)#lensNoQ#q#end
+
+#macro(url_for_lens)#{url_for_home}#lens#end
+
+#macro(url_for_start $start)#url_for_home#lens&start=$start#end
+
+#macro(url_for_filters $p)#url_for_home?#q#if($list.size($p) > 0)&#fqs($p)#end#debug#end
+
+#macro(url_for_nested_facet_query $field)#url_for_home#lens&fq=$esc.url($field)#end
+
+#macro(url_for_facet_filter $field $value)#url_for_home#lens&fq=#if($value!=$null)$esc.url($field):%22$esc.url($value)%22#else-$esc.url($field):[*+TO+*]#end#end
+
+#macro(url_for_facet_date_filter $field $value)#url_for_home#lens&fq=$esc.url($field):$esc.url($value)#end
+
+#macro(url_for_facet_range_filter $field $value)#url_for_home#lens&fq=$esc.url($field):$esc.url($value)#end
diff --git a/solr/contrib/velocity/src/resources/macros.vm b/solr/contrib/velocity/src/resources/macros.vm
new file mode 100644
index 0000000..0018c5f
--- /dev/null
+++ b/solr/contrib/velocity/src/resources/macros.vm
@@ -0,0 +1,3 @@
+## Placeholder for users to specify their own macros; this is the file we want users to override.
+## It is needed for the "builtin" resource loader, as Velocity requires all macro library files to exist.
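
Since VM_LIBRARY lists _macros.vm, VM_global_library.vm, and macros.vm in that order, a macro redefined in a later library replaces the earlier definition, which is what the test_macro_overridden test below exercises. A standalone sketch, illustrative rather than part of the patch, reproducing that ordering against the classpath loader alone:

import java.io.StringWriter;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.RuntimeConstants;
import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;

public class MacroOverrideSketch {
  public static String render(String templateName) {
    VelocityEngine engine = new VelocityEngine();
    engine.setProperty(RuntimeConstants.RESOURCE_LOADER, "builtin");
    engine.setProperty("builtin.resource.loader.instance", new ClasspathResourceLoader());
    // the last library listed has the final say on same-named macros
    engine.setProperty(RuntimeConstants.VM_LIBRARY, "_macros.vm,VM_global_library.vm,macros.vm");
    engine.init();
    StringWriter out = new StringWriter();
    Template template = engine.getTemplate(templateName);
    template.merge(new VelocityContext(), out);
    return out.toString();
  }
}
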
diff --git a/solr/contrib/velocity/src/resources/velocity/VM_global_library.vm b/solr/contrib/velocity/src/resources/velocity/VM_global_library.vm
deleted file mode 100644
index cf5ee0f..0000000
--- a/solr/contrib/velocity/src/resources/velocity/VM_global_library.vm
+++ /dev/null
@@ -1,82 +0,0 @@
-#**
- *  Global macros used by other templates.
- *  This file must be named VM_global_library.vm
- *  in order for Velocity to find it.
- *#
-
-#macro(param $key)$request.params.get($key)#end
-
-#macro(url_root)/solr#end
-
-## TODO: s/url_for_solr/url_for_core/ and s/url_root/url_for_solr/
-#macro(core_name)$request.core.name#end
-#macro(url_for_solr)#{url_root}#if($request.core.name != "")/$request.core.name#end#end
-#macro(url_for_home)#url_for_solr/browse#end   ## TODO:   What request handler path to use for _home?
-
-#macro(q)&q=$!{esc.url($params.get('q'))}#end
-
-#macro(fqs $p)#foreach($fq in $p)#if($velocityCount>1)&#{end}fq=$esc.url($fq)#end#end
-
-#macro(debug)#if($request.params.get('debugQuery'))&debugQuery=true#end#end ## TODO: leverage actual debug mode setting, or at least also support debug=all/query/etc
-
-#macro(sort $p)#if($p)#foreach($s in $p)&sort=$esc.url($s)#end#end#end
-
-#macro(lensNoQ)?#if($request.params.getParams('fq') and $list.size($request.params.getParams('fq')) > 0)&#fqs($request.params.getParams('fq'))#end#sort($request.params.getParams('sort'))#debug#end
-#macro(lens)#lensNoQ#q#end
-
-#macro(url_for_lens)#{url_for_home}#lens#end
-
-#macro(url_for_start $start)#url_for_home#lens&start=$start#end
-
-#macro(url_for_filters $p)#url_for_home?#q#if($list.size($p) > 0)&#fqs($p)#end#debug#end
-
-#macro(url_for_nested_facet_query $field)#url_for_home#lens&fq=$esc.url($field)#end
-
-## TODO: convert to use {!raw f=$field}$value (with escaping of course)
-#macro(url_for_facet_filter $field $value)#url_for_home#lens&fq=#if($value!=$null)$esc.url($field):%22$esc.url($value)%22#else-$esc.url($field):[*+TO+*]#end#end
-
-#macro(url_for_facet_date_filter $field $value)#url_for_home#lens&fq=$esc.url($field):$esc.url($value)#end
-
-#macro(url_for_facet_range_filter $field $value)#url_for_home#lens&fq=$esc.url($field):$esc.url($value)#end
-
-
-#macro(link_to_previous_page $text)
-  #if($page.current_page_number > 1)
-    #set($prev_start = $page.start - $page.results_per_page)
-    <a class="prev-page" href="#url_for_start($prev_start)">$text</a>
-  #end
-#end
-
-#macro(link_to_next_page $text)
-  #if($page.current_page_number < $page.page_count)
-    #set($next_start = $page.start + $page.results_per_page)
-    <a class="next-page" href="#url_for_start($next_start)">$text</a>
-  #end
-#end
-
-#macro(link_to_page $page_number $text)
-  #if($page_number == $page.current_page_number)
-    $text
-  #else
-    #if($page_number <= $page.page_count)
-      #set($page_start = $page_number * $page.results_per_page - $page.results_per_page)
-      <a class="page" href="#url_for_start($page_start)">$text</a>
-    #end
-  #end
-#end
-
-
-#macro(field $f)
-  #if($response.response.highlighting.get($docId).get($f).get(0))
-    #set($pad = "")
-    #foreach($v in $response.response.highlighting.get($docId).get($f))
-$pad$v##
-      #set($pad = " ... ")
-    #end
-  #else
-    #foreach($v in $doc.getFieldValues($f))
-$v##
-    #end
-  #end
-#end  
-
diff --git a/solr/contrib/velocity/src/resources/velocity/browse.vm b/solr/contrib/velocity/src/resources/velocity/browse.vm
index 9d24d25..b651295 100644
--- a/solr/contrib/velocity/src/resources/velocity/browse.vm
+++ b/solr/contrib/velocity/src/resources/velocity/browse.vm
@@ -1,4 +1,8 @@
-#set($params = $request.params)
+#* TODO:
+  - Make search box bigger
+  - Add in pivot and other facets?
+  - Work on template default selection logic
+*#
 
 ## Show Error Message, if any
 <div class="error">
@@ -7,11 +11,11 @@
 
 <div class="query-box">
   <form id="query-form" action="#{url_for_home}" method="GET">
-    Find:
-    <input type="text" id="q" name="q" value="$!esc.html($params.get('q'))"/>
+    $resource.find:
+    <input type="text" id="q" name="q" value="$!esc.html($request.params.get('q'))"/>
     <input type="submit"/>
 
-    #if($request.params.get('debugQuery'))
+    #if($debug) ## TODO: this would automatically happen when arbitrary parameters are kept on URLs
       <input type="hidden" name="debug" value="true"/>
     #end
     #foreach($fq in $request.params.getParams('fq'))
@@ -19,7 +23,7 @@
     #end
 
     <div class="constraints">
-      #foreach($fq in $params.getParams('fq'))
+      #foreach($fq in $request.params.getParams('fq'))
         #set($previous_fq_count=$velocityCount - 1)
         #if($fq != '')
           &gt;
@@ -29,7 +33,7 @@
     </div>
 
     <div class="parsed_query_header">
-      #if($request.params.get('debugQuery'))
+      #if($debug)
         <a href="#" onclick='jQuery(this).siblings("div").toggle(); return false;'>toggle parsed query</a>
         <div class="parsed_query" style="display:none">$response.response.debug.parsedquery</div>
       #end
@@ -46,11 +50,10 @@
   <span>
     <span class="results-found">$page.results_found</span>
     results found in
-    ${response.responseHeader.QTime} ms
+    ${response.responseHeader.QTime}ms
   </span>
 
-  Page <span class="page-num">$page.current_page_number</span>
-    of <span class="page-count">$page.page_count</span>
+  $resource.page_of.insert($page.current_page_number,$page.page_count)
 </div>
 
 ## Render Results, actual matching docs
@@ -59,13 +62,12 @@
 </div>
 
 <div class="pagination">
-  #link_to_previous_page("previous")
+  #link_to_previous_page
 
   <span class="results-found">$page.results_found</span>
   results found.
 
-  Page <span class="page-num">$page.current_page_number</span>
-    of <span class="page-count">$page.page_count</span>
+  $resource.page_of.insert($page.current_page_number,$page.page_count)
 
-  #link_to_next_page("next")
+  #link_to_next_page
 </div>
diff --git a/solr/contrib/velocity/src/resources/velocity/debug.vm b/solr/contrib/velocity/src/resources/velocity/debug.vm
deleted file mode 100644
index 6984be1..0000000
--- a/solr/contrib/velocity/src/resources/velocity/debug.vm
+++ /dev/null
@@ -1,26 +0,0 @@
-#**
- *  Show Debugging Information, if enabled
- *#
-
-#if( $params.getBool("debugQuery",false) )
-  <a href="#" onclick='jQuery(this).siblings("pre").toggle(); return false;'>toggle explain</a>
-
-  <pre style="display:none">
-    $response.getExplainMap().get($doc.getFirstValue('id'))
-  </pre>
-
-  <a href="#" onclick='jQuery(this).siblings("pre2").toggle(); return false;'>toggle all fields</a>
-
-  <pre2 style="display:none">
-    #foreach($fieldname in $doc.fieldNames)
-      <br>
-        <span class="field-name">$fieldname :</span>
-        <span>
-          #foreach($value in $doc.getFieldValues($fieldname))
-            $esc.html($value)
-          #end
-        </span>
-      </br>
-    #end
-  </pre2>
-#end
diff --git a/solr/contrib/velocity/src/resources/velocity/error.vm b/solr/contrib/velocity/src/resources/velocity/error.vm
index 80b5819..1c4d1dc 100644
--- a/solr/contrib/velocity/src/resources/velocity/error.vm
+++ b/solr/contrib/velocity/src/resources/velocity/error.vm
@@ -1,10 +1,3 @@
-#**
- *  Show Error Message, if any
- *#
-
-## Show Error Message, if any
-## Usually rendered inside div class=error
-
 #if( $response.response.error.code )
   <h1>ERROR $response.response.error.code</h1>
   $response.response.error.msg
diff --git a/solr/contrib/velocity/src/resources/velocity/footer.vm b/solr/contrib/velocity/src/resources/velocity/footer.vm
index 09eb8b5..cb0954c 100644
--- a/solr/contrib/velocity/src/resources/velocity/footer.vm
+++ b/solr/contrib/velocity/src/resources/velocity/footer.vm
@@ -2,14 +2,14 @@
 <div>
   <span>Options:</span>
 
-  #if($request.params.get('debugQuery'))
+  #if($debug)
     <a href="#url_for_home?#q#if($list.size($request.params.getParams('fq')) > 0)&#fqs($request.params.getParams('fq'))#end">
       disable debug</a>
   #else
     <a href="#url_for_lens&debug=true&fl=*,score">enable debug</a>
   #end
   -
-  <a href="#url_for_lens&wt=xml#if($request.params.get('debugQuery'))&debug=true#end">XML results</a>
+  <a href="#url_for_lens&wt=xml#if($debug)&debug=true#end">XML results</a> ## TODO: Add links for other formats, maybe dynamically?
 
 </div>
 
diff --git a/solr/contrib/velocity/src/resources/velocity/head.vm b/solr/contrib/velocity/src/resources/velocity/head.vm
index a8dd524..38e5f0c 100644
--- a/solr/contrib/velocity/src/resources/velocity/head.vm
+++ b/solr/contrib/velocity/src/resources/velocity/head.vm
@@ -3,9 +3,12 @@
  *#
 
   <title>Solr browse: #core_name</title>
+
   <meta http-equiv="content-type" content="text/html; charset=UTF-8"/>
+
   <link rel="icon" type="image/x-icon" href="#{url_root}/img/favicon.ico"/>
   <link rel="shortcut icon" type="image/x-icon" href="#{url_root}/img/favicon.ico"/>
+
   <script type="text/javascript" src="#{url_root}/js/lib/jquery-1.7.2.min.js"></script>
 
   <style>
@@ -17,17 +20,6 @@
     #head{
       width: 100%;
     }
-    .array-field {
-      border: 2px solid #474747;
-      background: #FFE9D8;
-      padding: 5px;
-      margin: 5px;
-    }
-
-    .array-field-list li {
-      list-style: circle;
-      margin-left: 20px;
-    }
 
     .parsed_query_header {
       font-family: Helvetica, Arial, sans-serif;
@@ -47,7 +39,11 @@
     }
 
     a {
-      color: #43a4b1;
+      color: #305CB3;
+    }
+
+    em {
+      color: #FF833D;
     }
 
     .facets {
@@ -61,7 +57,7 @@
     }
 
     .facets h2 {
-      background: #EA897E;
+      background: #D9411E;
       padding: 2px 5px;
     }
 
@@ -82,18 +78,9 @@
       font-weight: bold;
     }
 
-    .highlight {
-      color: white;
-      background-color: gray;
-      border: 1px black solid;
-    }
-
-    .highlight-box {
-      margin-left: 15px;
-    }
-
     .field-name {
       font-weight: bold;
+      // align="right" valign="top"
     }
 
     .highlighted-facet-field {
diff --git a/solr/contrib/velocity/src/resources/velocity/hit.vm b/solr/contrib/velocity/src/resources/velocity/hit.vm
index 10fbef9..ec4dfd8 100644
--- a/solr/contrib/velocity/src/resources/velocity/hit.vm
+++ b/solr/contrib/velocity/src/resources/velocity/hit.vm
@@ -1,24 +1,27 @@
-#set($docId = $doc.getFieldValue('id'))
+#set($docId = $doc.getFirstValue($request.schema.uniqueKeyField.name))
+
 
 <div class="result-document">
 
   <table>
     #foreach( $fieldName in $doc.fieldNames )
-      #foreach( $value in $doc.getFieldValues($fieldName) )
         <tr>
-          <th align="right" valign="top">
-            #if( $foreach.count == 1 )
-              $fieldName:
-            #end
+          <th align="right" valign="top" style="field-name">
+            $esc.html($fieldName):
           </th>
 
           <td align="left" valign="top">
-            $esc.html($value) <br/>
+            #field($fieldName)
           </td>
         </tr>
-      #end
     #end
   </table>
 
+  #if($debug)
+    <a href="#" onclick='jQuery(this).siblings("pre").toggle(); return false;'>toggle explain</a>
 
+    <pre style="display:none">
+      $response.getExplainMap().get($docId)
+    </pre>
+  #end
 </div>
diff --git a/solr/contrib/velocity/src/resources/velocity/layout.vm b/solr/contrib/velocity/src/resources/velocity/layout.vm
index ae5f223..aa68ffc 100644
--- a/solr/contrib/velocity/src/resources/velocity/layout.vm
+++ b/solr/contrib/velocity/src/resources/velocity/layout.vm
@@ -5,7 +5,7 @@
   <body>
     <div id="admin"><a href="#url_root/#/#core_name">Solr Admin</a></div>
     <div id="head">
-      <a href="#url_for_home#if($request.params.get('debugQuery'))?debugQuery=true#end"><img src="#{url_root}/img/solr.svg" id="logo"/></a>
+      <a href="#url_for_home#if($debug)?debug=true#end"><img src="#{url_root}/img/solr.svg" id="logo"/></a>
     </div>
 
     <div id="content">
diff --git a/solr/contrib/velocity/src/resources/velocity/macros.vm b/solr/contrib/velocity/src/resources/velocity/macros.vm
new file mode 100644
index 0000000..2c98fb0
--- /dev/null
+++ b/solr/contrib/velocity/src/resources/velocity/macros.vm
@@ -0,0 +1,38 @@
+## TODO: make this fully parameterized, with no context sensitivity
+#macro(field $f)
+  #if($response.response.highlighting.get($docId).get($f).get(0))
+    #set($pad = "")
+    #foreach($v in $response.response.highlighting.get($docId).get($f))
+$pad$v##  TODO: wrap in $esc.html(), or maybe make that optional?
+      #set($pad = " ... ")
+    #end
+  #else
+    #foreach($v in $doc.getFieldValues($f))$esc.html($v)
+    #end
+  #end
+#end
+
+#macro(link_to_previous_page)
+  #if($page.current_page_number > 1)
+    #set($prev_start = $page.start - $page.results_per_page)
+    <a class="prev-page" href="#url_for_start($prev_start)">$resource.previous</a>
+  #end
+#end
+
+#macro(link_to_next_page)
+  #if($page.current_page_number < $page.page_count)
+    #set($next_start = $page.start + $page.results_per_page)
+    <a class="next-page" href="#url_for_start($next_start)">$resource.next</a>
+  #end
+#end
+
+#macro(link_to_page $page_number $text)
+  #if($page_number == $page.current_page_number)
+    $text
+  #else
+    #if($page_number <= $page.page_count)
+      #set($page_start = $page_number * $page.results_per_page - $page.results_per_page)
+      <a class="page" href="#url_for_start($page_start)">$text</a>
+    #end
+  #end
+#end
diff --git a/solr/contrib/velocity/src/resources/velocity/resources.properties b/solr/contrib/velocity/src/resources/velocity/resources.properties
new file mode 100644
index 0000000..dff221c
--- /dev/null
+++ b/solr/contrib/velocity/src/resources/velocity/resources.properties
@@ -0,0 +1,6 @@
+find=Find
+page_of=Page <span class="page-num">{0}</span> of <span class="page-count">{1}</span>
+previous=previous
+next=next
+
+
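
The page_of value is a java.text.MessageFormat pattern; browse.vm renders it via $resource.page_of.insert($page.current_page_number,$page.page_count), which substitutes the two arguments into {0} and {1}. A rough sketch of the equivalent formatting, assuming ResourceTool's insert() delegates to MessageFormat as velocity-tools documents:

import java.text.MessageFormat;

public class PageOfSketch {
  public static void main(String[] args) {
    // the pattern below is the page_of entry from resources.properties
    String pattern = "Page <span class=\"page-num\">{0}</span> of <span class=\"page-count\">{1}</span>";
    System.out.println(MessageFormat.format(pattern, 1, 10));
    // prints: Page <span class="page-num">1</span> of <span class="page-count">10</span>
  }
}
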
diff --git a/solr/contrib/velocity/src/test-files/velocity/file.vm b/solr/contrib/velocity/src/test-files/velocity/file.vm
new file mode 100644
index 0000000..9a2c773
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/file.vm
@@ -0,0 +1 @@
+testing
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/solrconfig.xml b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/solrconfig.xml
index 11f6968..ba60305 100644
--- a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/solrconfig.xml
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/solrconfig.xml
@@ -16,66 +16,13 @@
  limitations under the License.
 -->
 
-<!-- 
-     For more details about configurations options that may appear in
-     this file, see http://wiki.apache.org/solr/SolrConfigXml. 
--->
 <config>
   <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
-  <indexConfig>
-    <useCompoundFile>${useCompoundFile:false}</useCompoundFile>
-  </indexConfig>
 
-  <lib dir="../../contrib/velocity/lib" />
-  <lib dir="../../dist/" regex="solr-velocity-\d.*\.jar" />
-  <dataDir>${solr.data.dir:}</dataDir>
+  <!--<lib dir="../../contrib/velocity/lib" />-->
+  <!--<lib dir="../../dist/" regex="solr-velocity-\d.*\.jar" />-->
 
 
-  <directoryFactory name="DirectoryFactory"
-                    class="${solr.directoryFactory:solr.StandardDirectoryFactory}"/>
-
-  <updateHandler class="solr.DirectUpdateHandler2">
-  </updateHandler>
-  
-  <query>
-    <maxBooleanClauses>1024</maxBooleanClauses>
-    <filterCache class="solr.FastLRUCache"
-                 size="512"
-                 initialSize="512"
-                 autowarmCount="0"/>
-    <queryResultCache class="solr.LRUCache"
-                     size="512"
-                     initialSize="512"
-                     autowarmCount="0"/>
-    <documentCache class="solr.LRUCache"
-                   size="512"
-                   initialSize="512"
-                   autowarmCount="0"/>
-    <enableLazyFieldLoading>true</enableLazyFieldLoading>
-
-   <queryResultWindowSize>20</queryResultWindowSize>
-   <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
-    <listener event="newSearcher" class="solr.QuerySenderListener">
-      <arr name="queries">
-      </arr>
-    </listener>
-    <listener event="firstSearcher" class="solr.QuerySenderListener">
-      <arr name="queries">
-        <lst>
-          <str name="q">static firstSearcher warming in solrconfig.xml</str>
-        </lst>
-      </arr>
-    </listener>
-    <useColdSearcher>false</useColdSearcher>
-    <maxWarmingSearchers>2</maxWarmingSearchers>
-
-  </query>
-  <requestDispatcher handleSelect="true" >
-    <requestParsers enableRemoteStreaming="true"
-                    multipartUploadLimitInKB="2048000" />
-    <httpCaching never304="true" />
-  </requestDispatcher>
-
   <requestHandler name="search" class="solr.SearchHandler" default="true">
      <lst name="defaults">
        <str name="echoParams">explicit</str>
@@ -83,13 +30,9 @@
      </lst>
     </requestHandler>
 
-  <requestHandler name="/update" class="solr.UpdateRequestHandler"  />
-
   <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter"/>
 
-  <!-- Legacy config for the admin interface -->
-  <admin>
-    <defaultQuery>*:*</defaultQuery>
-  </admin>
-
+  <queryResponseWriter name="velocityWithInitProps" class="solr.VelocityResponseWriter">
+    <str name="init.properties.file">velocity-init.properties</str>
+  </queryResponseWriter>
 </config>
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity-init.properties b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity-init.properties
new file mode 100644
index 0000000..853e5fc
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity-init.properties
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+foreach.provide.scope.control=false
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/VM_global_library.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/VM_global_library.vm
index 398b8a2..7bd767e 100644
--- a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/VM_global_library.vm
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/VM_global_library.vm
@@ -1,50 +1,3 @@
+#macro(legacy_macro)legacy_macro_SUCCESS#end
 
-
-
-#macro(nl2ul $named_list)
-  <ul>
-  #foreach($kv in $named_list)
-    <li>$kv.key ($kv.value)
-      #nl2ul($kv.value)
-    </li>
-  #end
-  </ul>
-#end
-
-#macro(url_for_filters $filter_queries)
-  /solr/itas?fq=$filter_queries
-#end
-
-#macro(url_for_home)/solr/itas#end
-
-#macro(url_for_start $start)/solr/itas?start=$start&q=$!{esc.url($params.get('q'))}#end
-
-## TODO: need to add escaping
-#macro(url_for_nested_facet $field $value)/solr/itas?fq=$field:%22$value%22#end
-
-#macro(link_to_previous_page $text)
-  #if($page.current_page_number > 1)
-    #set($prev_start = $page.start - $page.results_per_page)
-    <a class="prev-page" href="#url_for_start($prev_start)">$text</a>
-  #end
-#end
-
-#macro(link_to_next_page $text)
-  #if($page.current_page_number < $page.page_count)
-    #set($next_start = $page.start + $page.results_per_page)
-    <a class="next-page" href="#url_for_start($next_start)">$text</a>
-  #end
-#end
-
-#macro(link_to_page $page_number $text)
-  #if($page_number == $page.current_page_number)
-    $text
-  #else
-    #if($page_number <= $page.page_count)
-      #set($page_start = $page_number * $page.results_per_page - $page.results_per_page)
-      <a class="page" href="#url_for_start($page_start)">$text</a>
-    #end
-  #end
-#end
-
-#macro(param $key)$request.params.get($key)#end
+#macro(url_root)Loaded from: VM_global_library.vm#end
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/foreach.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/foreach.vm
new file mode 100644
index 0000000..3449dac
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/foreach.vm
@@ -0,0 +1 @@
+#foreach($x in ["a","b"])$!foreach.index#end
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/layout.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/layout.vm
new file mode 100644
index 0000000..39136e1
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/layout.vm
@@ -0,0 +1 @@
+{{{$content}}}
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/locale.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/locale.vm
new file mode 100644
index 0000000..a85fd05
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/locale.vm
@@ -0,0 +1 @@
+$resource.color
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/macros.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/macros.vm
new file mode 100644
index 0000000..46a508f
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/macros.vm
@@ -0,0 +1,3 @@
+#macro(test_macro)test_macro_SUCCESS#end
+
+#macro(url_root)Loaded from: macros.vm#end
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_legacy_support.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_legacy_support.vm
new file mode 100644
index 0000000..30f32fe
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_legacy_support.vm
@@ -0,0 +1 @@
+#legacy_macro
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_overridden.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_overridden.vm
new file mode 100644
index 0000000..f06b28f
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_overridden.vm
@@ -0,0 +1 @@
+#url_root
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_visible.vm b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_visible.vm
new file mode 100644
index 0000000..7a5baed
--- /dev/null
+++ b/solr/contrib/velocity/src/test-files/velocity/solr/collection1/conf/velocity/test_macro_visible.vm
@@ -0,0 +1 @@
+#test_macro
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test/org/apache/solr/velocity/VelocityResponseWriterTest.java b/solr/contrib/velocity/src/test/org/apache/solr/velocity/VelocityResponseWriterTest.java
index f06ff1b..0d2c3d1 100644
--- a/solr/contrib/velocity/src/test/org/apache/solr/velocity/VelocityResponseWriterTest.java
+++ b/solr/contrib/velocity/src/test/org/apache/solr/velocity/VelocityResponseWriterTest.java
@@ -18,27 +18,39 @@
 package org.apache.solr.velocity;
 
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.response.QueryResponseWriter;
+import org.apache.solr.response.SolrParamResourceLoader;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.response.VelocityResponseWriter;
 import org.apache.solr.request.SolrQueryRequest;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.io.IOException;
 import java.io.StringWriter;
 
 public class VelocityResponseWriterTest extends SolrTestCaseJ4 {
   @BeforeClass
   public static void beforeClass() throws Exception {
     initCore("solrconfig.xml", "schema.xml", getFile("velocity/solr").getAbsolutePath());
+  }
+
+  @Test
+  public void testVelocityResponseWriterRegistered() {
+    QueryResponseWriter writer = h.getCore().getQueryResponseWriter("velocity");
+    assertTrue("VrW registered check", writer instanceof VelocityResponseWriter);
   }
 
   @Test
   public void testCustomParamTemplate() throws Exception {
-    // This test doesn't use the Solr core, just the response writer directly
-
     org.apache.solr.response.VelocityResponseWriter vrw = new VelocityResponseWriter();
-    SolrQueryRequest req = req("v.template","custom", "v.template.custom","$response.response.response_data");
+    NamedList<String> nl = new NamedList<String>();
+    nl.add(VelocityResponseWriter.PARAMS_RESOURCE_LOADER_ENABLED, "true");
+    vrw.init(nl);
+    SolrQueryRequest req = req(VelocityResponseWriter.TEMPLATE,"custom",
+        SolrParamResourceLoader.TEMPLATE_PARAM_PREFIX+"custom","$response.response.response_data");
     SolrQueryResponse rsp = new SolrQueryResponse();
     StringWriter buf = new StringWriter();
     rsp.add("response_data", "testing");
@@ -47,14 +59,118 @@
   }
 
   @Test
-  public void testVelocityResponseWriterRegistered() {
-    QueryResponseWriter writer = h.getCore().getQueryResponseWriter("velocity");
+  public void testParamResourceLoaderDisabled() throws Exception {
+    VelocityResponseWriter vrw = new VelocityResponseWriter();
+    // by default param resource loader is disabled, no need to set it here
+    SolrQueryRequest req = req(VelocityResponseWriter.TEMPLATE,"custom",
+        SolrParamResourceLoader.TEMPLATE_PARAM_PREFIX+"custom","$response.response.response_data");
+    SolrQueryResponse rsp = new SolrQueryResponse();
+    StringWriter buf = new StringWriter();
+    try {
+      vrw.write(buf, req, rsp);
+      fail("Should have thrown exception due to missing template");
+    } catch (IOException e) {
+      // expected exception
+    }
+  }
 
-    assertTrue("VrW registered check", writer instanceof VelocityResponseWriter);
+  @Test
+  public void testFileResourceLoader() throws Exception {
+    VelocityResponseWriter vrw = new VelocityResponseWriter();
+    NamedList<String> nl = new NamedList<String>();
+    nl.add("template.base.dir", getFile("velocity").getAbsolutePath());
+    vrw.init(nl);
+    SolrQueryRequest req = req(VelocityResponseWriter.TEMPLATE,"file");
+    SolrQueryResponse rsp = new SolrQueryResponse();
+    StringWriter buf = new StringWriter();
+    vrw.write(buf, req, rsp);
+    assertEquals("testing", buf.toString());
   }
 
   @Test
   public void testSolrResourceLoaderTemplate() throws Exception {
-    assertEquals("0", h.query(req("q","*:*", "wt","velocity","v.template","numFound")));
+    assertEquals("0", h.query(req("q","*:*", "wt","velocity",VelocityResponseWriter.TEMPLATE,"numFound")));
   }
+
+  @Test
+  public void testMacros() throws Exception {
+    // tests that a macro in a custom macros.vm is visible
+    assertEquals("test_macro_SUCCESS", h.query(req("q","*:*", "wt","velocity",VelocityResponseWriter.TEMPLATE,"test_macro_visible")));
+
+    // tests that a builtin (_macros.vm) macro, #url_root in this case, can be overridden in a custom macros.vm
+    // the macro is also defined in VM_global_library.vm, which should also be overridden by macros.vm
+    assertEquals("Loaded from: macros.vm", h.query(req("q","*:*", "wt","velocity",VelocityResponseWriter.TEMPLATE,"test_macro_overridden")));
+
+    // tests that macros defined in VM_global_library.vm are visible.  This file was where macros in pre-5.0 versions were defined
+    assertEquals("legacy_macro_SUCCESS", h.query(req("q","*:*", "wt","velocity",VelocityResponseWriter.TEMPLATE,"test_macro_legacy_support")));
+  }
+
+  @Test
+  public void testInitProps() throws Exception {
+    // The test init properties file turns off being able to use $foreach.index (the implicit loop counter)
+    // The foreach.vm template uses $!foreach.index, with ! suppressing the literal "$foreach.index" output
+
+    assertEquals("01", h.query(req("q","*:*", "wt","velocity",VelocityResponseWriter.TEMPLATE,"foreach")));
+    assertEquals("", h.query(req("q","*:*", "wt","velocityWithInitProps",VelocityResponseWriter.TEMPLATE,"foreach")));
+  }
+
+  @Test
+  public void testLocaleFeature() throws Exception {
+    assertEquals("Color", h.query(req("q", "*:*", "wt", "velocity", VelocityResponseWriter.TEMPLATE, "locale",
+        VelocityResponseWriter.LOCALE,"en_US")));
+    assertEquals("Colour", h.query(req("q", "*:*", "wt", "velocity", VelocityResponseWriter.TEMPLATE, "locale",
+        VelocityResponseWriter.LOCALE,"en_UK")));
+  }
+
+  @Test
+  public void testLayoutFeature() throws Exception {
+    assertEquals("{{{0}}}", h.query(req("q","*:*", "wt","velocity",
+        VelocityResponseWriter.TEMPLATE,"numFound", VelocityResponseWriter.LAYOUT,"layout")));
+
+    // even with v.layout specified, layout can be disabled explicitly
+    assertEquals("0", h.query(req("q","*:*", "wt","velocity",
+        VelocityResponseWriter.TEMPLATE,"numFound",
+        VelocityResponseWriter.LAYOUT,"layout",
+        VelocityResponseWriter.LAYOUT_ENABLED,"false")));
+  }
+
+  @Test
+  public void testJSONWrapper() throws Exception {
+    assertEquals("foo({\"result\":\"0\"})", h.query(req("q", "*:*", "wt", "velocity",
+        VelocityResponseWriter.TEMPLATE, "numFound",
+        VelocityResponseWriter.JSON,"foo")));
+
+    // Now with layout, for good measure
+    assertEquals("foo({\"result\":\"{{{0}}}\"})", h.query(req("q", "*:*", "wt", "velocity",
+        VelocityResponseWriter.TEMPLATE, "numFound",
+        VelocityResponseWriter.JSON,"foo",
+        VelocityResponseWriter.LAYOUT,"layout")));
+  }
+
+  @Test
+  public void testContentType() throws Exception {
+    VelocityResponseWriter vrw = new VelocityResponseWriter();
+    NamedList<String> nl = new NamedList<String>();
+    vrw.init(nl);
+    SolrQueryResponse rsp = new SolrQueryResponse();
+
+    // with v.json=wrf, content type should default to application/json
+    assertEquals("application/json;charset=UTF-8",
+        vrw.getContentType(req(VelocityResponseWriter.TEMPLATE, "numFound",
+            VelocityResponseWriter.JSON, "wrf"), rsp));
+
+    // with no v.json specified, the default text/html should be returned
+    assertEquals("text/html;charset=UTF-8",
+        vrw.getContentType(req(VelocityResponseWriter.TEMPLATE, "numFound"), rsp));
+
+    // if v.contentType is specified, that should be used, even if v.json is specified
+    assertEquals("text/plain",
+        vrw.getContentType(req(VelocityResponseWriter.TEMPLATE, "numFound",
+            VelocityResponseWriter.CONTENT_TYPE,"text/plain"), rsp));
+    assertEquals("text/plain",
+        vrw.getContentType(req(VelocityResponseWriter.TEMPLATE, "numFound",
+            VelocityResponseWriter.JSON,"wrf",
+            VelocityResponseWriter.CONTENT_TYPE,"text/plain"), rsp));
+  }
+
 }
diff --git a/solr/contrib/velocity/src/test/velocity/resources.properties b/solr/contrib/velocity/src/test/velocity/resources.properties
new file mode 100644
index 0000000..ec6320f
--- /dev/null
+++ b/solr/contrib/velocity/src/test/velocity/resources.properties
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+color=Color
\ No newline at end of file
diff --git a/solr/contrib/velocity/src/test/velocity/resources_en_UK.properties b/solr/contrib/velocity/src/test/velocity/resources_en_UK.properties
new file mode 100644
index 0000000..21a476f
--- /dev/null
+++ b/solr/contrib/velocity/src/test/velocity/resources_en_UK.properties
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+color=Colour
\ No newline at end of file
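
These bundles back the testLocaleFeature assertions: SolrVelocityResourceTool resolves bundles under the velocity package with the locale parsed from v.locale, so en_US falls through to the base resources.properties ("Color") while en_UK matches resources_en_UK.properties ("Colour"). A minimal sketch of that lookup path, using the same ConversionUtils call the tool copies from velocity-tools:

import java.util.Locale;
import java.util.ResourceBundle;

import org.apache.velocity.tools.ConversionUtils;

public class ResourceLookupSketch {
  public static String lookup(String key, String localeString) {
    Locale locale = ConversionUtils.toLocale(localeString);  // e.g. "en_UK" -> Locale("en","UK")
    if (locale == null) {
      locale = Locale.ROOT;                                  // same default the tool applies
    }
    // bundles for the tool live in the "velocity" package, hence the prefixed base name
    ResourceBundle bundle = ResourceBundle.getBundle("velocity.resources", locale,
        ResourceLookupSketch.class.getClassLoader());
    return bundle.getString(key);                            // lookup("color", "en_UK") -> "Colour"
  }
}
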
diff --git a/solr/core/build.xml b/solr/core/build.xml
index 7a746d1..f320f76 100644
--- a/solr/core/build.xml
+++ b/solr/core/build.xml
@@ -63,7 +63,7 @@
   <target name="resolve" depends="ivy-availability-check,ivy-fail,ivy-configure">
     <sequential>
       <ivy:retrieve conf="compile,compile.hadoop" type="jar,bundle" sync="${ivy.sync}" log="download-only" symlink="${ivy.symlink}"/>
-      <ivy:retrieve conf="test,test.DfsMiniCluster" type="jar,bundle,test" sync="${ivy.sync}" log="download-only" symlink="${ivy.symlink}"
+      <ivy:retrieve conf="test,test.DfsMiniCluster,test.MiniKdc" type="jar,bundle,test" sync="${ivy.sync}" log="download-only" symlink="${ivy.symlink}"
                     pattern="${test.lib.dir}/[artifact]-[revision](-[classifier]).[ext]"/>
     </sequential>
   </target>
diff --git a/solr/core/ivy.xml b/solr/core/ivy.xml
index 60afeef..5fc9f1e 100644
--- a/solr/core/ivy.xml
+++ b/solr/core/ivy.xml
@@ -19,13 +19,14 @@
 <ivy-module version="2.0"  xmlns:maven="http://ant.apache.org/ivy/maven">
   <info organisation="org.apache.solr" module="core"/>
   
-  <configurations defaultconfmapping="compile->master;compile.hadoop->master;test->master;test.DfsMiniCluster->master">
+  <configurations defaultconfmapping="compile->master;compile.hadoop->master;test->master;test.DfsMiniCluster->master;test.MiniKdc->master">
     <!-- artifacts in the "compile" and "compile.hadoop" configurations will go into solr/core/lib/ -->
     <conf name="compile" transitive="false"/>
     <conf name="compile.hadoop" transitive="false"/>
-    <!-- artifacts in the "test" and "test.DfsMiniCluster" configuration will go into solr/core/test-lib/ -->
+    <!-- artifacts in the "test", "test.DfsMiniCluster", and "test.MiniKdc" configuration will go into solr/core/test-lib/ -->
     <conf name="test" transitive="false"/>
     <conf name="test.DfsMiniCluster" transitive="false"/>
+    <conf name="test.MiniKdc" transitive="false"/>
   </configurations>
 
   <dependencies>
@@ -46,7 +47,6 @@
     <dependency org="log4j" name="log4j" rev="${/log4j/log4j}" conf="compile"/>
     <dependency org="org.slf4j" name="slf4j-log4j12" rev="${/org.slf4j/slf4j-log4j12}" conf="compile"/>
 
-    <dependency org="javax.servlet" name="javax.servlet-api" rev="${/javax.servlet/javax.servlet-api}" conf="test"/>
     <dependency org="org.easymock" name="easymock" rev="${/org.easymock/easymock}" conf="test"/>
     <dependency org="cglib" name="cglib-nodep" rev="${/cglib/cglib-nodep}" conf="test"/>
     <dependency org="org.objenesis" name="objenesis" rev="${/org.objenesis/objenesis}" conf="test"/>
@@ -62,6 +62,8 @@
     <dependency org="org.apache.hadoop" name="hadoop-annotations" rev="${/org.apache.hadoop/hadoop-annotations}" conf="compile.hadoop"/>
     <dependency org="org.apache.hadoop" name="hadoop-auth" rev="${/org.apache.hadoop/hadoop-auth}" conf="compile.hadoop"/>
     <dependency org="commons-configuration" name="commons-configuration" rev="${/commons-configuration/commons-configuration}" conf="compile.hadoop"/>
+    <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="compile.hadoop"/>
+    
     <dependency org="com.google.protobuf" name="protobuf-java" rev="${/com.google.protobuf/protobuf-java}" conf="compile.hadoop"/>
     <dependency org="com.googlecode.concurrentlinkedhashmap" name="concurrentlinkedhashmap-lru" rev="${/com.googlecode.concurrentlinkedhashmap/concurrentlinkedhashmap-lru}" conf="compile.hadoop"/>
 
@@ -75,8 +77,13 @@
     <dependency org="org.mortbay.jetty" name="jetty" rev="${/org.mortbay.jetty/jetty}" conf="test.DfsMiniCluster"/>
     <dependency org="org.mortbay.jetty" name="jetty-util" rev="${/org.mortbay.jetty/jetty-util}" conf="test.DfsMiniCluster"/>
     <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test.DfsMiniCluster"/>
+    <dependency org="com.sun.jersey" name="jersey-server" rev="${/com.sun.jersey/jersey-server}" conf="test.DfsMiniCluster"/>
     <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="test.DfsMiniCluster"/>
 
+    <!-- Hadoop MiniKdc Dependencies -->
+    <dependency org="org.apache.hadoop" name="hadoop-minikdc" rev="${/org.apache.hadoop/hadoop-minikdc}" conf="test.MiniKdc"/>
+    <dependency org="org.apache.directory.server" name="apacheds-all" rev="${/org.apache.directory.server/apacheds-all}" conf="test.MiniKdc"/>
+
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/> 
   </dependencies>
 </ivy-module>
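
For reference, the new test.MiniKdc configuration pulls hadoop-minikdc (plus apacheds-all) into solr/core/test-lib/ so tests can spin up an embedded KDC. A minimal sketch of that usage, assuming only the stock org.apache.hadoop.minikdc.MiniKdc API; the class name and file paths below are illustrative, not part of this patch:

import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcUsageSketch {
  public static void main(String[] args) throws Exception {
    // work directory for the embedded KDC's generated config and logs
    File workDir = new File(System.getProperty("java.io.tmpdir"), "minikdc-work");
    workDir.mkdirs();

    Properties conf = MiniKdc.createConf();   // stock KDC configuration
    MiniKdc kdc = new MiniKdc(conf, workDir);
    kdc.start();
    try {
      // export principals for a test client and server into a keytab
      File keytab = new File(workDir, "test.keytab");
      kdc.createPrincipal(keytab, "client", "server");
      System.out.println("embedded KDC realm: " + kdc.getRealm());
    } finally {
      kdc.stop();
    }
  }
}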
diff --git a/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java b/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java
deleted file mode 100644
index 515f8b7..0000000
--- a/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilter.java
+++ /dev/null
@@ -1,1372 +0,0 @@
-package org.apache.solr.analysis;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.charset.Charset;
-import java.util.HashMap;
-import java.util.Set;
-
-import org.apache.lucene.analysis.charfilter.BaseCharFilter;
-
-/**
- * <p>
- * This class is <b>NOT</b> recommended for new users and should be
- * considered <b>UNSUPPORTED</b>.
- * </p>
- * <p>
- * In Solr version 3.5 and earlier, <tt>HTMLStripCharFilter(Factory)</tt>
- * had known bugs in the offsets it provided, triggering e.g. exceptions in
- * highlighting.
- * </p>
- * <p>
- * This class is provided as possible alternative for people who depend on
- * the "broken" behavior of <tt>HTMLStripCharFilter</tt> in Solr version 3.5
- * and earlier, and/or who don't like the changes introduced by the Solr 3.6+
- * version of <tt>HTMLStripCharFilterFactory</tt>.  (See the 3.6.0 release
- * section of solr/CHANGES.txt for a list of differences in behavior.)
- * </p>
- * @deprecated use {@link org.apache.lucene.analysis.charfilter.HTMLStripCharFilter}
- */
-@Deprecated
-public class LegacyHTMLStripCharFilter extends BaseCharFilter {
-  private int readAheadLimit = DEFAULT_READ_AHEAD;
-  private int safeReadAheadLimit = readAheadLimit - 3;
-  private int numWhitespace = 0;
-  private int numRead = 0;
-  private int numEaten = 0;
-  private int numReturned = 0;
-  private int lastMark;
-  private Set<String> escapedTags;
-
-  // pushback buffer
-  private final StringBuilder pushed = new StringBuilder();
-  private static final int EOF=-1;
-  private static final int MISMATCH=-2;
-
-  private static final int MATCH=-3;
-  // temporary buffer
-  private final StringBuilder sb = new StringBuilder();
-  public static final int DEFAULT_READ_AHEAD = 8192;
-
-
-  public static void main(String[] args) throws IOException {
-    Reader in = new LegacyHTMLStripCharFilter(
-            new InputStreamReader(System.in, Charset.defaultCharset()));
-    int ch;
-    while ( (ch=in.read()) != -1 ) System.out.print((char)ch);
-  }
-
-  public LegacyHTMLStripCharFilter(Reader source) {
-    super(source.markSupported() ? source : new BufferedReader(source));
-  }
-
-  public LegacyHTMLStripCharFilter(Reader source, Set<String> escapedTags){
-    this(source);
-    this.escapedTags = escapedTags;
-  }
-
-  public LegacyHTMLStripCharFilter(Reader source, Set<String> escapedTags, int readAheadLimit){
-    this(source);
-    this.escapedTags = escapedTags;
-    this.readAheadLimit = readAheadLimit;
-    safeReadAheadLimit = readAheadLimit - 3;
-  }
-
-  public int getReadAheadLimit() {
-    return readAheadLimit;
-  }
-
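-  // Returns the next character, draining the pushback buffer before
-  // reading (and counting) a fresh character from the underlying stream.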
-  private int next() throws IOException {
-    int len = pushed.length();
-    if (len>0) {
-      int ch = pushed.charAt(len-1);
-      pushed.setLength(len-1);
-      return ch;
-    }
-    numRead++;
-    return input.read();
-  }
-
-  private int nextSkipWS() throws IOException {
-    int ch=next();
-    while(isSpace(ch)) ch=next();
-    return ch;
-  }
-
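-  // Returns the next character without consuming it, by reading it and
-  // immediately pushing it back.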
-  private int peek() throws IOException {
-    int len = pushed.length();
-    if (len>0) {
-      return pushed.charAt(len-1);
-    }
-    numRead++;
-    int ch = input.read();
-    push(ch);
-    return ch;
-  }
-
-  private void push(int ch) {
-    pushed.append((char)ch);
-  }
-
-
-  private boolean isSpace(int ch) {
-    switch (ch) {
-      case ' ':
-      case '\n':
-      case '\r':
-      case '\t': return true;
-      default: return false;
-    }
-  }
-
-  private boolean isHex(int ch) {
-    return (ch>='0' && ch<='9') ||
-           (ch>='A' && ch<='F') ||
-           (ch>='a' && ch<='f');
-  }
-
-  private boolean isAlpha(int ch) {
-    return ch>='a' && ch<='z' || ch>='A' && ch<='Z';
-  }
-
-  private boolean isDigit(int ch) {
-    return ch>='0' && ch<='9';
-  }
-
-/*** From HTML 4.0
-[4]     NameChar     ::=    Letter | Digit | '.' | '-' | '_' | ':' | CombiningChar | Extender
-[5]     Name     ::=    (Letter | '_' | ':') (NameChar)*
-[6]     Names    ::=    Name (#x20 Name)*
-[7]     Nmtoken    ::=    (NameChar)+
-[8]     Nmtokens     ::=    Nmtoken (#x20 Nmtoken)*
-***/
-
-  // should I include all id chars allowable by HTML/XML here?
-  // including accented chars, ':', etc?
-  private boolean isIdChar(int ch) {
-    // return Character.isUnicodeIdentifierPart(ch);
-    // isUnicodeIdentifierPart doesn't include '-'... should I still
-    // use it and add in '-',':',etc?
-    return isAlpha(ch) || isDigit(ch) || ch=='.' ||
-            ch=='-' || ch=='_' || ch==':'
-            || Character.isLetter(ch);
-
-  }
-
-  private boolean isFirstIdChar(int ch) {
-    return Character.isUnicodeIdentifierStart(ch);
-    // return isAlpha(ch) || ch=='_' || Character.isLetter(ch);
-  }
-
-
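-  // Marks the underlying stream so that a failed match can roll back here.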
-  private void saveState() throws IOException {
-    lastMark = numRead;
-    input.mark(readAheadLimit);
-  }
-
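-  // Rewinds the stream to the last mark and discards any pushed-back chars.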
-  private void restoreState() throws IOException {
-    input.reset();
-    pushed.setLength(0);
-  }
-
-  private int readNumericEntity() throws IOException {
-    // "&#" has already been read at this point
-    int eaten = 2;
-
-    // is this decimal, hex, or nothing at all.
-    int ch = next();
-    int base=10;
-    boolean invalid=false;
-    sb.setLength(0);
-
-    if (isDigit(ch)) {
-      // decimal character entity
-      sb.append((char)ch);
-      for (int i=0; i<10; i++) {
-        ch = next();
-        if (isDigit(ch)) {
-          sb.append((char)ch);
-        } else {
-          break;
-        }
-      }
-    } else if (ch=='x') {
-      eaten++;
-      // hex character entity
-      base=16;
-      sb.setLength(0);
-      for (int i=0; i<10; i++) {
-        ch = next();
-        if (isHex(ch)) {
-          sb.append((char)ch);
-        } else {
-          break;
-        }
-      }
-    } else {
-      return MISMATCH;
-    }
-
-
-    // In older HTML, an entity was not always terminated
-    // with a semicolon.  We'll also treat EOF or whitespace as terminating
-    // the entity.
-    try {
-      if (ch==';' || ch==-1) {
-        // do not account for the eaten ";" since we do output a char
-        numWhitespace = sb.length() + eaten;
-        return Integer.parseInt(sb.toString(), base);
-      }
-
-      // if whitespace terminated the entity, we need to return
-      // that whitespace on the next call to read().
-      if (isSpace(ch)) {
-        push(ch);
-        numWhitespace = sb.length() + eaten;
-        return Integer.parseInt(sb.toString(), base);
-      }
-    } catch (NumberFormatException e) {
-      return MISMATCH;
-    }
-
-    // Not an entity...
-    return MISMATCH;
-  }
-
-  private int readEntity() throws IOException {
-    int ch = next();
-    if (ch=='#') return readNumericEntity();
-
-    //read an entity reference
-
-    // for an entity reference, require the ';' for safety.
-    // otherwise we may try to convert part of some company
-    // names to an entity.  "Alpha&Beta Corp" for instance.
-    //
-    // TODO: perhaps I should special case some of the
-    // more common ones like &amp to make the ';' optional...
-
-    sb.setLength(0);
-    sb.append((char)ch);
-
-    for (int i=0; i< safeReadAheadLimit; i++) {
-      ch=next();
-      if (Character.isLetter(ch)) {
-        sb.append((char)ch);
-      } else {
-        break;
-      }
-    }
-
-    if (ch==';') {
-      String entity=sb.toString();
-      Character entityChar = entityTable.get(entity);
-      if (entityChar!=null) {
-        numWhitespace = entity.length() + 1 ;
-        return entityChar.charValue();
-      }
-    }
-
-    return MISMATCH;
-  }
-
-  /*** valid comments according to HTML specs
-   <!-- Hello -->
-   <!-- Hello -- -- Hello-->
-   <!---->
-   <!------ Hello -->
-   <!>
-   <!------> Hello -->
-
-   #comments inside of an entity decl:
-   <!ENTITY amp     CDATA "&#38;"   -- ampersand, U+0026 ISOnum -->
-
-   Turns out, IE & Mozilla don't parse comments correctly.
-   Since this is meant to be a practical stripper, I'll just
-   try to duplicate what the browsers do.
-
-   <!-- (stuff_including_markup)* -->
-   <!FOO (stuff, not including markup) >
-   <! (stuff, not including markup)* >
-
-
-  ***/
-
-  private int readBang(boolean inScript) throws IOException {
-    // at this point, "<!" has been read
-    int ret = readComment(inScript);
-    if (ret==MATCH) return MATCH;
-
-    if ((numRead - lastMark) < safeReadAheadLimit || peek() == '>' ) {
-
-      int ch = next();
-      if (ch=='>') return MATCH;
-
-      // if it starts with <! and isn't a comment,
-      // simply read until ">"
-      //since we did readComment already, it may be the case that we are already deep into the read ahead buffer
-      //so, we may need to abort sooner
-      while ((numRead - lastMark) < safeReadAheadLimit) {
-        ch = next();
-        if (ch=='>') {
-          return MATCH;
-        }
-        else if (ch<0) {
-          return MISMATCH;
-        }
-      }
-    }
-    return MISMATCH;
-  }
-
-  // tries to read comments the way browsers do, not
-  // strictly by the standards.
-  //
-  // GRRRR.  it turns out that in the wild, a <script> can have a HTML comment
-  // that contains a script that contains a quoted comment.
-  // <script><!-- document.write("<!--embedded comment-->") --></script>
-  //
-  private int readComment(boolean inScript) throws IOException {
-    // at this point "<!" has been read
-    int ch = next();
-    if (ch!='-') {
-      // not a comment
-      push(ch);
-      return MISMATCH;
-    }
-
-    ch = next();
-    if (ch!='-') {
-      // not a comment
-      push(ch);
-      push('-');
-      return MISMATCH;
-    }
-    /*two extra calls to next() here, so make sure we don't read past our mark*/
-    while ((numRead - lastMark) < safeReadAheadLimit -3 ) {
-      ch = next();
-      if (ch<0) return MISMATCH;
-      if (ch=='-') {
-        ch = next();
-        if (ch<0) return MISMATCH;
-        if (ch!='-') {
-          push(ch);
-          continue;
-        }
-
-        ch = next();
-        if (ch<0) return MISMATCH;
-        if (ch!='>') {
-          push(ch);
-          push('-');
-          continue;
-        }
-
-        return MATCH;
-      } else if ((ch=='\'' || ch=='"') && inScript) {
-        push(ch);
-        int ret=readScriptString();
-        // if this wasn't a string, there's not much we can do
-        // at this point without having a stack of stream states in
-        // order to "undo" just the latest.
-      } else if (ch=='<') {
-        eatSSI();
-      }
-
-    }
-    return MISMATCH;
-
-  }
-
-
-
-  private int readTag() throws IOException {
-    // at this point '<' has already been read
-    int ch = next();
-    if (!isAlpha(ch)) {
-      push(ch);
-      return MISMATCH;
-    }
-
-    sb.setLength(0);
-    sb.append((char)ch);
-    while((numRead - lastMark) < safeReadAheadLimit) {
-
-      ch = next();
-      if (isIdChar(ch)) {
-        sb.append((char)ch);
-      } else if (ch=='/') {
-        // Hmmm, a tag can close with "/>" as well as "/ >"
-        // read end tag '/>' or '/ >', etc
-        return nextSkipWS()=='>' ? MATCH : MISMATCH;
-      } else {
-        break;
-      }
-    }
-    if (escapedTags!=null && escapedTags.contains(sb.toString())){
-      // if this is an escaped tag, then keep it
-      return MISMATCH;
-    }
-    // After the tag id, there needs to be either whitespace or
-    // '>'
-    if ( !(ch=='>' || isSpace(ch)) ) {
-      return MISMATCH;
-    }
-
-    if (ch!='>') {
-      // process attributes
-      while ((numRead - lastMark) < safeReadAheadLimit) {
-        ch=next();
-        if (isSpace(ch)) {
-          continue;
-        } else if (isFirstIdChar(ch)) {
-          push(ch);
-          int ret = readAttr2();
-          if (ret==MISMATCH) return ret;
-        } else if (ch=='/') {
-          // read end tag '/>' or '/ >', etc
-          return nextSkipWS()=='>' ? MATCH : MISMATCH;
-        } else if (ch=='>') {
-          break;
-        } else {
-          return MISMATCH;
-        }
-
-      }
-      if ((numRead - lastMark) >= safeReadAheadLimit){
-        return MISMATCH;//exit out if we exceeded the buffer
-      }
-    }
-
-    // We only get to this point after we have read the
-    // entire tag.  Now let's see if it's a special tag.
-    String name=sb.toString();
-    if (name.equalsIgnoreCase("script") || name.equalsIgnoreCase("style")) {
-     // The content of script and style elements is
-     //  CDATA in HTML 4 but PCDATA in XHTML.
-
-     /* From HTML4:
-       Although the STYLE and SCRIPT elements use CDATA for their data model,
-       for these elements, CDATA must be handled differently by user agents.
-       Markup and entities must be treated as raw text and passed to the application
-       as is. The first occurrence of the character sequence "</" (end-tag open
-       delimiter) is treated as terminating the end of the element's content. In
-       valid documents, this would be the end tag for the element.
-      */
-
-     // discard everything until the end tag is hit (except
-     // when it occurs in a comment).
-
-     // reset the stream mark to here, since we know that we successfully matched
-     // a tag, and if we can't find the end tag, this is where we will want
-     // to roll back to.
-     saveState();
-     pushed.setLength(0);
-     return findEndTag();
-    }
-    return MATCH;
-  }
-
-
-  // find an end tag, but beware of comments...
-  // <script><!-- </script> -->foo</script>
-  // beware markup in script strings: </script>...document.write("</script>")foo</script>
-  // TODO: do I need to worry about CDATA sections "<![CDATA["  ?
-  int findEndTag() throws IOException {
-
-    while ((numRead - lastMark) < safeReadAheadLimit) {
-      int ch = next();
-      if (ch=='<') {
-        ch = next();
-        // skip looking for end-tag in comments
-        if (ch=='!') {
-          int ret = readBang(true);
-          if (ret==MATCH) continue;
-          // yikes... what now?  It wasn't a comment, but I can't get
-          // back to the state I was at.  Just continue from where I
-          // am I guess...
-          continue;
-        }
-        // did we match "</"
-        if (ch!='/') {
-          push(ch);
-          continue;
-        }
-        int ret = readName(false);
-        if (ret==MISMATCH) return MISMATCH;
-        ch=nextSkipWS();
-        if (ch!='>') return MISMATCH;
-        return MATCH;
-      } else if (ch=='\'' || ch=='"') {
-        // read javascript string to avoid a false match.
-        push(ch);
-        int ret = readScriptString();
-        // what to do about a non-match (non-terminated string?)
-        // play it safe and index the rest of the data I guess...
-        if (ret==MISMATCH) return MISMATCH;
-      } else if (ch<0) {
-        return MISMATCH;
-      }
-
-    }
-    return MISMATCH;
-  }
-
-
-  // read a string escaped by backslashes
-  private int readScriptString() throws IOException {
-    int quoteChar = next();
-    if (quoteChar!='\'' && quoteChar!='"') return MISMATCH;
-
-    while((numRead - lastMark) < safeReadAheadLimit) {
-      int ch = next();
-      if (ch==quoteChar) return MATCH;
-      else if (ch=='\\') {
-        ch=next();
-      } else if (ch<0) {
-        return MISMATCH;
-      } else if (ch=='<') {
-        eatSSI();
-      }
-
-    }
-    return MISMATCH;
-  }
-
-
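-  // Reads a tag name; when checkEscaped is true, a name found in escapedTags
-  // is reported as MISMATCH so that the whole tag is preserved in the output.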
-  private int readName(boolean checkEscaped) throws IOException {
-    StringBuilder builder = (checkEscaped && escapedTags!=null) ? new StringBuilder() : null;
-    int ch = next();
-    if (builder!=null) builder.append((char)ch);
-    if (!isFirstIdChar(ch)) return MISMATCH;
-    ch = next();
-    if (builder!=null) builder.append((char)ch);
-    while(isIdChar(ch)) {
-      ch=next();
-      if (builder!=null) builder.append((char)ch);
-    }
-    if (ch!=-1) {
-      push(ch);
-
-    }
-    //strip off the trailing >
-    if (builder!=null && escapedTags.contains(builder.substring(0, builder.length() - 1))){
-      return MISMATCH;
-    }
-    return MATCH;
-  }
-
-  /***
-  [10]    AttValue     ::=    '"' ([^<&"] | Reference)* '"'
-        |  "'" ([^<&'] | Reference)* "'"
-
-  need to also handle unquoted attributes, and attributes w/o values:
-  <td id=msviGlobalToolbar height="22" nowrap align=left>
-
-  ***/
-
-  // This reads attributes and attempts to handle any
-  // embedded server side includes that would otherwise
-  // mess up the quote handling.
-  //  <a href="a/<!--#echo "path"-->">
-  private int readAttr2() throws IOException {
-    if ((numRead - lastMark) < safeReadAheadLimit) {
-      int ch = next();
-      if (!isFirstIdChar(ch)) return MISMATCH;
-      ch = next();
-      while(isIdChar(ch) && ((numRead - lastMark) < safeReadAheadLimit)){
-        ch=next();
-      }
-      if (isSpace(ch)) ch = nextSkipWS();
-
-      // attributes may not have a value at all!
-      // if (ch != '=') return MISMATCH;
-      if (ch != '=') {
-        push(ch);
-        return MATCH;
-      }
-
-      int quoteChar = nextSkipWS();
-
-      if (quoteChar=='"' || quoteChar=='\'') {
-        while ((numRead - lastMark) < safeReadAheadLimit) {
-          ch = next();
-          if (ch<0) return MISMATCH;
-          else if (ch=='<') {
-            eatSSI();
-          }
-          else if (ch==quoteChar) {
-            return MATCH;
-          //} else if (ch=='<') {
-          //  return MISMATCH;
-          }
-
-        }
-      } else {
-        // unquoted attribute
-        while ((numRead - lastMark) < safeReadAheadLimit) {
-          ch = next();
-          if (ch<0) return MISMATCH;
-          else if (isSpace(ch)) {
-            push(ch);
-            return MATCH;
-          } else if (ch=='>') {
-            push(ch);
-            return MATCH;
-          } else if (ch=='<') {
-            eatSSI();
-          }
-
-        }
-      }
-    }
-    return MISMATCH;
-  }
-
-  // skip past server side include
-  private int eatSSI() throws IOException {
-    // at this point, only a "<" was read.
-    // on a mismatch, push back the last char so that if it was
-    // a quote that closes the attribute, it will be re-read and matched.
-    int ch = next();
-    if (ch!='!') {
-      push(ch);
-      return MISMATCH;
-    }
-    ch=next();
-    if (ch!='-') {
-      push(ch);
-      return MISMATCH;
-    }
-    ch=next();
-    if (ch!='-') {
-      push(ch);
-      return MISMATCH;
-    }
-    ch=next();
-    if (ch!='#') {
-      push(ch);
-      return MISMATCH;
-    }
-
-    push('#'); push('-'); push('-');
-    return readComment(false);
-  }
-
-  private int readProcessingInstruction() throws IOException {
-    // "<?" has already been read
-    while ((numRead - lastMark) < safeReadAheadLimit) {
-      int ch = next();
-      if (ch=='?' && peek()=='>') {
-        next();
-        return MATCH;
-      } else if (ch==-1) {
-        return MISMATCH;
-      }
-
-    }
-    return MISMATCH;
-  }
-
-
-
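-  // Main stripping loop: returns the next plain-text character, decoding
-  // entities and replacing each stripped markup construct with one space.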
-  @Override
-  public int read() throws IOException {
-    // TODO: Do we ever want to preserve CDATA sections?
-    // where do we have to worry about them?
-    // <![ CDATA [ unescaped markup ]]>
-    if (numWhitespace > 0){
-      numEaten += numWhitespace;
-      addOffCorrectMap(numReturned, numEaten);
-      numWhitespace = 0;
-    }
-    numReturned++;
-    //do not limit this one by the READAHEAD
-    while(true) {
-      int lastNumRead = numRead;
-      int ch = next();
-
-      switch (ch) {
-        case '&':
-          saveState();
-          ch = readEntity();
-          if (ch>=0) return ch;
-          if (ch==MISMATCH) {
-            restoreState();
-
-            return '&';
-          }
-          break;
-
-        case '<':
-          saveState();
-          ch = next();
-          int ret = MISMATCH;
-          if (ch=='!') {
-            ret = readBang(false);
-          } else if (ch=='/') {
-            ret = readName(true);
-            if (ret==MATCH) {
-              ch=nextSkipWS();
-              ret= ch=='>' ? MATCH : MISMATCH;
-            }
-          } else if (isAlpha(ch)) {
-            push(ch);
-            ret = readTag();
-          } else if (ch=='?') {
-            ret = readProcessingInstruction();
-          }
-
-          // matched something to be discarded, so break
-          // from this case and continue in the loop
-          if (ret==MATCH) {
-            // the matched construct is replaced by a single space; count the
-            // eaten chars as whitespace, minus the space we return right now
-            numWhitespace = (numRead - lastNumRead) - 1;
-            return ' ';
-          }
-
-          // didn't match any HTML constructs, so roll back
-          // the stream state and just return '<'
-          restoreState();
-          return '<';
-
-        default: return ch;
-      }
-
-    }
-
-
-  }
-
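-  // Bulk read implemented on top of the single-character read() above.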
-  @Override
-  public int read(char cbuf[], int off, int len) throws IOException {
-    int i=0;
-    for (i=0; i<len; i++) {
-      int ch = read();
-      if (ch==-1) break;
-      cbuf[off++] = (char)ch;
-    }
-    if (i==0) {
-      if (len==0) return 0;
-      return -1;
-    }
-    return i;
-  }
-
-  private static final HashMap<String,Character> entityTable;
-  static {
-    entityTable = new HashMap<>();
-    // entityName and entityVal generated from the python script
-    // included in comments at the end of this file.
-    final String[] entityName={ "zwnj","aring","gt","yen","ograve","Chi","delta","rang","sup","trade","Ntilde","xi","upsih","nbsp","Atilde","radic","otimes","aelig","oelig","equiv","ni","infin","Psi","auml","cup","Epsilon","otilde","lt","Icirc","Eacute","Lambda","sbquo","Prime","prime","psi","Kappa","rsaquo","Tau","uacute","ocirc","lrm","zwj","cedil","Alpha","not","amp","AElig","oslash","acute","lceil","alefsym","laquo","shy","loz","ge","Igrave","nu","Ograve","lsaquo","sube","euro","rarr","sdot","rdquo","Yacute","lfloor","lArr","Auml","Dagger","brvbar","Otilde","szlig","clubs","diams","agrave","Ocirc","Iota","Theta","Pi","zeta","Scaron","frac14","egrave","sub","iexcl","frac12","ordf","sum","prop","Uuml","ntilde","atilde","asymp","uml","prod","nsub","reg","rArr","Oslash","emsp","THORN","yuml","aacute","Mu","hArr","le","thinsp","dArr","ecirc","bdquo","Sigma","Aring","tilde","nabla","mdash","uarr","times","Ugrave","Eta","Agrave","chi","real","circ","eth","rceil","iuml","gamma","lambda","harr","Egrave","frac34","dagger","divide","Ouml","image","ndash","hellip","igrave","Yuml","ang","alpha","frasl","ETH","lowast","Nu","plusmn","bull","sup1","sup2","sup3","Aacute","cent","oline","Beta","perp","Delta","there4","pi","iota","empty","euml","notin","iacute","para","epsilon","weierp","OElig","uuml","larr","icirc","Upsilon","omicron","upsilon","copy","Iuml","Oacute","Xi","kappa","ccedil","Ucirc","cap","mu","scaron","lsquo","isin","Zeta","minus","deg","and","tau","pound","curren","int","ucirc","rfloor","ensp","crarr","ugrave","exist","cong","theta","oplus","permil","Acirc","piv","Euml","Phi","Iacute","quot","Uacute","Omicron","ne","iquest","eta","rsquo","yacute","Rho","darr","Ecirc","Omega","acirc","sim","phi","sigmaf","macr","thetasym","Ccedil","ordm","uArr","forall","beta","fnof","rho","micro","eacute","omega","middot","Gamma","rlm","lang","spades","supe","thorn","ouml","or","raquo","part","sect","ldquo","hearts","sigma","oacute"};
-    final char[] entityVal={ 8204,229,62,165,242,935,948,9002,8835,8482,209,958,978,160,195,8730,8855,230,339,8801,8715,8734,936,228,8746,917,245,60,206,201,923,8218,8243,8242,968,922,8250,932,250,244,8206,8205,184,913,172,38,198,248,180,8968,8501,171,173,9674,8805,204,957,210,8249,8838,8364,8594,8901,8221,221,8970,8656,196,8225,166,213,223,9827,9830,224,212,921,920,928,950,352,188,232,8834,161,189,170,8721,8733,220,241,227,8776,168,8719,8836,174,8658,216,8195,222,255,225,924,8660,8804,8201,8659,234,8222,931,197,732,8711,8212,8593,215,217,919,192,967,8476,710,240,8969,239,947,955,8596,200,190,8224,247,214,8465,8211,8230,236,376,8736,945,8260,208,8727,925,177,8226,185,178,179,193,162,8254,914,8869,916,8756,960,953,8709,235,8713,237,182,949,8472,338,252,8592,238,933,959,965,169,207,211,926,954,231,219,8745,956,353,8216,8712,918,8722,176,8743,964,163,164,8747,251,8971,8194,8629,249,8707,8773,952,8853,8240,194,982,203,934,205,34,218,927,8800,191,951,8217,253,929,8595,202,937,226,8764,966,962,175,977,199,186,8657,8704,946,402,961,181,233,969,183,915,8207,9001,9824,8839,254,246,8744,187,8706,167,8220,9829,963,243};
-    for (int i=0; i<entityName.length; i++) {
-      entityTable.put(entityName[i], new Character(entityVal[i]));
-    }
-    // special-case nbsp to a simple space instead of 0xa0
-    entityTable.put("nbsp",new Character(' '));
-  }
-
-}
-
-/********************* htmlentity.py **********************
-# a simple python script to generate an HTML entity table
-# from text taken from http://www.w3.org/TR/REC-html40/sgml/entities.html
-
-text="""
-24 Character entity references in HTML 4
-
-Contents
-
-   1. Introduction to character entity references
-   2. Character entity references for ISO 8859-1 characters
-         1. The list of characters
-   3. Character entity references for symbols, mathematical symbols, and Greek letters
-         1. The list of characters
-   4. Character entity references for markup-significant and internationalization characters
-         1. The list of characters
-
-24.1 Introduction to character entity references
-A character entity reference is an SGML construct that references a character of the document character set.
-
-This version of HTML supports several sets of character entity references:
-
-    * ISO 8859-1 (Latin-1) characters In accordance with section 14 of [RFC1866], the set of Latin-1 entities has been extended by this specification to cover the whole right part of ISO-8859-1 (all code positions with the high-order bit set), including the already commonly used &nbsp;, &copy; and &reg;. The names of the entities are taken from the appendices of SGML (defined in [ISO8879]).
-    * symbols, mathematical symbols, and Greek letters. These characters may be represented by glyphs in the Adobe font "Symbol".
-    * markup-significant and internationalization characters (e.g., for bidirectional text).
-
-The following sections present the complete lists of character entity references. Although, by convention, [ISO10646] the comments following each entry are usually written with uppercase letters, we have converted them to lowercase in this specification for reasons of readability.
-24.2 Character entity references for ISO 8859-1 characters
-
-The character entity references in this section produce characters whose numeric equivalents should already be supported by conforming HTML 2.0 user agents. Thus, the character entity reference &divide; is a more convenient form than &#247; for obtaining the division sign.
-
-To support these named entities, user agents need only recognize the entity names and convert them to characters that lie within the repertoire of [ISO88591].
-
-Character 65533 (FFFD hexadecimal) is the last valid character in UCS-2. 65534 (FFFE hexadecimal) is unassigned and reserved as the byte-swapped version of ZERO WIDTH NON-BREAKING SPACE for byte-order detection purposes. 65535 (FFFF hexadecimal) is unassigned.
-24.2.1 The list of characters
-
-<!-- Portions (c) International Organization for Standardization 1986
-     Permission to copy in any form is granted for use with
-     conforming SGML systems and applications as defined in
-     ISO 8879, provided this notice is included in all copies.
--->
-<!-- Character entity set. Typical invocation:
-     <!ENTITY % HTMLlat1 PUBLIC
-       "-//W3C//ENTITIES Latin 1//EN//HTML">
-     %HTMLlat1;
--->
-
-<!ENTITY nbsp   CDATA "&#160;" -- no-break space = non-breaking space,
-                                  U+00A0 ISOnum -->
-<!ENTITY iexcl  CDATA "&#161;" -- inverted exclamation mark, U+00A1 ISOnum -->
-<!ENTITY cent   CDATA "&#162;" -- cent sign, U+00A2 ISOnum -->
-<!ENTITY pound  CDATA "&#163;" -- pound sign, U+00A3 ISOnum -->
-<!ENTITY curren CDATA "&#164;" -- currency sign, U+00A4 ISOnum -->
-<!ENTITY yen    CDATA "&#165;" -- yen sign = yuan sign, U+00A5 ISOnum -->
-<!ENTITY brvbar CDATA "&#166;" -- broken bar = broken vertical bar,
-                                  U+00A6 ISOnum -->
-<!ENTITY sect   CDATA "&#167;" -- section sign, U+00A7 ISOnum -->
-<!ENTITY uml    CDATA "&#168;" -- diaeresis = spacing diaeresis,
-                                  U+00A8 ISOdia -->
-<!ENTITY copy   CDATA "&#169;" -- copyright sign, U+00A9 ISOnum -->
-<!ENTITY ordf   CDATA "&#170;" -- feminine ordinal indicator, U+00AA ISOnum -->
-<!ENTITY laquo  CDATA "&#171;" -- left-pointing double angle quotation mark
-                                  = left pointing guillemet, U+00AB ISOnum -->
-<!ENTITY not    CDATA "&#172;" -- not sign, U+00AC ISOnum -->
-<!ENTITY shy    CDATA "&#173;" -- soft hyphen = discretionary hyphen,
-                                  U+00AD ISOnum -->
-<!ENTITY reg    CDATA "&#174;" -- registered sign = registered trade mark sign,
-                                  U+00AE ISOnum -->
-<!ENTITY macr   CDATA "&#175;" -- macron = spacing macron = overline
-                                  = APL overbar, U+00AF ISOdia -->
-<!ENTITY deg    CDATA "&#176;" -- degree sign, U+00B0 ISOnum -->
-<!ENTITY plusmn CDATA "&#177;" -- plus-minus sign = plus-or-minus sign,
-                                  U+00B1 ISOnum -->
-<!ENTITY sup2   CDATA "&#178;" -- superscript two = superscript digit two
-                                  = squared, U+00B2 ISOnum -->
-<!ENTITY sup3   CDATA "&#179;" -- superscript three = superscript digit three
-                                  = cubed, U+00B3 ISOnum -->
-<!ENTITY acute  CDATA "&#180;" -- acute accent = spacing acute,
-                                  U+00B4 ISOdia -->
-<!ENTITY micro  CDATA "&#181;" -- micro sign, U+00B5 ISOnum -->
-<!ENTITY para   CDATA "&#182;" -- pilcrow sign = paragraph sign,
-                                  U+00B6 ISOnum -->
-<!ENTITY middot CDATA "&#183;" -- middle dot = Georgian comma
-                                  = Greek middle dot, U+00B7 ISOnum -->
-<!ENTITY cedil  CDATA "&#184;" -- cedilla = spacing cedilla, U+00B8 ISOdia -->
-<!ENTITY sup1   CDATA "&#185;" -- superscript one = superscript digit one,
-                                  U+00B9 ISOnum -->
-<!ENTITY ordm   CDATA "&#186;" -- masculine ordinal indicator,
-                                  U+00BA ISOnum -->
-<!ENTITY raquo  CDATA "&#187;" -- right-pointing double angle quotation mark
-                                  = right pointing guillemet, U+00BB ISOnum -->
-<!ENTITY frac14 CDATA "&#188;" -- vulgar fraction one quarter
-                                  = fraction one quarter, U+00BC ISOnum -->
-<!ENTITY frac12 CDATA "&#189;" -- vulgar fraction one half
-                                  = fraction one half, U+00BD ISOnum -->
-<!ENTITY frac34 CDATA "&#190;" -- vulgar fraction three quarters
-                                  = fraction three quarters, U+00BE ISOnum -->
-<!ENTITY iquest CDATA "&#191;" -- inverted question mark
-                                  = turned question mark, U+00BF ISOnum -->
-<!ENTITY Agrave CDATA "&#192;" -- latin capital letter A with grave
-                                  = latin capital letter A grave,
-                                  U+00C0 ISOlat1 -->
-<!ENTITY Aacute CDATA "&#193;" -- latin capital letter A with acute,
-                                  U+00C1 ISOlat1 -->
-<!ENTITY Acirc  CDATA "&#194;" -- latin capital letter A with circumflex,
-                                  U+00C2 ISOlat1 -->
-<!ENTITY Atilde CDATA "&#195;" -- latin capital letter A with tilde,
-                                  U+00C3 ISOlat1 -->
-<!ENTITY Auml   CDATA "&#196;" -- latin capital letter A with diaeresis,
-                                  U+00C4 ISOlat1 -->
-<!ENTITY Aring  CDATA "&#197;" -- latin capital letter A with ring above
-                                  = latin capital letter A ring,
-                                  U+00C5 ISOlat1 -->
-<!ENTITY AElig  CDATA "&#198;" -- latin capital letter AE
-                                  = latin capital ligature AE,
-                                  U+00C6 ISOlat1 -->
-<!ENTITY Ccedil CDATA "&#199;" -- latin capital letter C with cedilla,
-                                  U+00C7 ISOlat1 -->
-<!ENTITY Egrave CDATA "&#200;" -- latin capital letter E with grave,
-                                  U+00C8 ISOlat1 -->
-<!ENTITY Eacute CDATA "&#201;" -- latin capital letter E with acute,
-                                  U+00C9 ISOlat1 -->
-<!ENTITY Ecirc  CDATA "&#202;" -- latin capital letter E with circumflex,
-                                  U+00CA ISOlat1 -->
-<!ENTITY Euml   CDATA "&#203;" -- latin capital letter E with diaeresis,
-                                  U+00CB ISOlat1 -->
-<!ENTITY Igrave CDATA "&#204;" -- latin capital letter I with grave,
-                                  U+00CC ISOlat1 -->
-<!ENTITY Iacute CDATA "&#205;" -- latin capital letter I with acute,
-                                  U+00CD ISOlat1 -->
-<!ENTITY Icirc  CDATA "&#206;" -- latin capital letter I with circumflex,
-                                  U+00CE ISOlat1 -->
-<!ENTITY Iuml   CDATA "&#207;" -- latin capital letter I with diaeresis,
-                                  U+00CF ISOlat1 -->
-<!ENTITY ETH    CDATA "&#208;" -- latin capital letter ETH, U+00D0 ISOlat1 -->
-<!ENTITY Ntilde CDATA "&#209;" -- latin capital letter N with tilde,
-                                  U+00D1 ISOlat1 -->
-<!ENTITY Ograve CDATA "&#210;" -- latin capital letter O with grave,
-                                  U+00D2 ISOlat1 -->
-<!ENTITY Oacute CDATA "&#211;" -- latin capital letter O with acute,
-                                  U+00D3 ISOlat1 -->
-<!ENTITY Ocirc  CDATA "&#212;" -- latin capital letter O with circumflex,
-                                  U+00D4 ISOlat1 -->
-<!ENTITY Otilde CDATA "&#213;" -- latin capital letter O with tilde,
-                                  U+00D5 ISOlat1 -->
-<!ENTITY Ouml   CDATA "&#214;" -- latin capital letter O with diaeresis,
-                                  U+00D6 ISOlat1 -->
-<!ENTITY times  CDATA "&#215;" -- multiplication sign, U+00D7 ISOnum -->
-<!ENTITY Oslash CDATA "&#216;" -- latin capital letter O with stroke
-                                  = latin capital letter O slash,
-                                  U+00D8 ISOlat1 -->
-<!ENTITY Ugrave CDATA "&#217;" -- latin capital letter U with grave,
-                                  U+00D9 ISOlat1 -->
-<!ENTITY Uacute CDATA "&#218;" -- latin capital letter U with acute,
-                                  U+00DA ISOlat1 -->
-<!ENTITY Ucirc  CDATA "&#219;" -- latin capital letter U with circumflex,
-                                  U+00DB ISOlat1 -->
-<!ENTITY Uuml   CDATA "&#220;" -- latin capital letter U with diaeresis,
-                                  U+00DC ISOlat1 -->
-<!ENTITY Yacute CDATA "&#221;" -- latin capital letter Y with acute,
-                                  U+00DD ISOlat1 -->
-<!ENTITY THORN  CDATA "&#222;" -- latin capital letter THORN,
-                                  U+00DE ISOlat1 -->
-<!ENTITY szlig  CDATA "&#223;" -- latin small letter sharp s = ess-zed,
-                                  U+00DF ISOlat1 -->
-<!ENTITY agrave CDATA "&#224;" -- latin small letter a with grave
-                                  = latin small letter a grave,
-                                  U+00E0 ISOlat1 -->
-<!ENTITY aacute CDATA "&#225;" -- latin small letter a with acute,
-                                  U+00E1 ISOlat1 -->
-<!ENTITY acirc  CDATA "&#226;" -- latin small letter a with circumflex,
-                                  U+00E2 ISOlat1 -->
-<!ENTITY atilde CDATA "&#227;" -- latin small letter a with tilde,
-                                  U+00E3 ISOlat1 -->
-<!ENTITY auml   CDATA "&#228;" -- latin small letter a with diaeresis,
-                                  U+00E4 ISOlat1 -->
-<!ENTITY aring  CDATA "&#229;" -- latin small letter a with ring above
-                                  = latin small letter a ring,
-                                  U+00E5 ISOlat1 -->
-<!ENTITY aelig  CDATA "&#230;" -- latin small letter ae
-                                  = latin small ligature ae, U+00E6 ISOlat1 -->
-<!ENTITY ccedil CDATA "&#231;" -- latin small letter c with cedilla,
-                                  U+00E7 ISOlat1 -->
-<!ENTITY egrave CDATA "&#232;" -- latin small letter e with grave,
-                                  U+00E8 ISOlat1 -->
-<!ENTITY eacute CDATA "&#233;" -- latin small letter e with acute,
-                                  U+00E9 ISOlat1 -->
-<!ENTITY ecirc  CDATA "&#234;" -- latin small letter e with circumflex,
-                                  U+00EA ISOlat1 -->
-<!ENTITY euml   CDATA "&#235;" -- latin small letter e with diaeresis,
-                                  U+00EB ISOlat1 -->
-<!ENTITY igrave CDATA "&#236;" -- latin small letter i with grave,
-                                  U+00EC ISOlat1 -->
-<!ENTITY iacute CDATA "&#237;" -- latin small letter i with acute,
-                                  U+00ED ISOlat1 -->
-<!ENTITY icirc  CDATA "&#238;" -- latin small letter i with circumflex,
-                                  U+00EE ISOlat1 -->
-<!ENTITY iuml   CDATA "&#239;" -- latin small letter i with diaeresis,
-                                  U+00EF ISOlat1 -->
-<!ENTITY eth    CDATA "&#240;" -- latin small letter eth, U+00F0 ISOlat1 -->
-<!ENTITY ntilde CDATA "&#241;" -- latin small letter n with tilde,
-                                  U+00F1 ISOlat1 -->
-<!ENTITY ograve CDATA "&#242;" -- latin small letter o with grave,
-                                  U+00F2 ISOlat1 -->
-<!ENTITY oacute CDATA "&#243;" -- latin small letter o with acute,
-                                  U+00F3 ISOlat1 -->
-<!ENTITY ocirc  CDATA "&#244;" -- latin small letter o with circumflex,
-                                  U+00F4 ISOlat1 -->
-<!ENTITY otilde CDATA "&#245;" -- latin small letter o with tilde,
-                                  U+00F5 ISOlat1 -->
-<!ENTITY ouml   CDATA "&#246;" -- latin small letter o with diaeresis,
-                                  U+00F6 ISOlat1 -->
-<!ENTITY divide CDATA "&#247;" -- division sign, U+00F7 ISOnum -->
-<!ENTITY oslash CDATA "&#248;" -- latin small letter o with stroke,
-                                  = latin small letter o slash,
-                                  U+00F8 ISOlat1 -->
-<!ENTITY ugrave CDATA "&#249;" -- latin small letter u with grave,
-                                  U+00F9 ISOlat1 -->
-<!ENTITY uacute CDATA "&#250;" -- latin small letter u with acute,
-                                  U+00FA ISOlat1 -->
-<!ENTITY ucirc  CDATA "&#251;" -- latin small letter u with circumflex,
-                                  U+00FB ISOlat1 -->
-<!ENTITY uuml   CDATA "&#252;" -- latin small letter u with diaeresis,
-                                  U+00FC ISOlat1 -->
-<!ENTITY yacute CDATA "&#253;" -- latin small letter y with acute,
-                                  U+00FD ISOlat1 -->
-<!ENTITY thorn  CDATA "&#254;" -- latin small letter thorn,
-                                  U+00FE ISOlat1 -->
-<!ENTITY yuml   CDATA "&#255;" -- latin small letter y with diaeresis,
-                                  U+00FF ISOlat1 -->
-
-24.3 Character entity references for symbols, mathematical symbols, and Greek letters
-
-The character entity references in this section produce characters that may be represented by glyphs in the widely available Adobe Symbol font, including Greek characters, various bracketing symbols, and a selection of mathematical operators such as gradient, product, and summation symbols.
-
-To support these entities, user agents may support full [ISO10646] or use other means. Display of glyphs for these characters may be obtained by being able to display the relevant [ISO10646] characters or by other means, such as internally mapping the listed entities, numeric character references, and characters to the appropriate position in some font that contains the requisite glyphs.
-
-When to use Greek entities. This entity set contains all the letters used in modern Greek. However, it does not include Greek punctuation, precomposed accented characters nor the non-spacing accents (tonos, dialytika) required to compose them. There are no archaic letters, Coptic-unique letters, or precomposed letters for Polytonic Greek. The entities defined here are not intended for the representation of modern Greek text and would not be an efficient representation; rather, they are intended for occasional Greek letters used in technical and mathematical works.
-24.3.1 The list of characters
-
-<!-- Mathematical, Greek and Symbolic characters for HTML -->
-
-<!-- Character entity set. Typical invocation:
-     <!ENTITY % HTMLsymbol PUBLIC
-       "-//W3C//ENTITIES Symbols//EN//HTML">
-     %HTMLsymbol; -->
-
-<!-- Portions (c) International Organization for Standardization 1986:
-     Permission to copy in any form is granted for use with
-     conforming SGML systems and applications as defined in
-     ISO 8879, provided this notice is included in all copies.
--->
-
-<!-- Relevant ISO entity set is given unless names are newly introduced.
-     New names (i.e., not in ISO 8879 list) do not clash with any
-     existing ISO 8879 entity names. ISO 10646 character numbers
-     are given for each character, in hex. CDATA values are decimal
-     conversions of the ISO 10646 values and refer to the document
-     character set. Names are ISO 10646 names.
-
--->
-
-<!-- Latin Extended-B -->
-<!ENTITY fnof     CDATA "&#402;" -- latin small f with hook = function
-                                    = florin, U+0192 ISOtech -->
-
-<!-- Greek -->
-<!ENTITY Alpha    CDATA "&#913;" -- greek capital letter alpha, U+0391 -->
-<!ENTITY Beta     CDATA "&#914;" -- greek capital letter beta, U+0392 -->
-<!ENTITY Gamma    CDATA "&#915;" -- greek capital letter gamma,
-                                    U+0393 ISOgrk3 -->
-<!ENTITY Delta    CDATA "&#916;" -- greek capital letter delta,
-                                    U+0394 ISOgrk3 -->
-<!ENTITY Epsilon  CDATA "&#917;" -- greek capital letter epsilon, U+0395 -->
-<!ENTITY Zeta     CDATA "&#918;" -- greek capital letter zeta, U+0396 -->
-<!ENTITY Eta      CDATA "&#919;" -- greek capital letter eta, U+0397 -->
-<!ENTITY Theta    CDATA "&#920;" -- greek capital letter theta,
-                                    U+0398 ISOgrk3 -->
-<!ENTITY Iota     CDATA "&#921;" -- greek capital letter iota, U+0399 -->
-<!ENTITY Kappa    CDATA "&#922;" -- greek capital letter kappa, U+039A -->
-<!ENTITY Lambda   CDATA "&#923;" -- greek capital letter lambda,
-                                    U+039B ISOgrk3 -->
-<!ENTITY Mu       CDATA "&#924;" -- greek capital letter mu, U+039C -->
-<!ENTITY Nu       CDATA "&#925;" -- greek capital letter nu, U+039D -->
-<!ENTITY Xi       CDATA "&#926;" -- greek capital letter xi, U+039E ISOgrk3 -->
-<!ENTITY Omicron  CDATA "&#927;" -- greek capital letter omicron, U+039F -->
-<!ENTITY Pi       CDATA "&#928;" -- greek capital letter pi, U+03A0 ISOgrk3 -->
-<!ENTITY Rho      CDATA "&#929;" -- greek capital letter rho, U+03A1 -->
-<!-- there is no Sigmaf, and no U+03A2 character either -->
-<!ENTITY Sigma    CDATA "&#931;" -- greek capital letter sigma,
-                                    U+03A3 ISOgrk3 -->
-<!ENTITY Tau      CDATA "&#932;" -- greek capital letter tau, U+03A4 -->
-<!ENTITY Upsilon  CDATA "&#933;" -- greek capital letter upsilon,
-                                    U+03A5 ISOgrk3 -->
-<!ENTITY Phi      CDATA "&#934;" -- greek capital letter phi,
-                                    U+03A6 ISOgrk3 -->
-<!ENTITY Chi      CDATA "&#935;" -- greek capital letter chi, U+03A7 -->
-<!ENTITY Psi      CDATA "&#936;" -- greek capital letter psi,
-                                    U+03A8 ISOgrk3 -->
-<!ENTITY Omega    CDATA "&#937;" -- greek capital letter omega,
-                                    U+03A9 ISOgrk3 -->
-
-<!ENTITY alpha    CDATA "&#945;" -- greek small letter alpha,
-                                    U+03B1 ISOgrk3 -->
-<!ENTITY beta     CDATA "&#946;" -- greek small letter beta, U+03B2 ISOgrk3 -->
-<!ENTITY gamma    CDATA "&#947;" -- greek small letter gamma,
-                                    U+03B3 ISOgrk3 -->
-<!ENTITY delta    CDATA "&#948;" -- greek small letter delta,
-                                    U+03B4 ISOgrk3 -->
-<!ENTITY epsilon  CDATA "&#949;" -- greek small letter epsilon,
-                                    U+03B5 ISOgrk3 -->
-<!ENTITY zeta     CDATA "&#950;" -- greek small letter zeta, U+03B6 ISOgrk3 -->
-<!ENTITY eta      CDATA "&#951;" -- greek small letter eta, U+03B7 ISOgrk3 -->
-<!ENTITY theta    CDATA "&#952;" -- greek small letter theta,
-                                    U+03B8 ISOgrk3 -->
-<!ENTITY iota     CDATA "&#953;" -- greek small letter iota, U+03B9 ISOgrk3 -->
-<!ENTITY kappa    CDATA "&#954;" -- greek small letter kappa,
-                                    U+03BA ISOgrk3 -->
-<!ENTITY lambda   CDATA "&#955;" -- greek small letter lambda,
-                                    U+03BB ISOgrk3 -->
-<!ENTITY mu       CDATA "&#956;" -- greek small letter mu, U+03BC ISOgrk3 -->
-<!ENTITY nu       CDATA "&#957;" -- greek small letter nu, U+03BD ISOgrk3 -->
-<!ENTITY xi       CDATA "&#958;" -- greek small letter xi, U+03BE ISOgrk3 -->
-<!ENTITY omicron  CDATA "&#959;" -- greek small letter omicron, U+03BF NEW -->
-<!ENTITY pi       CDATA "&#960;" -- greek small letter pi, U+03C0 ISOgrk3 -->
-<!ENTITY rho      CDATA "&#961;" -- greek small letter rho, U+03C1 ISOgrk3 -->
-<!ENTITY sigmaf   CDATA "&#962;" -- greek small letter final sigma,
-                                    U+03C2 ISOgrk3 -->
-<!ENTITY sigma    CDATA "&#963;" -- greek small letter sigma,
-                                    U+03C3 ISOgrk3 -->
-<!ENTITY tau      CDATA "&#964;" -- greek small letter tau, U+03C4 ISOgrk3 -->
-<!ENTITY upsilon  CDATA "&#965;" -- greek small letter upsilon,
-                                    U+03C5 ISOgrk3 -->
-<!ENTITY phi      CDATA "&#966;" -- greek small letter phi, U+03C6 ISOgrk3 -->
-<!ENTITY chi      CDATA "&#967;" -- greek small letter chi, U+03C7 ISOgrk3 -->
-<!ENTITY psi      CDATA "&#968;" -- greek small letter psi, U+03C8 ISOgrk3 -->
-<!ENTITY omega    CDATA "&#969;" -- greek small letter omega,
-                                    U+03C9 ISOgrk3 -->
-<!ENTITY thetasym CDATA "&#977;" -- greek small letter theta symbol,
-                                    U+03D1 NEW -->
-<!ENTITY upsih    CDATA "&#978;" -- greek upsilon with hook symbol,
-                                    U+03D2 NEW -->
-<!ENTITY piv      CDATA "&#982;" -- greek pi symbol, U+03D6 ISOgrk3 -->
-
-<!-- General Punctuation -->
-<!ENTITY bull     CDATA "&#8226;" -- bullet = black small circle,
-                                     U+2022 ISOpub  -->
-<!-- bullet is NOT the same as bullet operator, U+2219 -->
-<!ENTITY hellip   CDATA "&#8230;" -- horizontal ellipsis = three dot leader,
-                                     U+2026 ISOpub  -->
-<!ENTITY prime    CDATA "&#8242;" -- prime = minutes = feet, U+2032 ISOtech -->
-<!ENTITY Prime    CDATA "&#8243;" -- double prime = seconds = inches,
-                                     U+2033 ISOtech -->
-<!ENTITY oline    CDATA "&#8254;" -- overline = spacing overscore,
-                                     U+203E NEW -->
-<!ENTITY frasl    CDATA "&#8260;" -- fraction slash, U+2044 NEW -->
-
-<!-- Letterlike Symbols -->
-<!ENTITY weierp   CDATA "&#8472;" -- script capital P = power set
-                                     = Weierstrass p, U+2118 ISOamso -->
-<!ENTITY image    CDATA "&#8465;" -- blackletter capital I = imaginary part,
-                                     U+2111 ISOamso -->
-<!ENTITY real     CDATA "&#8476;" -- blackletter capital R = real part symbol,
-                                     U+211C ISOamso -->
-<!ENTITY trade    CDATA "&#8482;" -- trade mark sign, U+2122 ISOnum -->
-<!ENTITY alefsym  CDATA "&#8501;" -- alef symbol = first transfinite cardinal,
-                                     U+2135 NEW -->
-<!-- alef symbol is NOT the same as hebrew letter alef,
-     U+05D0 although the same glyph could be used to depict both characters -->
-
-<!-- Arrows -->
-<!ENTITY larr     CDATA "&#8592;" -- leftwards arrow, U+2190 ISOnum -->
-<!ENTITY uarr     CDATA "&#8593;" -- upwards arrow, U+2191 ISOnum-->
-<!ENTITY rarr     CDATA "&#8594;" -- rightwards arrow, U+2192 ISOnum -->
-<!ENTITY darr     CDATA "&#8595;" -- downwards arrow, U+2193 ISOnum -->
-<!ENTITY harr     CDATA "&#8596;" -- left right arrow, U+2194 ISOamsa -->
-<!ENTITY crarr    CDATA "&#8629;" -- downwards arrow with corner leftwards
-                                     = carriage return, U+21B5 NEW -->
-<!ENTITY lArr     CDATA "&#8656;" -- leftwards double arrow, U+21D0 ISOtech -->
-<!-- ISO 10646 does not say that lArr is the same as the 'is implied by' arrow
-    but also does not have any other character for that function. So ? lArr can
-    be used for 'is implied by' as ISOtech suggests -->
-<!ENTITY uArr     CDATA "&#8657;" -- upwards double arrow, U+21D1 ISOamsa -->
-<!ENTITY rArr     CDATA "&#8658;" -- rightwards double arrow,
-                                     U+21D2 ISOtech -->
-<!-- ISO 10646 does not say this is the 'implies' character but does not have
-     another character with this function so ?
-     rArr can be used for 'implies' as ISOtech suggests -->
-<!ENTITY dArr     CDATA "&#8659;" -- downwards double arrow, U+21D3 ISOamsa -->
-<!ENTITY hArr     CDATA "&#8660;" -- left right double arrow,
-                                     U+21D4 ISOamsa -->
-
-<!-- Mathematical Operators -->
-<!ENTITY forall   CDATA "&#8704;" -- for all, U+2200 ISOtech -->
-<!ENTITY part     CDATA "&#8706;" -- partial differential, U+2202 ISOtech  -->
-<!ENTITY exist    CDATA "&#8707;" -- there exists, U+2203 ISOtech -->
-<!ENTITY empty    CDATA "&#8709;" -- empty set = null set = diameter,
-                                     U+2205 ISOamso -->
-<!ENTITY nabla    CDATA "&#8711;" -- nabla = backward difference,
-                                     U+2207 ISOtech -->
-<!ENTITY isin     CDATA "&#8712;" -- element of, U+2208 ISOtech -->
-<!ENTITY notin    CDATA "&#8713;" -- not an element of, U+2209 ISOtech -->
-<!ENTITY ni       CDATA "&#8715;" -- contains as member, U+220B ISOtech -->
-<!-- should there be a more memorable name than 'ni'? -->
-<!ENTITY prod     CDATA "&#8719;" -- n-ary product = product sign,
-                                     U+220F ISOamsb -->
-<!-- prod is NOT the same character as U+03A0 'greek capital letter pi' though
-     the same glyph might be used for both -->
-<!ENTITY sum      CDATA "&#8721;" -- n-ary sumation, U+2211 ISOamsb -->
-<!-- sum is NOT the same character as U+03A3 'greek capital letter sigma'
-     though the same glyph might be used for both -->
-<!ENTITY minus    CDATA "&#8722;" -- minus sign, U+2212 ISOtech -->
-<!ENTITY lowast   CDATA "&#8727;" -- asterisk operator, U+2217 ISOtech -->
-<!ENTITY radic    CDATA "&#8730;" -- square root = radical sign,
-                                     U+221A ISOtech -->
-<!ENTITY prop     CDATA "&#8733;" -- proportional to, U+221D ISOtech -->
-<!ENTITY infin    CDATA "&#8734;" -- infinity, U+221E ISOtech -->
-<!ENTITY ang      CDATA "&#8736;" -- angle, U+2220 ISOamso -->
-<!ENTITY and      CDATA "&#8743;" -- logical and = wedge, U+2227 ISOtech -->
-<!ENTITY or       CDATA "&#8744;" -- logical or = vee, U+2228 ISOtech -->
-<!ENTITY cap      CDATA "&#8745;" -- intersection = cap, U+2229 ISOtech -->
-<!ENTITY cup      CDATA "&#8746;" -- union = cup, U+222A ISOtech -->
-<!ENTITY int      CDATA "&#8747;" -- integral, U+222B ISOtech -->
-<!ENTITY there4   CDATA "&#8756;" -- therefore, U+2234 ISOtech -->
-<!ENTITY sim      CDATA "&#8764;" -- tilde operator = varies with = similar to,
-                                     U+223C ISOtech -->
-<!-- tilde operator is NOT the same character as the tilde, U+007E,
-     although the same glyph might be used to represent both  -->
-<!ENTITY cong     CDATA "&#8773;" -- approximately equal to, U+2245 ISOtech -->
-<!ENTITY asymp    CDATA "&#8776;" -- almost equal to = asymptotic to,
-                                     U+2248 ISOamsr -->
-<!ENTITY ne       CDATA "&#8800;" -- not equal to, U+2260 ISOtech -->
-<!ENTITY equiv    CDATA "&#8801;" -- identical to, U+2261 ISOtech -->
-<!ENTITY le       CDATA "&#8804;" -- less-than or equal to, U+2264 ISOtech -->
-<!ENTITY ge       CDATA "&#8805;" -- greater-than or equal to,
-                                     U+2265 ISOtech -->
-<!ENTITY sub      CDATA "&#8834;" -- subset of, U+2282 ISOtech -->
-<!ENTITY sup      CDATA "&#8835;" -- superset of, U+2283 ISOtech -->
-<!-- note that nsup, 'not a superset of, U+2283' is not covered by the Symbol
-     font encoding and is not included. Should it be, for symmetry?
-     It is in ISOamsn  -->
-<!ENTITY nsub     CDATA "&#8836;" -- not a subset of, U+2284 ISOamsn -->
-<!ENTITY sube     CDATA "&#8838;" -- subset of or equal to, U+2286 ISOtech -->
-<!ENTITY supe     CDATA "&#8839;" -- superset of or equal to,
-                                     U+2287 ISOtech -->
-<!ENTITY oplus    CDATA "&#8853;" -- circled plus = direct sum,
-                                     U+2295 ISOamsb -->
-<!ENTITY otimes   CDATA "&#8855;" -- circled times = vector product,
-                                     U+2297 ISOamsb -->
-<!ENTITY perp     CDATA "&#8869;" -- up tack = orthogonal to = perpendicular,
-                                     U+22A5 ISOtech -->
-<!ENTITY sdot     CDATA "&#8901;" -- dot operator, U+22C5 ISOamsb -->
-<!-- dot operator is NOT the same character as U+00B7 middle dot -->
-
-<!-- Miscellaneous Technical -->
-<!ENTITY lceil    CDATA "&#8968;" -- left ceiling = apl upstile,
-                                     U+2308 ISOamsc  -->
-<!ENTITY rceil    CDATA "&#8969;" -- right ceiling, U+2309 ISOamsc  -->
-<!ENTITY lfloor   CDATA "&#8970;" -- left floor = apl downstile,
-                                     U+230A ISOamsc  -->
-<!ENTITY rfloor   CDATA "&#8971;" -- right floor, U+230B ISOamsc  -->
-<!ENTITY lang     CDATA "&#9001;" -- left-pointing angle bracket = bra,
-                                     U+2329 ISOtech -->
-<!-- lang is NOT the same character as U+003C 'less than'
-     or U+2039 'single left-pointing angle quotation mark' -->
-<!ENTITY rang     CDATA "&#9002;" -- right-pointing angle bracket = ket,
-                                     U+232A ISOtech -->
-<!-- rang is NOT the same character as U+003E 'greater than'
-     or U+203A 'single right-pointing angle quotation mark' -->
-
-<!-- Geometric Shapes -->
-<!ENTITY loz      CDATA "&#9674;" -- lozenge, U+25CA ISOpub -->
-
-<!-- Miscellaneous Symbols -->
-<!ENTITY spades   CDATA "&#9824;" -- black spade suit, U+2660 ISOpub -->
-<!-- black here seems to mean filled as opposed to hollow -->
-<!ENTITY clubs    CDATA "&#9827;" -- black club suit = shamrock,
-                                     U+2663 ISOpub -->
-<!ENTITY hearts   CDATA "&#9829;" -- black heart suit = valentine,
-                                     U+2665 ISOpub -->
-<!ENTITY diams    CDATA "&#9830;" -- black diamond suit, U+2666 ISOpub -->
-
-24.4 Character entity references for markup-significant and internationalization characters
-
-The character entity references in this section are for escaping markup-significant characters (these are the same as those in HTML 2.0 and 3.2), for denoting spaces and dashes. Other characters in this section apply to internationalization issues such as the disambiguation of bidirectional text (see the section on bidirectional text for details).
-
-Entities have also been added for the remaining characters occurring in CP-1252 which do not occur in the HTMLlat1 or HTMLsymbol entity sets. These all occur in the 128 to 159 range within the CP-1252 charset. These entities permit the characters to be denoted in a platform-independent manner.
-
-To support these entities, user agents may support full [ISO10646] or use other means. Display of glyphs for these characters may be obtained by being able to display the relevant [ISO10646] characters or by other means, such as internally mapping the listed entities, numeric character references, and characters to the appropriate position in some font that contains the requisite glyphs.
-24.4.1 The list of characters
-
-<!-- Special characters for HTML -->
-
-<!-- Character entity set. Typical invocation:
-     <!ENTITY % HTMLspecial PUBLIC
-       "-//W3C//ENTITIES Special//EN//HTML">
-     %HTMLspecial; -->
-
-<!-- Portions (c) International Organization for Standardization 1986:
-     Permission to copy in any form is granted for use with
-     conforming SGML systems and applications as defined in
-     ISO 8879, provided this notice is included in all copies.
--->
-
-<!-- Relevant ISO entity set is given unless names are newly introduced.
-     New names (i.e., not in ISO 8879 list) do not clash with any
-     existing ISO 8879 entity names. ISO 10646 character numbers
-     are given for each character, in hex. CDATA values are decimal
-     conversions of the ISO 10646 values and refer to the document
-     character set. Names are ISO 10646 names.
-
--->
-
-<!-- C0 Controls and Basic Latin -->
-<!ENTITY quot    CDATA "&#34;"   -- quotation mark = APL quote,
-                                    U+0022 ISOnum -->
-<!ENTITY amp     CDATA "&#38;"   -- ampersand, U+0026 ISOnum -->
-<!ENTITY lt      CDATA "&#60;"   -- less-than sign, U+003C ISOnum -->
-<!ENTITY gt      CDATA "&#62;"   -- greater-than sign, U+003E ISOnum -->
-
-<!-- Latin Extended-A -->
-<!ENTITY OElig   CDATA "&#338;"  -- latin capital ligature OE,
-                                    U+0152 ISOlat2 -->
-<!ENTITY oelig   CDATA "&#339;"  -- latin small ligature oe, U+0153 ISOlat2 -->
-<!-- ligature is a misnomer, this is a separate character in some languages -->
-<!ENTITY Scaron  CDATA "&#352;"  -- latin capital letter S with caron,
-                                    U+0160 ISOlat2 -->
-<!ENTITY scaron  CDATA "&#353;"  -- latin small letter s with caron,
-                                    U+0161 ISOlat2 -->
-<!ENTITY Yuml    CDATA "&#376;"  -- latin capital letter Y with diaeresis,
-                                    U+0178 ISOlat2 -->
-
-<!-- Spacing Modifier Letters -->
-<!ENTITY circ    CDATA "&#710;"  -- modifier letter circumflex accent,
-                                    U+02C6 ISOpub -->
-<!ENTITY tilde   CDATA "&#732;"  -- small tilde, U+02DC ISOdia -->
-
-<!-- General Punctuation -->
-<!ENTITY ensp    CDATA "&#8194;" -- en space, U+2002 ISOpub -->
-<!ENTITY emsp    CDATA "&#8195;" -- em space, U+2003 ISOpub -->
-<!ENTITY thinsp  CDATA "&#8201;" -- thin space, U+2009 ISOpub -->
-<!ENTITY zwnj    CDATA "&#8204;" -- zero width non-joiner,
-                                    U+200C NEW RFC 2070 -->
-<!ENTITY zwj     CDATA "&#8205;" -- zero width joiner, U+200D NEW RFC 2070 -->
-<!ENTITY lrm     CDATA "&#8206;" -- left-to-right mark, U+200E NEW RFC 2070 -->
-<!ENTITY rlm     CDATA "&#8207;" -- right-to-left mark, U+200F NEW RFC 2070 -->
-<!ENTITY ndash   CDATA "&#8211;" -- en dash, U+2013 ISOpub -->
-<!ENTITY mdash   CDATA "&#8212;" -- em dash, U+2014 ISOpub -->
-<!ENTITY lsquo   CDATA "&#8216;" -- left single quotation mark,
-                                    U+2018 ISOnum -->
-<!ENTITY rsquo   CDATA "&#8217;" -- right single quotation mark,
-                                    U+2019 ISOnum -->
-<!ENTITY sbquo   CDATA "&#8218;" -- single low-9 quotation mark, U+201A NEW -->
-<!ENTITY ldquo   CDATA "&#8220;" -- left double quotation mark,
-                                    U+201C ISOnum -->
-<!ENTITY rdquo   CDATA "&#8221;" -- right double quotation mark,
-                                    U+201D ISOnum -->
-<!ENTITY bdquo   CDATA "&#8222;" -- double low-9 quotation mark, U+201E NEW -->
-<!ENTITY dagger  CDATA "&#8224;" -- dagger, U+2020 ISOpub -->
-<!ENTITY Dagger  CDATA "&#8225;" -- double dagger, U+2021 ISOpub -->
-<!ENTITY permil  CDATA "&#8240;" -- per mille sign, U+2030 ISOtech -->
-<!ENTITY lsaquo  CDATA "&#8249;" -- single left-pointing angle quotation mark,
-                                    U+2039 ISO proposed -->
-<!-- lsaquo is proposed but not yet ISO standardized -->
-<!ENTITY rsaquo  CDATA "&#8250;" -- single right-pointing angle quotation mark,
-                                    U+203A ISO proposed -->
-<!-- rsaquo is proposed but not yet ISO standardized -->
-<!ENTITY euro   CDATA "&#8364;"  -- euro sign, U+20AC NEW -->
-"""
-
-codes={}
-for line in text.split('\n'):
-  parts = line.split()
-  if len(parts)<4 or parts[0]!='<!ENTITY' or parts[2]!='CDATA': continue
-  codes[parts[1]] = parts[3].strip('&#";')
-
-print 'entityName={', ','.join([ '"'+key+'"' for key in codes]), '};'
-print 'entityVal={', ','.join([ str(codes[key]) for key in codes]), '};'
-
-
-********************** end htmlentity.py ********************/
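
For reference, a rough Java port of the deleted htmlentity.py generator is sketched below. The class and method names are hypothetical; it assumes the same <!ENTITY name CDATA "&#NNN;" ...> line format as the spec text quoted above, and builds the same name-to-codepoint mapping that the Python script printed as the entityName/entityVal arrays.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class EntityTableGenerator {
      /** Parses declarations of the form: <!ENTITY name CDATA "&#NNN;" -- comment --> */
      public static Map<String, Integer> parse(String text) {
        Map<String, Integer> codes = new LinkedHashMap<>();
        for (String line : text.split("\n")) {
          String[] parts = line.trim().split("\\s+");
          if (parts.length < 4 || !parts[0].equals("<!ENTITY") || !parts[2].equals("CDATA")) {
            continue;
          }
          // parts[3] looks like "&#8721;" -- strip the wrapper, keep the decimal code point
          String value = parts[3].replaceAll("[&#\";]", "");
          codes.put(parts[1], Integer.parseInt(value));
        }
        return codes;
      }
    }
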
diff --git a/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilterFactory.java b/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilterFactory.java
deleted file mode 100644
index e326856..0000000
--- a/solr/core/src/java/org/apache/solr/analysis/LegacyHTMLStripCharFilterFactory.java
+++ /dev/null
@@ -1,71 +0,0 @@
-package org.apache.solr.analysis;
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.Reader;
-import java.util.Map;
-
-import org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory;
-import org.apache.lucene.analysis.util.CharFilterFactory;
-
-/**
- * Factory for {@link LegacyHTMLStripCharFilter}.
- * <pre class="prettyprint" >
- * &lt;fieldType name="text_html_legacy" class="solr.TextField" positionIncrementGap="100"&gt;
- *   &lt;analyzer&gt;
- *     &lt;charFilter class="solr.LegacyHTMLStripCharFilterFactory"/&gt;
- *     &lt;tokenizer class="solr.WhitespaceTokenizerFactory"/&gt;
- *   &lt;/analyzer&gt;
- * &lt;/fieldType&gt;
- * </pre>
- * <p>
- * This factory is <b>NOT</b> recommended for new users and should be
- * considered <b>UNSUPPORTED</b>.
- * </p>
- * <p>
- * In Solr version 3.5 and earlier, <tt>HTMLStripCharFilter(Factory)</tt>
- * had known bugs in the offsets it provided, triggering e.g. exceptions in
- * highlighting.
- * </p>
- * <p>
- * This class is provided as possible alternative for people who depend on
- * the "broken" behavior of <tt>HTMLStripCharFilter</tt> in Solr version 3.5
- * and earlier, and/or who don't like the changes introduced by the Solr 3.6+
- * version of <tt>HTMLStripCharFilterFactory</tt>.  (See the 3.6.0 release
- * section of lucene/CHANGES.txt for a list of differences in behavior.)
- * </p>
- * @deprecated use {@link HTMLStripCharFilterFactory}
- */
-@Deprecated
-public class LegacyHTMLStripCharFilterFactory extends CharFilterFactory {
-
-  /** Creates a new LegacyHTMLStripCharFilterFactory */
-  public LegacyHTMLStripCharFilterFactory(Map<String,String> args) {
-    super(args);
-    if (!args.isEmpty()) {
-      throw new IllegalArgumentException("Unknown parameters: " + args);
-    }
-  }
-  
-  @Override
-  public LegacyHTMLStripCharFilter create(Reader input) {
-    return new LegacyHTMLStripCharFilter(input);
-  }
-
-}
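
With the deprecated factory removed, the supported path is the Lucene HTMLStripCharFilterFactory that its javadoc pointed to. A minimal sketch of programmatic use; the helper class is illustrative, and the empty args map mirrors the no-parameter configuration in the deleted fieldType example.

    import java.io.Reader;
    import java.io.StringReader;
    import java.util.HashMap;

    public class HtmlStripExample {
      /** Wraps the input in an HTML-stripping CharFilter, as the fieldType example does via config. */
      public static Reader strip(String html) {
        org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory factory =
            new org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory(new HashMap<String, String>());
        return factory.create(new StringReader(html));
      }
    }
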
diff --git a/solr/core/src/java/org/apache/solr/analysis/SolrAnalyzer.java b/solr/core/src/java/org/apache/solr/analysis/SolrAnalyzer.java
index 28c2bee..a33a373 100644
--- a/solr/core/src/java/org/apache/solr/analysis/SolrAnalyzer.java
+++ b/solr/core/src/java/org/apache/solr/analysis/SolrAnalyzer.java
@@ -17,10 +17,9 @@
 
 package org.apache.solr.analysis;
 
-import org.apache.lucene.analysis.*;
+import org.apache.lucene.analysis.Analyzer;
 
 import java.io.Reader;
-import java.io.IOException;
 
 /**
  *
@@ -37,14 +36,8 @@
     return posIncGap;
   }
 
-  /** wrap the reader in a CharStream, if appropriate */
-  @Deprecated
-  public Reader charStream(Reader reader) {
-    return reader;
-  }
-
   @Override
   protected Reader initReader(String fieldName, Reader reader) {
-    return charStream(reader);
+    return reader;
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
index 376f08d..adc271c 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.client.solrj.embedded;
 
+import com.google.common.base.Strings;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -30,7 +31,6 @@
 import org.apache.solr.common.util.JavaBinCodec;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
@@ -62,25 +62,10 @@
   
   /**
    * Use the other constructor using a CoreContainer and a name.
-   * @deprecated use {@link #EmbeddedSolrServer(CoreContainer, String)} instead.
    */
-  @Deprecated
   public EmbeddedSolrServer(SolrCore core)
   {
-    if ( core == null ) {
-      throw new NullPointerException("SolrCore instance required");
-    }
-    CoreDescriptor dcore = core.getCoreDescriptor();
-    if (dcore == null)
-      throw new NullPointerException("CoreDescriptor required");
-    
-    CoreContainer cores = dcore.getCoreContainer();
-    if (cores == null)
-      throw new NullPointerException("CoreContainer required");
-    
-    coreName = dcore.getName();
-    coreContainer = cores;
-    _parser = new SolrRequestParsers( null );
+    this(core.getCoreDescriptor().getCoreContainer(), core.getName());
   }
     
   /**
@@ -93,8 +78,10 @@
     if ( coreContainer == null ) {
       throw new NullPointerException("CoreContainer instance required");
     }
+    if (Strings.isNullOrEmpty(coreName))
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Core name cannot be empty");
     this.coreContainer = coreContainer;
-    this.coreName = coreName == null? "" : coreName;
+    this.coreName = coreName;
     _parser = new SolrRequestParsers( null );
   }
   
@@ -129,10 +116,8 @@
         }
       }
       // Perhaps the path is to manage the cores
-      if( handler == null &&
-          coreContainer != null &&
-          path.equals( coreContainer.getAdminPath() ) ) {
-        handler = coreContainer.getMultiCoreHandler();
+      if (handler == null) {
+        handler = coreContainer.getRequestHandler(path);
       }
     }
     if( handler == null ) {
@@ -210,16 +195,11 @@
       }
       
       // Now write it out
-      NamedList<Object> normalized = getParsedResponse(req, rsp);
+      NamedList<Object> normalized = BinaryResponseWriter.getParsedResponse(req, rsp);
       return normalized;
-    }
-    catch( IOException iox ) {
+    } catch( IOException | SolrException iox ) {
       throw iox;
-    }
-    catch( SolrException sx ) {
-      throw sx;
-    }
-    catch( Exception ex ) {
+    } catch( Exception ex ) {
       throw new SolrServerException( ex );
     }
     finally {
@@ -230,22 +210,10 @@
   }
   
   /**
-   * Returns a response object equivalent to what you get from the XML/JSON/javabin parser. Documents
-   * become SolrDocuments, DocList becomes SolrDocumentList etc.
-   * 
-   * @deprecated use {@link BinaryResponseWriter#getParsedResponse(SolrQueryRequest, SolrQueryResponse)}
-   */
-  @Deprecated
-  public NamedList<Object> getParsedResponse( SolrQueryRequest req, SolrQueryResponse rsp )
-  {
-    return BinaryResponseWriter.getParsedResponse(req, rsp);
-  }
-  
-  /**
    * Shutdown all cores within the EmbeddedSolrServer instance
    */
   @Override
-  public void shutdown() {
+  public void close() throws IOException {
     coreContainer.shutdown();
   }
   
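With shutdown() replaced by close() above, EmbeddedSolrServer now fits try-with-resources. A minimal usage sketch under that assumption; the core name "collection1" and the helper class are placeholders.

    import java.io.IOException;

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.SolrServerException;
    import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.core.CoreContainer;

    public class EmbeddedClientExample {
      public static long countAll(CoreContainer coreContainer) throws SolrServerException, IOException {
        // close() shuts down the CoreContainer when the try block exits
        try (EmbeddedSolrServer server = new EmbeddedSolrServer(coreContainer, "collection1")) {
          QueryResponse rsp = server.query(new SolrQuery("*:*"));
          return rsp.getResults().getNumFound();
        }
      }
    }
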
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
index 72adbcd..33b4098 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
@@ -19,17 +19,18 @@
 
 import org.apache.solr.servlet.SolrDispatchFilter;
 import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.LowResourceMonitor;
+import org.eclipse.jetty.server.SecureRequestCustomizer;
 import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.bio.SocketConnector;
-import org.eclipse.jetty.server.handler.GzipHandler;
-import org.eclipse.jetty.server.nio.SelectChannelConnector;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.SslConnectionFactory;
 import org.eclipse.jetty.server.session.HashSessionIdManager;
-import org.eclipse.jetty.server.ssl.SslConnector;
-import org.eclipse.jetty.server.ssl.SslSelectChannelConnector;
-import org.eclipse.jetty.server.ssl.SslSocketConnector;
 import org.eclipse.jetty.servlet.FilterHolder;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.servlets.GzipFilter;
 import org.eclipse.jetty.util.component.LifeCycle;
 import org.eclipse.jetty.util.log.Logger;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
@@ -62,6 +63,9 @@
  * @since solr 1.3
  */
 public class JettySolrRunner {
+
+  private static final AtomicLong JETTY_ID_COUNTER = new AtomicLong();
+
   Server server;
 
   FilterHolder dispatchFilter;
@@ -71,6 +75,7 @@
 
   private String solrConfigFilename;
   private String schemaFilename;
+  private final String coreRootDirectory;
 
   private boolean waitOnSolr = false;
 
@@ -89,6 +94,8 @@
 
   private String coreNodeName;
 
+  private final String name;
+
   /** Maps servlet holders (i.e. factories: class + init params) to path specs */
   private SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
   private SortedMap<Class,String> extraRequestFilters;
@@ -147,12 +154,16 @@
 
   public JettySolrRunner(String solrHome, String context, int port) {
     this.init(solrHome, context, port, true);
+    this.name = "jetty-" + JETTY_ID_COUNTER.incrementAndGet();
+    this.coreRootDirectory = System.getProperty("coreRootDirectory", null);
   }
 
   public JettySolrRunner(String solrHome, String context, int port, String solrConfigFilename, String schemaFileName) {
     this.init(solrHome, context, port, true);
     this.solrConfigFilename = solrConfigFilename;
     this.schemaFilename = schemaFileName;
+    this.name = "jetty-" + JETTY_ID_COUNTER.incrementAndGet();
+    this.coreRootDirectory = System.getProperty("coreRootDirectory", null);
   }
   
   public JettySolrRunner(String solrHome, String context, int port,
@@ -160,6 +171,8 @@
     this.init(solrHome, context, port, stopAtShutdown);
     this.solrConfigFilename = solrConfigFilename;
     this.schemaFilename = schemaFileName;
+    this.name = "jetty-" + JETTY_ID_COUNTER.incrementAndGet();
+    this.coreRootDirectory = System.getProperty("coreRootDirectory", null);
   }
 
   /**
@@ -197,23 +210,20 @@
     this.schemaFilename = schemaFileName;
     this.sslConfig = sslConfig;
 
+    this.name = "jetty-" + JETTY_ID_COUNTER.incrementAndGet();
+    this.coreRootDirectory = System.getProperty("coreRootDirectory", null);
+
     this.init(solrHome, context, port, stopAtShutdown);
   }
   
   private void init(String solrHome, String context, int port, boolean stopAtShutdown) {
     this.context = context;
-    server = new Server(port);
 
     this.solrHome = solrHome;
     this.stopAtShutdown = stopAtShutdown;
-    server.setStopAtShutdown(stopAtShutdown);
-    if (!stopAtShutdown) {
-      server.setGracefulShutdown(0);
-    }
+
     System.setProperty("solr.solr.home", solrHome);
     if (System.getProperty("jetty.testMode") != null) {
-      final String connectorName = System.getProperty("tests.jettyConnector", "SelectChannel");
-
       // if this property is true, then jetty will be configured to use SSL
       // leveraging the same system properties as java to specify
       // the keystore/truststore if they are set unless specific config
@@ -227,52 +237,56 @@
       final SslContextFactory sslcontext = new SslContextFactory(false);
       sslInit(useSsl, sslcontext);
 
-      final Connector connector;
-      if ("SelectChannel".equals(connectorName)) {
-        final SelectChannelConnector c = useSsl
-          ? new SslSelectChannelConnector(sslcontext)
-          : new SelectChannelConnector();
-        c.setReuseAddress(true);
-        c.setLowResourcesMaxIdleTime(1500);
-        c.setSoLingerTime(0);
-        connector = c;
-      } else if ("Socket".equals(connectorName)) {
-        final SocketConnector c = useSsl
-          ? new SslSocketConnector(sslcontext)
-          : new SocketConnector();
-        c.setReuseAddress(true);
-        c.setSoLingerTime(0);
-        connector = c;
+      QueuedThreadPool qtp = new QueuedThreadPool();
+      qtp.setMaxThreads(10000);
+      qtp.setIdleTimeout((int) TimeUnit.SECONDS.toMillis(5));
+      qtp.setStopTimeout((int) TimeUnit.MINUTES.toMillis(1));
+
+      server = new Server(qtp);
+      server.setStopAtShutdown(stopAtShutdown);
+      server.manage(qtp);
+
+      ServerConnector connector;
+      if (useSsl) {
+        HttpConfiguration configuration = new HttpConfiguration();
+        configuration.setSecureScheme("https");
+        configuration.addCustomizer(new SecureRequestCustomizer());
+        connector = new ServerConnector(server, new SslConnectionFactory(sslcontext, "http/1.1"),
+            new HttpConnectionFactory(configuration));
       } else {
-        throw new IllegalArgumentException("Illegal value for system property 'tests.jettyConnector': " + connectorName);
+        connector = new ServerConnector(server, new HttpConnectionFactory());
       }
 
+      connector.setReuseAddress(true);
+      connector.setSoLingerTime(0);
       connector.setPort(port);
       connector.setHost("127.0.0.1");
 
-      // Connectors by default inherit server's thread pool.
-      QueuedThreadPool qtp = new QueuedThreadPool();
-      qtp.setMaxThreads(10000);
-      qtp.setMaxIdleTimeMs((int) TimeUnit.MILLISECONDS.toMillis(200));
-      qtp.setMaxStopTimeMs((int) TimeUnit.MINUTES.toMillis(1));
-      server.setThreadPool(qtp);
+      // Enable Low Resources Management
+      LowResourceMonitor lowResources = new LowResourceMonitor(server);
+      lowResources.setLowResourcesIdleTimeout(1500);
+      lowResources.setMaxConnections(10000);
+      server.addBean(lowResources);
 
       server.setConnectors(new Connector[] {connector});
       server.setSessionIdManager(new HashSessionIdManager(new Random()));
     } else {
-      if (server.getThreadPool() == null) {
-        // Connectors by default inherit server's thread pool.
-        QueuedThreadPool qtp = new QueuedThreadPool();
-        qtp.setMaxThreads(10000);
-        qtp.setMaxIdleTimeMs((int) TimeUnit.SECONDS.toMillis(5));
-        qtp.setMaxStopTimeMs((int) TimeUnit.SECONDS.toMillis(1));
-        server.setThreadPool(qtp);
-      }
+      ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory());
+      connector.setPort(port);
+
+      QueuedThreadPool qtp = new QueuedThreadPool();
+      qtp.setMaxThreads(10000);
+      qtp.setIdleTimeout((int) TimeUnit.SECONDS.toMillis(5));
+      qtp.setStopTimeout((int) TimeUnit.SECONDS.toMillis(1));
+
+      server = new Server(qtp);
+      server.setStopAtShutdown(stopAtShutdown);
+      server.manage(qtp);
     }
 
     // Initialize the servlets
-    final ServletContextHandler root = new ServletContextHandler(server,context,ServletContextHandler.SESSIONS);
-    root.setHandler(new GzipHandler());
+    final ServletContextHandler root = new ServletContextHandler(server, context, ServletContextHandler.SESSIONS);
+    root.addFilter(GzipFilter.class, "*", EnumSet.of(DispatcherType.REQUEST));
     server.addLifeCycleListener(new LifeCycle.Listener() {
 
       @Override
@@ -299,6 +313,8 @@
             solrConfigFilename);
         if (schemaFilename != null) System.setProperty("schema", 
             schemaFilename);
+        if (coreRootDirectory != null)
+          System.setProperty("coreRootDirectory", coreRootDirectory);
 //        SolrDispatchFilter filter = new SolrDispatchFilter();
 //        FilterHolder fh = new FilterHolder(filter);
         debugFilter = root.addFilter(DebugFilter.class, "*", EnumSet.of(DispatcherType.REQUEST) );
@@ -309,11 +325,11 @@
               EnumSet.of(DispatcherType.REQUEST)));
           }
         }
-        dispatchFilter = root.addFilter(SolrDispatchFilter.class, "*", EnumSet.of(DispatcherType.REQUEST) );
         for (ServletHolder servletHolder : extraServlets.keySet()) {
           String pathSpec = extraServlets.get(servletHolder);
           root.addServlet(servletHolder, pathSpec);
         }
+        dispatchFilter = root.addFilter(SolrDispatchFilter.class, "*", EnumSet.of(DispatcherType.REQUEST) );
         if (solrConfigFilename != null) System.clearProperty("solrconfig");
         if (schemaFilename != null) System.clearProperty("schema");
         System.clearProperty("solr.solr.home");
@@ -339,7 +355,7 @@
         sslcontext.setKeyStorePassword(sslConfig.getKeyStorePassword());
       }
       if (null != sslConfig.getTrustStore()) {
-        sslcontext.setTrustStore(System
+        sslcontext.setTrustStorePath(System
             .getProperty(sslConfig.getTrustStore()));
       }
       if (null != sslConfig.getTrustStorePassword()) {
@@ -359,7 +375,7 @@
             (System.getProperty("javax.net.ssl.keyStorePassword"));
         }
         if (null != System.getProperty("javax.net.ssl.trustStore")) {
-          sslcontext.setTrustStore
+          sslcontext.setTrustStorePath
             (System.getProperty("javax.net.ssl.trustStore"));
         }
         if (null != System.getProperty("javax.net.ssl.trustStorePassword")) {
@@ -464,7 +480,7 @@
     if (0 == conns.length) {
       throw new RuntimeException("Jetty Server has no Connectors");
     }
-    return (proxyPort != -1) ? proxyPort : conns[0].getLocalPort();
+    return (proxyPort != -1) ? proxyPort : ((ServerConnector) conns[0]).getLocalPort();
   }
   
   /**
@@ -489,7 +505,7 @@
   }
 
   /**
-   * Returns a base URL consisting of the protocal, host, and port for a 
+   * Returns a base URL consisting of the protocol, host, and port for a
    * Connector in use by the Jetty Server contained in this runner.
    */
   public URL getBaseUrl() {
@@ -499,12 +515,12 @@
       if (0 == conns.length) {
         throw new IllegalStateException("Jetty Server has no Connectors");
       }
-      Connector c = conns[0];
+      ServerConnector c = (ServerConnector) conns[0];
       if (c.getLocalPort() < 0) {
         throw new IllegalStateException("Jetty Connector is not open: " + 
                                         c.getLocalPort());
       }
-      protocol = (c instanceof SslConnector) ? "https" : "http";
+      protocol = c.getDefaultProtocol().equals("SSL-http/1.1") ? "https" : "http";
       return new URL(protocol, c.getHost(), c.getLocalPort(), context);
 
     } catch (MalformedURLException e) {
@@ -615,6 +631,11 @@
   }
 
   @Override
+  public void debug(String s, long l) {
+    // no-op: satisfies the debug(String, long) overload of Jetty 9's Logger interface
+  }
+
+  @Override
   public String getName() {
     return toString();
   }
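
The hunk above ports the runner from Jetty 8's SocketConnector/SelectChannelConnector split to Jetty 9's single ServerConnector, where the thread pool is passed to the Server constructor and SSL is layered in as a connection factory. A condensed sketch of that construction pattern; the class name, port, pool size, and timeout values are illustrative.

    import java.util.concurrent.TimeUnit;

    import org.eclipse.jetty.server.HttpConfiguration;
    import org.eclipse.jetty.server.HttpConnectionFactory;
    import org.eclipse.jetty.server.SecureRequestCustomizer;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;
    import org.eclipse.jetty.server.SslConnectionFactory;
    import org.eclipse.jetty.util.ssl.SslContextFactory;
    import org.eclipse.jetty.util.thread.QueuedThreadPool;

    public class Jetty9ConnectorExample {
      public static Server build(int port, boolean useSsl, SslContextFactory ssl) {
        QueuedThreadPool qtp = new QueuedThreadPool();
        qtp.setMaxThreads(10000);
        qtp.setIdleTimeout((int) TimeUnit.SECONDS.toMillis(5));
        Server server = new Server(qtp); // Jetty 9: the thread pool goes to the constructor

        ServerConnector connector;
        if (useSsl) {
          HttpConfiguration conf = new HttpConfiguration();
          conf.setSecureScheme("https");
          conf.addCustomizer(new SecureRequestCustomizer());
          // TLS first, then HTTP/1.1 over the decrypted stream
          connector = new ServerConnector(server,
              new SslConnectionFactory(ssl, "http/1.1"),
              new HttpConnectionFactory(conf));
        } else {
          connector = new ServerConnector(server, new HttpConnectionFactory());
        }
        connector.setPort(port);
        server.addConnector(connector);
        return server;
      }
    }
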
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java b/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
index 02f358a..4f4efcc 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
@@ -17,6 +17,7 @@
  * limitations under the License.
  */
 
+import com.google.common.base.Strings;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.SolrParams;
@@ -49,10 +50,14 @@
   public CloudDescriptor(String coreName, Properties props, CoreDescriptor cd) {
     this.cd = cd;
     this.shardId = props.getProperty(CoreDescriptor.CORE_SHARD, null);
+    if (Strings.isNullOrEmpty(shardId))
+      this.shardId = null;
     // If no collection name is specified, we default to the core name
     this.collectionName = props.getProperty(CoreDescriptor.CORE_COLLECTION, coreName);
     this.roles = props.getProperty(CoreDescriptor.CORE_ROLES, null);
     this.nodeName = props.getProperty(CoreDescriptor.CORE_NODE_NAME);
+    if (Strings.isNullOrEmpty(nodeName))
+      this.nodeName = null;
     this.numShards = PropertiesUtil.toInteger(props.getProperty(CloudDescriptor.NUM_SHARDS), null);
   }
   
diff --git a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
index 1972015..99d7769 100644
--- a/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
@@ -122,10 +122,13 @@
     for (String childName : childNames) {
       if (childName != null) {
         try {
-          ZkNodeProps message = ZkNodeProps.load(zookeeper.getData(dir + "/" + childName, null, null, true));
-          if (message.containsKey(OverseerCollectionProcessor.ASYNC)) {
-            LOG.info(">>>> {}", message.get(OverseerCollectionProcessor.ASYNC));
-            if(message.get(OverseerCollectionProcessor.ASYNC).equals(requestId)) return true;
+          byte[] data = zookeeper.getData(dir + "/" + childName, null, null, true);
+          if (data != null) {
+            ZkNodeProps message = ZkNodeProps.load(data);
+            if (message.containsKey(OverseerCollectionProcessor.ASYNC)) {
+              LOG.debug(">>>> {}", message.get(OverseerCollectionProcessor.ASYNC));
+              if(message.get(OverseerCollectionProcessor.ASYNC).equals(requestId)) return true;
+            }
           }
         } catch (KeeperException.NoNodeException e) {
           // Another client removed the node first, try next
diff --git a/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java b/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java
index 489a52e..830fa96 100644
--- a/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java
+++ b/solr/core/src/java/org/apache/solr/cloud/LeaderInitiatedRecoveryThread.java
@@ -113,9 +113,8 @@
       } else {
         log.info("Asking core={} coreNodeName={} on " + recoveryUrl + " to recover", coreNeedingRecovery, replicaCoreNodeName);
       }
-      
-      HttpSolrClient client = new HttpSolrClient(recoveryUrl);
-      try {
+
+      try (HttpSolrClient client = new HttpSolrClient(recoveryUrl)) {
         client.setSoTimeout(60000);
         client.setConnectionTimeout(15000);
         try {
@@ -139,8 +138,6 @@
             continueTrying = false;
           }                                                
         }
-      } finally {
-        client.shutdown();
       }
       
       // wait a few seconds
@@ -193,7 +190,7 @@
 
         // additional safeguard against the replica trying to be in the active state
         // before acknowledging the leader initiated recovery command
-        if (continueTrying && collection != null && shardId != null) {
+        if (collection != null && shardId != null) {
           try {
             // call out to ZooKeeper to get the leader-initiated recovery state
             String lirState = 
@@ -218,20 +215,25 @@
               List<ZkCoreNodeProps> replicaProps = 
                   zkStateReader.getReplicaProps(collection, shardId, leaderCoreNodeName);
               if (replicaProps != null && replicaProps.size() > 0) {
-                String replicaState = replicaProps.get(0).getState();
-                if (ZkStateReader.ACTIVE.equals(replicaState)) {
-                  // replica published its state as "active", 
-                  // which is bad if lirState is still "down"
-                  if (ZkStateReader.DOWN.equals(lirState)) {
-                    // OK, so the replica thinks it is active, but it never ack'd the leader initiated recovery
-                    // so its state cannot be trusted and it needs to be told to recover again ... and we keep looping here
-                    log.warn("Replica core={} coreNodeName={} set to active but the leader thinks it should be in recovery;"
-                        + " forcing it back to down state to re-run the leader-initiated recovery process; props: "+replicaProps.get(0), coreNeedingRecovery, replicaCoreNodeName);
-                    zkController.ensureReplicaInLeaderInitiatedRecovery(collection, 
-                        shardId, replicaUrl, nodeProps, true); // force republish state to "down"
+                for (ZkCoreNodeProps prop : replicaProps) {
+                  if (replicaCoreNodeName.equals(((Replica) prop.getNodeProps()).getName())) {
+                    String replicaState = prop.getState();
+                    if (ZkStateReader.ACTIVE.equals(replicaState)) {
+                      // replica published its state as "active",
+                      // which is bad if lirState is still "down"
+                      if (ZkStateReader.DOWN.equals(lirState)) {
+                        // OK, so the replica thinks it is active, but it never ack'd the leader initiated recovery
+                        // so its state cannot be trusted and it needs to be told to recover again ... and we keep looping here
+                        log.warn("Replica core={} coreNodeName={} set to active but the leader thinks it should be in recovery;"
+                            + " forcing it back to down state to re-run the leader-initiated recovery process; props: " + prop, coreNeedingRecovery, replicaCoreNodeName);
+                        zkController.ensureReplicaInLeaderInitiatedRecovery(collection,
+                            shardId, replicaUrl, nodeProps, true); // force republish state to "down"
+                      }
+                    }
+                    break;
                   }
-                }                    
-              }                    
+                }
+              }
             }                  
           } catch (Exception ignoreMe) {
             log.warn("Failed to determine state of core={} coreNodeName={} due to: "+ignoreMe, coreNeedingRecovery, replicaCoreNodeName);
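This file is one of several in the patch that replace the new-client/try/finally-shutdown() idiom with try-with-resources, which relies on HttpSolrClient being Closeable in this version. A sketch of the recurring shape, with the same timeouts as the hunk above and ping() standing in for whatever request the caller actually issues.

    import java.io.IOException;

    import org.apache.solr.client.solrj.SolrServerException;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class ClosableClientExample {
      public static void pingNode(String recoveryUrl) throws SolrServerException, IOException {
        // client.close() replaces the old finally { client.shutdown(); } block
        try (HttpSolrClient client = new HttpSolrClient(recoveryUrl)) {
          client.setSoTimeout(60000);
          client.setConnectionTimeout(15000);
          client.ping();
        }
      }
    }
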
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 621d9f3..7918ed3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -58,7 +58,7 @@
 import org.apache.solr.core.ConfigSolr;
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.update.UpdateShardHandler;
-import org.apache.solr.util.IOUtils;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.util.stats.Clock;
 import org.apache.solr.util.stats.Timer;
 import org.apache.solr.util.stats.TimerContext;
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java b/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java
index 7e998cd..2582f13 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerAutoReplicaFailoverThread.java
@@ -17,19 +17,8 @@
  * limitations under the License.
  */
 
-import java.io.Closeable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.Create;
 import org.apache.solr.common.SolrException;
@@ -44,8 +33,19 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
+import java.io.Closeable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
 
 
 // TODO: how to tmp exclude nodes?
@@ -85,6 +85,7 @@
   private volatile boolean isClosed;
   private ZkStateReader zkStateReader;
   private final Cache<String,Long> baseUrlForBadNodes;
+  private Set<String> liveNodes = Collections.emptySet();
 
   private final int workLoopDelay;
   private final int waitAfterExpiration;
@@ -151,11 +152,13 @@
       return;
     }
     if (clusterState != null) {
-      if (lastClusterStateVersion == clusterState.getZkClusterStateVersion() && baseUrlForBadNodes.size() == 0) {
+      if (lastClusterStateVersion == clusterState.getZkClusterStateVersion() && baseUrlForBadNodes.size() == 0 &&
+          liveNodes.equals(clusterState.getLiveNodes())) {
         // nothing has changed, no work to do
         return;
       }
-      
+
+      liveNodes = clusterState.getLiveNodes();
       lastClusterStateVersion = clusterState.getZkClusterStateVersion();
       Set<String> collections = clusterState.getCollections();
       for (final String collection : collections) {
@@ -418,12 +421,11 @@
   private boolean createSolrCore(final String collection,
       final String createUrl, final String dataDir, final String ulogDir,
       final String coreNodeName, final String coreName) {
-    HttpSolrClient server = null;
-    try {
+
+    try (HttpSolrClient client = new HttpSolrClient(createUrl)) {
       log.debug("create url={}", createUrl);
-      server = new HttpSolrClient(createUrl);
-      server.setConnectionTimeout(30000);
-      server.setSoTimeout(60000);
+      client.setConnectionTimeout(30000);
+      client.setSoTimeout(60000);
       Create createCmd = new Create();
       createCmd.setCollection(collection);
       createCmd.setCoreNodeName(coreNodeName);
@@ -432,14 +434,10 @@
       createCmd.setCoreName(coreName);
       createCmd.setDataDir(dataDir);
       createCmd.setUlogDir(ulogDir);
-      server.request(createCmd);
+      client.request(createCmd);
     } catch (Exception e) {
       SolrException.log(log, "Exception trying to create new replica on " + createUrl, e);
       return false;
-    } finally {
-      if (server != null) {
-        server.shutdown();
-      }
     }
     return true;
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
index e559f1f..42cb504 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
@@ -1161,7 +1161,7 @@
 
   private void deleteCollection(ZkNodeProps message, NamedList results)
       throws KeeperException, InterruptedException {
-    String collection = message.getStr("name");
+    final String collection = message.getStr("name");
     try {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set(CoreAdminParams.ACTION, CoreAdminAction.UNLOAD.toString());
@@ -1181,7 +1181,7 @@
       boolean removed = false;
       while (System.nanoTime() < timeout) {
         Thread.sleep(100);
-        removed = !zkStateReader.getClusterState().hasCollection(message.getStr(collection));
+        removed = !zkStateReader.getClusterState().hasCollection(collection);
         if (removed) {
           Thread.sleep(500); // just a bit of time so it's more likely other
                              // readers see on return
@@ -1190,7 +1190,7 @@
       }
       if (!removed) {
         throw new SolrException(ErrorCode.SERVER_ERROR,
-            "Could not fully remove collection: " + message.getStr("name"));
+            "Could not fully remove collection: " + collection);
       }
       
     } finally {
@@ -1801,19 +1801,14 @@
 
 
   static UpdateResponse softCommit(String url) throws SolrServerException, IOException {
-    HttpSolrClient client = null;
-    try {
-      client = new HttpSolrClient(url);
+
+    try (HttpSolrClient client = new HttpSolrClient(url)) {
       client.setConnectionTimeout(30000);
       client.setSoTimeout(120000);
       UpdateRequest ureq = new UpdateRequest();
       ureq.setParams(new ModifiableSolrParams());
       ureq.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, true, true);
       return ureq.process(client);
-    } finally {
-      if (client != null) {
-        client.shutdown();
-      }
     }
   }
   
@@ -2318,6 +2313,13 @@
     if (clusterState.hasCollection(collectionName)) {
       throw new SolrException(ErrorCode.BAD_REQUEST, "collection already exists: " + collectionName);
     }
+
+    String configName = getConfigName(collectionName, message);
+    if (configName == null) {
+      throw new SolrException(ErrorCode.BAD_REQUEST, "No config set found to associate with the collection.");
+    } else if (!validateConfig(configName)) {
+      throw new SolrException(ErrorCode.BAD_REQUEST, "Can not find the specified config set: " + configName);
+    }
     
     try {
       // look at the replication factor and see if it matches reality
@@ -2385,7 +2387,7 @@
       }
       boolean isLegacyCloud =  Overseer.isLegacy(zkStateReader.getClusterProps());
 
-      String configName = createConfNode(collectionName, message, isLegacyCloud);
+      createConfNode(configName, collectionName, isLegacyCloud);
 
       Overseer.getInQueue(zkStateReader.getZkClient()).offer(ZkStateReader.toJSON(message));
 
@@ -2621,24 +2623,38 @@
     } while (srsp != null);
   }
 
-  private String createConfNode(String coll, ZkNodeProps message, boolean isLegacyCloud) throws KeeperException, InterruptedException {
+  private String getConfigName(String coll, ZkNodeProps message) throws KeeperException, InterruptedException {
     String configName = message.getStr(OverseerCollectionProcessor.COLL_CONF);
-    if(configName == null){
+    
+    if (configName == null) {
       // if there is only one conf, use that
-      List<String> configNames=null;
+      List<String> configNames = null;
       try {
         configNames = zkStateReader.getZkClient().getChildren(ZkController.CONFIGS_ZKNODE, null, true);
         if (configNames != null && configNames.size() == 1) {
           configName = configNames.get(0);
           // no config set named, but there is only 1 - use it
           log.info("Only one config set found in zk - using it: " + configName);
+        } else if (configNames.contains(coll)) {
+          configName = coll;
         }
       } catch (KeeperException.NoNodeException e) {
 
       }
-
     }
+    return configName;
+  }
+  
+  private boolean validateConfig(String configName) throws KeeperException, InterruptedException {
+    return zkStateReader.getZkClient().exists(ZkController.CONFIGS_ZKNODE + "/" + configName, true);
+  }
 
+  /**
+   * Creates the confNode for the collection. This does not validate the config (path) itself;
+   * that check must be done before the conf node is created.
+   */
+  private void createConfNode(String configName, String coll, boolean isLegacyCloud) throws KeeperException, InterruptedException {
+    
     if (configName != null) {
       String collDir = ZkStateReader.COLLECTIONS_ZKNODE + "/" + coll;
       log.info("creating collections conf node {} ", collDir);
@@ -2655,7 +2671,6 @@
         throw new SolrException(ErrorCode.BAD_REQUEST,"Unable to get config name");
       }
     }
-    return configName;
 
   }
 
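The new getConfigName()/validateConfig() pair above moves config-set resolution ahead of collection creation, so a missing or bogus config set fails fast with BAD_REQUEST. A distilled sketch of the resolution order, where configSets stands in for the children of ZkController.CONFIGS_ZKNODE and the class name is illustrative.

    import java.util.List;

    public class ConfigNameResolution {
      public static String resolve(String collection, String requested, List<String> configSets) {
        if (requested != null) {
          return requested; // explicit collection.configName parameter wins
        }
        if (configSets.size() == 1) {
          return configSets.get(0); // only one config set in ZooKeeper: use it
        }
        if (configSets.contains(collection)) {
          return collection; // fall back to a config set named after the collection
        }
        return null; // createCollection() then rejects the request with BAD_REQUEST
      }
    }
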
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
index 6d1fc83..f340ba3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
@@ -200,17 +200,14 @@
 
   private void commitOnLeader(String leaderUrl) throws SolrServerException,
       IOException {
-    HttpSolrClient server = new HttpSolrClient(leaderUrl);
-    try {
-      server.setConnectionTimeout(30000);
+    try (HttpSolrClient client = new HttpSolrClient(leaderUrl)) {
+      client.setConnectionTimeout(30000);
       UpdateRequest ureq = new UpdateRequest();
       ureq.setParams(new ModifiableSolrParams());
       ureq.getParams().set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
       ureq.getParams().set(UpdateParams.OPEN_SEARCHER, false);
       ureq.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, true).process(
-          server);
-    } finally {
-      server.shutdown();
+          client);
     }
   }
 
@@ -594,9 +591,9 @@
   
   private void sendPrepRecoveryCmd(String leaderBaseUrl, String leaderCoreName, Slice slice)
       throws SolrServerException, IOException, InterruptedException, ExecutionException {
-    HttpSolrClient server = new HttpSolrClient(leaderBaseUrl);
-    try {
-      server.setConnectionTimeout(30000);
+
+    try (HttpSolrClient client = new HttpSolrClient(leaderBaseUrl)) {
+      client.setConnectionTimeout(30000);
       WaitForState prepCmd = new WaitForState();
       prepCmd.setCoreName(leaderCoreName);
       prepCmd.setNodeName(zkController.getNodeName());
@@ -607,14 +604,12 @@
       if (!Slice.CONSTRUCTION.equals(slice.getState()) && !Slice.RECOVERY.equals(slice.getState())) {
         prepCmd.setOnlyIfLeaderActive(true);
       }
-      HttpUriRequestResponse mrr = server.httpUriRequest(prepCmd);
+      HttpUriRequestResponse mrr = client.httpUriRequest(prepCmd);
       prevSendPreRecoveryHttpUriRequest = mrr.httpUriRequest;
       
       log.info("Sending prep recovery command to {}; {}", leaderBaseUrl, prepCmd.toString());
       
       mrr.future.get();
-    } finally {
-      server.shutdown();
     }
   }
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
index 8a00887..1896546 100644
--- a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
@@ -91,12 +91,9 @@
       if (zkProps.getClientPortAddress() == null) {
         zkProps.setClientPort(Integer.parseInt(solrPort)+1000);
       }
-    } catch (QuorumPeerConfig.ConfigException e) {
+    } catch (QuorumPeerConfig.ConfigException | IOException e) {
       if (zkRun != null)
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
-    } catch (IOException e) {
-      if (zkRun != null)
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
   }
 
@@ -186,9 +183,7 @@
 
       return cfg;
 
-    } catch (IOException e) {
-      throw new ConfigException("Error processing " + path, e);
-    } catch (IllegalArgumentException e) {
+    } catch (IOException | IllegalArgumentException e) {
       throw new ConfigException("Error processing " + path, e);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
index c52f812..1ec4859 100644
--- a/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/SyncStrategy.java
@@ -267,8 +267,8 @@
         recoverRequestCmd.setAction(CoreAdminAction.REQUESTRECOVERY);
         recoverRequestCmd.setCoreName(coreName);
         
-        HttpSolrClient client = new HttpSolrClient(baseUrl, SyncStrategy.this.client);
-        try {
+
+        try (HttpSolrClient client = new HttpSolrClient(baseUrl, SyncStrategy.this.client)) {
           client.setConnectionTimeout(30000);
           client.setSoTimeout(120000);
           client.request(recoverRequestCmd);
@@ -277,8 +277,6 @@
           if (t instanceof Error) {
             throw (Error) t;
           }
-        } finally {
-          client.shutdown();
         }
       }
     };
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index cbf4d4b..f7b30e1 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -50,7 +50,6 @@
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.handler.component.ShardHandler;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.UpdateShardHandler;
@@ -614,15 +613,13 @@
 
       ShardHandler shardHandler;
       UpdateShardHandler updateShardHandler;
-      String adminPath;
       shardHandler = cc.getShardHandlerFactory().getShardHandler();
       updateShardHandler = cc.getUpdateShardHandler();
-      adminPath = cc.getAdminPath();
       
       if (!zkRunOnly) {
         overseerElector = new LeaderElector(zkClient);
         this.overseer = new Overseer(shardHandler, updateShardHandler,
-            adminPath, zkStateReader, this, cc.getConfig());
+            CoreContainer.CORES_HANDLER_PATH, zkStateReader, this, cc.getConfig());
         ElectionContext context = new OverseerElectionContext(zkClient,
             overseer, getNodeName());
         overseerElector.setup(context);
@@ -855,12 +852,10 @@
       // Restore the interrupted status
       Thread.currentThread().interrupt();
       throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
-    } catch (KeeperException e) {
-      throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
-    } catch (IOException e) {
+    } catch (KeeperException | IOException e) {
       throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
     }
-    
+
 
     // in this case, we want to wait for the leader as long as the leader might 
     // wait for a vote, at least - but also long enough that a large cluster has
@@ -1636,9 +1631,7 @@
         log.info("Replica "+myCoreNodeName+
             " NOT in leader-initiated recovery, need to wait for leader to see down state.");
             
-        HttpSolrClient client = null;
-        client = new HttpSolrClient(leaderBaseUrl);
-        try {
+        try (HttpSolrClient client = new HttpSolrClient(leaderBaseUrl)) {
           client.setConnectionTimeout(15000);
           client.setSoTimeout(120000);
           WaitForState prepCmd = new WaitForState();
@@ -1689,8 +1682,8 @@
               }
             }
           }
-        } finally {
-          client.shutdown();
+        } catch (IOException e) {
+          SolrException.log(log, "Error closing HttpSolrClient", e);
         }
       }
     }
@@ -2023,14 +2016,10 @@
       stateData = zkClient.getData(znodePath, null, new Stat(), false);
     } catch (NoNodeException ignoreMe) {
       // safe to ignore as this znode will only exist if the leader initiated recovery
-    } catch (ConnectionLossException cle) {
+    } catch (ConnectionLossException | SessionExpiredException cle) {
       // sort of safe to ignore ??? Usually these are seen when the core is going down
       // or there are bigger issues to deal with than reading this znode
       log.warn("Unable to read "+znodePath+" due to: "+cle);
-    } catch (SessionExpiredException see) {
-      // sort of safe to ignore ??? Usually these are seen when the core is going down
-      // or there are bigger issues to deal with than reading this znode
-      log.warn("Unable to read "+znodePath+" due to: "+see);
     } catch (Exception exc) {
       log.error("Failed to read data from znode "+znodePath+" due to: "+exc);
       if (exc instanceof SolrException) {
@@ -2151,10 +2140,9 @@
    *
    * @return true on success
    */
-  public static boolean persistConfigResourceToZooKeeper( SolrResourceLoader loader, int znodeVersion ,
+  public static boolean persistConfigResourceToZooKeeper(ZkSolrResourceLoader zkLoader, int znodeVersion,
                                                           String resourceName, byte[] content,
                                                           boolean createIfNotExists) {
-    final ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader)loader;
     final ZkController zkController = zkLoader.getZkController();
     final SolrZkClient zkClient = zkController.getZkClient();
     final String resourceLocation = zkLoader.getConfigSetZkPath() + "/" + resourceName;
@@ -2162,12 +2150,12 @@
     try {
       try {
         zkClient.setData(resourceLocation , content,znodeVersion, true);
-        zkClient.setData(zkLoader.getConfigSetZkPath(),new byte[]{0},true);
+        touchConfDir(zkLoader);
       } catch (NoNodeException e) {
         if(createIfNotExists){
           try {
             zkClient.create(resourceLocation,content, CreateMode.PERSISTENT,true);
-            zkClient.setData(zkLoader.getConfigSetZkPath(), new byte[]{0}, true);
+            touchConfDir(zkLoader);
           } catch (KeeperException.NodeExistsException nee) {
             try {
               Stat stat = zkClient.exists(resourceLocation, null, true);
@@ -2206,6 +2194,21 @@
     return true;
   }
 
+  public static void touchConfDir(ZkSolrResourceLoader zkLoader)  {
+    SolrZkClient zkClient = zkLoader.getZkController().getZkClient();
+    try {
+      zkClient.setData(zkLoader.getConfigSetZkPath(),new byte[]{0},true);
+    } catch (Exception e) {
+      if (e instanceof InterruptedException) {
+        Thread.currentThread().interrupt(); // Restore the interrupted status
+      }
+      final String msg = "Error 'touching' conf location " + zkLoader.getConfigSetZkPath();
+      log.error(msg, e);
+      throw new SolrException(ErrorCode.SERVER_ERROR, msg, e);
+
+    }
+  }
+
   public static  class ResourceModifiedInZkException extends SolrException {
     public ResourceModifiedInZkException(ErrorCode code, String msg) {
       super(code, msg);
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java
index b9c0724..e760ca9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkStateWriter.java
@@ -17,7 +17,6 @@
  * limitations under the License.
  */
 
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
@@ -151,7 +150,7 @@
             // let's clean up the collections path for this collection
             reader.getZkClient().clean("/collections/" + name);
           } else if (c.getStateFormat() > 1) {
-            byte[] data = ZkStateReader.toJSON(new ClusterState(-1, Collections.<String>emptySet(), singletonMap(c.getName(), c)));
+            byte[] data = ZkStateReader.toJSON(singletonMap(c.getName(),c));
             if (reader.getZkClient().exists(path, true)) {
               assert c.getZNodeVersion() >= 0;
               log.info("going to update_collection {} version: {}", path, c.getZNodeVersion());
diff --git a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
index 2b2f61b..7d3fdc0 100644
--- a/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
@@ -36,7 +36,6 @@
 import org.apache.lucene.store.NRTCachingDirectory;
 import org.apache.lucene.store.NativeFSLockFactory;
 import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
 import org.apache.lucene.store.SimpleFSLockFactory;
 import org.apache.lucene.store.SingleInstanceLockFactory;
 import org.apache.lucene.util.IOUtils;
@@ -350,7 +349,6 @@
         directory = create(fullPath, createLockFactory(rawLockType), dirContext);
         boolean success = false;
         try {
-          directory = rateLimit(directory);
           CacheValue newCacheValue = new CacheValue(fullPath, directory);
           byDirectoryCache.put(directory, newCacheValue);
           byPathCache.put(fullPath, newCacheValue);
@@ -370,25 +368,6 @@
     }
   }
 
-  private Directory rateLimit(Directory directory) {
-    if (maxWriteMBPerSecDefault != null || maxWriteMBPerSecFlush != null || maxWriteMBPerSecMerge != null || maxWriteMBPerSecRead != null) {
-      directory = new RateLimitedDirectoryWrapper(directory);
-      if (maxWriteMBPerSecDefault != null) {
-        ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecDefault, Context.DEFAULT);
-      }
-      if (maxWriteMBPerSecFlush != null) {
-        ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecFlush, Context.FLUSH);
-      }
-      if (maxWriteMBPerSecMerge != null) {
-        ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecMerge, Context.MERGE);
-      }
-      if (maxWriteMBPerSecRead != null) {
-        ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecRead, Context.READ);
-      }
-    }
-    return directory;
-  }
-  
   /*
    * (non-Javadoc)
    * 
diff --git a/solr/core/src/java/org/apache/solr/core/Config.java b/solr/core/src/java/org/apache/solr/core/Config.java
index 94cd76a..7bd2126 100644
--- a/solr/core/src/java/org/apache/solr/core/Config.java
+++ b/solr/core/src/java/org/apache/solr/core/Config.java
@@ -20,6 +20,7 @@
 import org.apache.lucene.util.Version;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.update.SolrIndexConfig;
 import org.apache.solr.util.DOMUtil;
 import org.apache.solr.util.SystemIdResolver;
 import org.apache.solr.common.util.XMLErrorLogger;
@@ -150,15 +151,24 @@
       if (substituteProps) {
         DOMUtil.substituteProperties(doc, getSubstituteProperties());
       }
-    } catch (ParserConfigurationException e)  {
+    } catch (ParserConfigurationException | SAXException | TransformerException e)  {
       SolrException.log(log, "Exception during parsing file: " + name, e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
-    } catch (SAXException e)  {
-      SolrException.log(log, "Exception during parsing file: " + name, e);
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
-    } catch (TransformerException e) {
-      SolrException.log(log, "Exception during parsing file: " + name, e);
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+    }
+  }
+
+  /**
+   * Asserts that {@code assertCondition} is true.
+   * If it is not, logs {@code reason} as a warning, or throws a
+   * SolrException instead when {@code failCondition} is true.
+   */
+  public static void assertWarnOrFail(String reason, boolean assertCondition, boolean failCondition) {
+    if (assertCondition) {
+      return;
+    } else if (failCondition) {
+      throw new SolrException(SolrException.ErrorCode.FORBIDDEN, reason);
+    } else {
+      log.warn(reason);
     }
   }
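
A hypothetical call site for the new helper, gating a deprecated solrconfig.xml element - warn on older configs, fail outright once the config declares a 5.0 match version (the element name and version check are illustrative, not taken from this patch):

    boolean oldElementAbsent = config.getNode("indexConfig/nrtMode", false) == null;
    boolean treatAsError = luceneMatchVersion.onOrAfter(Version.LUCENE_5_0_0); // assumed version handle
    Config.assertWarnOrFail("<nrtMode> is deprecated and ignored", oldElementAbsent, treatAsError);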
 
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java b/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java
index c1632dd..3a119c2 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigOverlay.java
@@ -75,6 +75,7 @@
     for (int i = 0; i < hierarchy.size(); i++) {
       String s = hierarchy.get(i);
       if(i < hierarchy.size()-1){
+        if (!(obj.get(s) instanceof Map)) return null;
         obj = (Map) obj.get(s);
         if(obj == null) return null;
       } else {
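
The added instanceof guard protects the walk down the overlay: an intermediate key can map to a scalar rather than a nested Map, and the old unconditional cast would have thrown ClassCastException. A self-contained illustration (overlay contents hypothetical):

    Map<String, Object> overlay = new HashMap<>();
    overlay.put("updateHandler", "someScalar"); // scalar where a sub-map could be expected
    // walking the hierarchy ["updateHandler", "autoCommit"] now returns null
    // instead of failing on (Map) overlay.get("updateHandler")
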
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSolr.java b/solr/core/src/java/org/apache/solr/core/ConfigSolr.java
index f4028d3..f4e501c 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSolr.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSolr.java
@@ -22,24 +22,15 @@
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.logging.LogWatcherConfig;
-import org.apache.solr.util.DOMUtil;
-import org.apache.solr.util.PropertiesUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Properties;
 
 
@@ -73,10 +64,9 @@
   public static ConfigSolr fromInputStream(SolrResourceLoader loader, InputStream is) {
     try {
       byte[] buf = IOUtils.toByteArray(is);
-      String originalXml = new String(buf, StandardCharsets.UTF_8);
       try (ByteArrayInputStream dup = new ByteArrayInputStream(buf)) {
         Config config = new Config(loader, null, new InputSource(dup), null, false);
-        return fromConfig(config, originalXml);
+        return new ConfigSolrXml(config);
       }
     } catch (SolrException exc) {
       throw exc;
@@ -88,12 +78,6 @@
   public static ConfigSolr fromSolrHome(SolrResourceLoader loader, String solrHome) {
     return fromFile(loader, new File(solrHome, SOLR_XML_FILE));
   }
-
-  public static ConfigSolr fromConfig(Config config, String originalXml) {
-    boolean oldStyle = (config.getNode("solr/cores", false) != null);
-    return oldStyle ? new ConfigSolrXmlOld(config, originalXml)
-                    : new ConfigSolrXml(config);
-  }
   
   public abstract CoresLocator getCoresLocator();
 
@@ -105,32 +89,26 @@
    * @return core root directory
    */
   public String getCoreRootDirectory() {
-    SolrResourceLoader loader = config.getResourceLoader();
-    String relativeDir = get(CfgProp.SOLR_COREROOTDIRECTORY, null);
+    String relativeDir = getString(CfgProp.SOLR_COREROOTDIRECTORY, null);
     if (relativeDir != null)
       return loader.resolve(relativeDir);
     return loader.getInstanceDir();
   }
 
-  public PluginInfo getShardHandlerFactoryPluginInfo() {
-    Node node = config.getNode(getShardHandlerFactoryConfigPath(), false);
-    return (node == null) ? null : new PluginInfo(node, "shardHandlerFactory", false, true);
-  }
-
-  protected abstract String getShardHandlerFactoryConfigPath();
+  public abstract PluginInfo getShardHandlerFactoryPluginInfo();
 
   public String getZkHost() {
     String sysZkHost = System.getProperty("zkHost");
     if (sysZkHost != null)
       return sysZkHost;
-    return get(CfgProp.SOLR_ZKHOST, null);
+    return getString(CfgProp.SOLR_ZKHOST, null);
   }
 
   public int getZkClientTimeout() {
     String sysProp = System.getProperty("zkClientTimeout");
     if (sysProp != null)
       return Integer.parseInt(sysProp);
-    return get(CfgProp.SOLR_ZKCLIENTTIMEOUT, DEFAULT_ZK_CLIENT_TIMEOUT);
+    return getInt(CfgProp.SOLR_ZKCLIENTTIMEOUT, DEFAULT_ZK_CLIENT_TIMEOUT);
   }
 
   private static final int DEFAULT_ZK_CLIENT_TIMEOUT = 15000;
@@ -142,124 +120,117 @@
   private static final int DEFAULT_AUTO_REPLICA_FAILOVER_WAIT_AFTER_EXPIRATION = 30000;
   private static final int DEFAULT_AUTO_REPLICA_FAILOVER_WORKLOOP_DELAY = 10000;
   private static final int DEFAULT_AUTO_REPLICA_FAILOVER_BAD_NODE_EXPIRATION = 60000;
+  
+  public static final int DEFAULT_DISTRIBUPDATECONNTIMEOUT = 60000;
+  public static final int DEFAULT_DISTRIBUPDATESOTIMEOUT = 600000;
 
   protected static final String DEFAULT_CORE_ADMIN_PATH = "/admin/cores";
 
   public String getSolrHostPort() {
-    return get(CfgProp.SOLR_HOSTPORT, null);
+    return getString(CfgProp.SOLR_HOSTPORT, null);
   }
 
   public String getZkHostContext() {
-    return get(CfgProp.SOLR_HOSTCONTEXT, null);
+    return getString(CfgProp.SOLR_HOSTCONTEXT, null);
   }
 
   public String getHost() {
-    return get(CfgProp.SOLR_HOST, null);
+    return getString(CfgProp.SOLR_HOST, null);
   }
 
   public int getLeaderVoteWait() {
-    return get(CfgProp.SOLR_LEADERVOTEWAIT, DEFAULT_LEADER_VOTE_WAIT);
+    return getInt(CfgProp.SOLR_LEADERVOTEWAIT, DEFAULT_LEADER_VOTE_WAIT);
   }
   
   public int getLeaderConflictResolveWait() {
-    return get(CfgProp.SOLR_LEADERCONFLICTRESOLVEWAIT, DEFAULT_LEADER_CONFLICT_RESOLVE_WAIT);
+    return getInt(CfgProp.SOLR_LEADERCONFLICTRESOLVEWAIT, DEFAULT_LEADER_CONFLICT_RESOLVE_WAIT);
   }
   
   public int getAutoReplicaFailoverWaitAfterExpiration() {
-    return get(CfgProp.SOLR_AUTOREPLICAFAILOVERWAITAFTEREXPIRATION, DEFAULT_AUTO_REPLICA_FAILOVER_WAIT_AFTER_EXPIRATION);
+    return getInt(CfgProp.SOLR_AUTOREPLICAFAILOVERWAITAFTEREXPIRATION, DEFAULT_AUTO_REPLICA_FAILOVER_WAIT_AFTER_EXPIRATION);
   }
   
   public int getAutoReplicaFailoverWorkLoopDelay() {
-    return get(CfgProp.SOLR_AUTOREPLICAFAILOVERWORKLOOPDELAY, DEFAULT_AUTO_REPLICA_FAILOVER_WORKLOOP_DELAY);
+    return getInt(CfgProp.SOLR_AUTOREPLICAFAILOVERWORKLOOPDELAY, DEFAULT_AUTO_REPLICA_FAILOVER_WORKLOOP_DELAY);
   }
   
   public int getAutoReplicaFailoverBadNodeExpiration() {
-    return get(CfgProp.SOLR_AUTOREPLICAFAILOVERBADNODEEXPIRATION, DEFAULT_AUTO_REPLICA_FAILOVER_BAD_NODE_EXPIRATION);
+    return getInt(CfgProp.SOLR_AUTOREPLICAFAILOVERBADNODEEXPIRATION, DEFAULT_AUTO_REPLICA_FAILOVER_BAD_NODE_EXPIRATION);
   }
 
   public boolean getGenericCoreNodeNames() {
-    return get(CfgProp.SOLR_GENERICCORENODENAMES, false);
+    return getBoolean(CfgProp.SOLR_GENERICCORENODENAMES, false);
   }
 
   public int getDistributedConnectionTimeout() {
-    return get(CfgProp.SOLR_DISTRIBUPDATECONNTIMEOUT, 0);
+    return getInt(CfgProp.SOLR_DISTRIBUPDATECONNTIMEOUT, DEFAULT_DISTRIBUPDATECONNTIMEOUT);
   }
 
   public int getDistributedSocketTimeout() {
-    return get(CfgProp.SOLR_DISTRIBUPDATESOTIMEOUT, 0);
+    return getInt(CfgProp.SOLR_DISTRIBUPDATESOTIMEOUT, DEFAULT_DISTRIBUPDATESOTIMEOUT);
   }
   
   public int getMaxUpdateConnections() {
-    return get(CfgProp.SOLR_MAXUPDATECONNECTIONS, 10000);
+    return getInt(CfgProp.SOLR_MAXUPDATECONNECTIONS, 10000);
   }
 
   public int getMaxUpdateConnectionsPerHost() {
-    return get(CfgProp.SOLR_MAXUPDATECONNECTIONSPERHOST, 100);
+    return getInt(CfgProp.SOLR_MAXUPDATECONNECTIONSPERHOST, 100);
   }
 
   public int getCoreLoadThreadCount() {
-    return get(ConfigSolr.CfgProp.SOLR_CORELOADTHREADS, DEFAULT_CORE_LOAD_THREADS);
+    return getInt(ConfigSolr.CfgProp.SOLR_CORELOADTHREADS, DEFAULT_CORE_LOAD_THREADS);
   }
 
   public String getSharedLibDirectory() {
-    return get(ConfigSolr.CfgProp.SOLR_SHAREDLIB , null);
-  }
-
-  public String getDefaultCoreName() {
-    return get(CfgProp.SOLR_CORES_DEFAULT_CORE_NAME, null);
-  }
-
-  public abstract boolean isPersistent();
-
-  public String getAdminPath() {
-    return get(CfgProp.SOLR_ADMINPATH, DEFAULT_CORE_ADMIN_PATH);
+    return getString(ConfigSolr.CfgProp.SOLR_SHAREDLIB, null);
   }
 
   public String getCoreAdminHandlerClass() {
-    return get(CfgProp.SOLR_ADMINHANDLER, "org.apache.solr.handler.admin.CoreAdminHandler");
+    return getString(CfgProp.SOLR_ADMINHANDLER, "org.apache.solr.handler.admin.CoreAdminHandler");
   }
   
   public String getZkCredentialsProviderClass() {
-    return get(CfgProp.SOLR_ZKCREDENTIALSPROVIDER, null);
+    return getString(CfgProp.SOLR_ZKCREDENTIALSPROVIDER, null);
   }
 
   public String getZkACLProviderClass() {
-    return get(CfgProp.SOLR_ZKACLPROVIDER, null);
+    return getString(CfgProp.SOLR_ZKACLPROVIDER, null);
   }
   
   public String getCollectionsHandlerClass() {
-    return get(CfgProp.SOLR_COLLECTIONSHANDLER, "org.apache.solr.handler.admin.CollectionsHandler");
+    return getString(CfgProp.SOLR_COLLECTIONSHANDLER, "org.apache.solr.handler.admin.CollectionsHandler");
   }
 
   public String getInfoHandlerClass() {
-    return get(CfgProp.SOLR_INFOHANDLER, "org.apache.solr.handler.admin.InfoHandler");
+    return getString(CfgProp.SOLR_INFOHANDLER, "org.apache.solr.handler.admin.InfoHandler");
   }
 
   public boolean hasSchemaCache() {
-    return get(ConfigSolr.CfgProp.SOLR_SHARESCHEMA, false);
+    return getBoolean(ConfigSolr.CfgProp.SOLR_SHARESCHEMA, false);
   }
 
   public String getManagementPath() {
-    return get(CfgProp.SOLR_MANAGEMENTPATH, null);
+    return getString(CfgProp.SOLR_MANAGEMENTPATH, null);
   }
 
   public String getConfigSetBaseDirectory() {
-    return get(CfgProp.SOLR_CONFIGSETBASEDIR, "configsets");
+    return getString(CfgProp.SOLR_CONFIGSETBASEDIR, "configsets");
   }
 
   public LogWatcherConfig getLogWatcherConfig() {
-    String loggingClass = get(CfgProp.SOLR_LOGGING_CLASS, null);
-    String loggingWatcherThreshold = get(CfgProp.SOLR_LOGGING_WATCHER_THRESHOLD, null);
+    String loggingClass = getString(CfgProp.SOLR_LOGGING_CLASS, null);
+    String loggingWatcherThreshold = getString(CfgProp.SOLR_LOGGING_WATCHER_THRESHOLD, null);
     return new LogWatcherConfig(
-        get(CfgProp.SOLR_LOGGING_ENABLED, true),
+        getBoolean(CfgProp.SOLR_LOGGING_ENABLED, true),
         loggingClass,
         loggingWatcherThreshold,
-        get(CfgProp.SOLR_LOGGING_WATCHER_SIZE, 50)
+        getInt(CfgProp.SOLR_LOGGING_WATCHER_SIZE, 50)
     );
   }
 
   public int getTransientCacheSize() {
-    return get(CfgProp.SOLR_TRANSIENTCACHESIZE, Integer.MAX_VALUE);
+    return getInt(CfgProp.SOLR_TRANSIENTCACHESIZE, Integer.MAX_VALUE);
   }
 
   public ConfigSetService createCoreConfigService(SolrResourceLoader loader, ZkController zkController) {
@@ -271,7 +242,7 @@
   }
 
   // Ugly for now, but we'll at least be able to centralize all of the differences between 4x and 5x.
-  protected static enum CfgProp {
+  public static enum CfgProp {
     SOLR_ADMINHANDLER,
     SOLR_COLLECTIONSHANDLER,
     SOLR_CORELOADTHREADS,
@@ -305,59 +276,44 @@
     
     SOLR_ZKCREDENTIALSPROVIDER,
     SOLR_ZKACLPROVIDER,
-    
-    //TODO: Remove all of these elements for 5.0
-    SOLR_PERSISTENT,
-    SOLR_CORES_DEFAULT_CORE_NAME,
-    SOLR_ADMINPATH
-  }
-
-  protected Config config;
-  protected Map<CfgProp, Object> propMap = new HashMap<>();
-
-  public ConfigSolr(Config config) {
-    this.config = config;
-    config.substituteProperties();
-  }
-
-  // for extension & testing.
-  protected ConfigSolr() {
 
   }
-  
-  public Config getConfig() {
-    return config;
+
+  protected final SolrResourceLoader loader;
+  protected final Properties solrProperties;
+
+  public ConfigSolr(SolrResourceLoader loader, Properties solrProperties) {
+    this.loader = loader;
+    this.solrProperties = solrProperties;
   }
 
-  @SuppressWarnings("unchecked")
-  public <T> T get(CfgProp key, T defaultValue) {
-    if (propMap.containsKey(key) && propMap.get(key) != null) {
-      return (T) propMap.get(key);
-    }
-    return defaultValue;
+  public ConfigSolr(SolrResourceLoader loader) {
+    this(loader, new Properties());
   }
 
-  public Properties getSolrProperties(String path) {
-    try {
-      return readProperties(((NodeList) config.evaluate(
-          path, XPathConstants.NODESET)).item(0));
-    } catch (Exception e) {
-      SolrException.log(log, null, e);
-    }
-    return null;
+  protected abstract String getProperty(CfgProp key);
 
+  private String getString(CfgProp key, String defaultValue) {
+    String v = getProperty(key);
+    return v == null ? defaultValue : v;
   }
-  
-  protected Properties readProperties(Node node) throws XPathExpressionException {
-    XPath xpath = config.getXPath();
-    NodeList props = (NodeList) xpath.evaluate("property", node, XPathConstants.NODESET);
-    Properties properties = new Properties();
-    for (int i = 0; i < props.getLength(); i++) {
-      Node prop = props.item(i);
-      properties.setProperty(DOMUtil.getAttr(prop, "name"),
-          PropertiesUtil.substituteProperty(DOMUtil.getAttr(prop, "value"), null));
-    }
-    return properties;
+
+  private int getInt(CfgProp key, int defaultValue) {
+    String v = getProperty(key);
+    return v == null ? defaultValue : Integer.parseInt(v);
+  }
+
+  private boolean getBoolean(CfgProp key, boolean defaultValue) {
+    String v = getProperty(key);
+    return v == null ? defaultValue : Boolean.parseBoolean(v);
+  }
+
+  public Properties getSolrProperties() {
+    return solrProperties;
+  }
+
+  public SolrResourceLoader getSolrResourceLoader() {
+    return loader;
   }
 
 }
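
After this refactor every typed getter funnels through a single abstract getProperty(CfgProp) returning raw strings; subclasses only decide where the strings come from. A hypothetical subclass backed by a plain java.util.Properties shows how small the contract now is (class name and key scheme are assumptions, not part of this patch):

    public class PropertiesConfigSolr extends ConfigSolr {
      private final Properties props;

      public PropertiesConfigSolr(SolrResourceLoader loader, Properties props) {
        super(loader, props);
        this.props = props;
      }

      @Override
      protected String getProperty(CfgProp key) {
        return props.getProperty(key.name()); // keying by enum name is an assumption
      }

      @Override
      public CoresLocator getCoresLocator() {
        return new CorePropertiesLocator(getCoreRootDirectory());
      }

      @Override
      public PluginInfo getShardHandlerFactoryPluginInfo() {
        return null; // no shard handler configuration in this backing store
      }
    }
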
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSolrXml.java b/solr/core/src/java/org/apache/solr/core/ConfigSolrXml.java
index f3ea1f2..0ed0855 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSolrXml.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSolrXml.java
@@ -17,24 +17,26 @@
  * limitations under the License.
  */
 
+import com.google.common.base.Function;
+import com.google.common.base.Functions;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.util.DOMUtil;
-
-import com.google.common.base.Function;
-import com.google.common.base.Functions;
-
+import org.apache.solr.util.PropertiesUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.List;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpressionException;
 import java.util.ArrayList;
-import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
+import java.util.Map;
+import java.util.Properties;
 
 
 /**
@@ -45,51 +47,28 @@
   protected static Logger log = LoggerFactory.getLogger(ConfigSolrXml.class);
 
   private final CoresLocator coresLocator;
+  private final Config config;
+  private final Map<CfgProp, Object> propMap = new HashMap<>();
 
   public ConfigSolrXml(Config config) {
-    super(config);
-    try {
-      checkForIllegalConfig();
-      fillPropMap();
-      coresLocator = new CorePropertiesLocator(getCoreRootDirectory());
-    } catch (IOException e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
-    }
+    super(config.getResourceLoader(), loadProperties(config));
+    this.config = config;
+    this.config.substituteProperties();
+    checkForIllegalConfig();
+    fillPropMap();
+    coresLocator = new CorePropertiesLocator(getCoreRootDirectory());
   }
 
-  private void checkForIllegalConfig() throws IOException {
+  private void checkForIllegalConfig() {
 
-    // Do sanity checks - we don't want to find old style config
     failIfFound("solr/@coreLoadThreads");
     failIfFound("solr/@persistent");
     failIfFound("solr/@sharedLib");
     failIfFound("solr/@zkHost");
 
-    failIfFound("solr/logging/@class");
-    failIfFound("solr/logging/@enabled");
-    failIfFound("solr/logging/watcher/@size");
-    failIfFound("solr/logging/watcher/@threshold");
+    failIfFound("solr/cores");
 
-    failIfFound("solr/cores/@adminHandler");
-    failIfFound("solr/cores/@distribUpdateConnTimeout");
-    failIfFound("solr/cores/@distribUpdateSoTimeout");
-    failIfFound("solr/cores/@host");
-    failIfFound("solr/cores/@hostContext");
-    failIfFound("solr/cores/@hostPort");
-    failIfFound("solr/cores/@leaderVoteWait");
-    failIfFound("solr/cores/@leaderConflictResolveWait");
-    failIfFound("solr/cores/@genericCoreNodeNames");
-    failIfFound("solr/cores/@managementPath");
-    failIfFound("solr/cores/@shareSchema");
-    failIfFound("solr/cores/@transientCacheSize");
-    failIfFound("solr/cores/@zkClientTimeout");
-
-    // These have no counterpart in 5.0, asking for any of these in Solr 5.0
-    // will result in an error being
-    // thrown.
-    failIfFound("solr/cores/@defaultCoreName");
     failIfFound("solr/@persistent");
-    failIfFound("solr/cores/@adminPath");
 
   }
 
@@ -97,7 +76,32 @@
 
     if (config.getVal(xPath, false) != null) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Should not have found " + xPath +
-          " solr.xml may be a mix of old and new style formats.");
+          "\n. Please upgrade your solr.xml: https://cwiki.apache.org/confluence/display/solr/Format+of+solr.xml");
+    }
+  }
+
+  @Override
+  protected String getProperty(CfgProp key) {
+    if (!propMap.containsKey(key) || propMap.get(key) == null)
+      return null;
+    return propMap.get(key).toString();
+  }
+
+  private static Properties loadProperties(Config config) {
+    try {
+      Node node = ((NodeList) config.evaluate("solr", XPathConstants.NODESET)).item(0);
+      XPath xpath = config.getXPath();
+      NodeList props = (NodeList) xpath.evaluate("property", node, XPathConstants.NODESET);
+      Properties properties = new Properties();
+      for (int i = 0; i < props.getLength(); i++) {
+        Node prop = props.item(i);
+        properties.setProperty(DOMUtil.getAttr(prop, "name"),
+            PropertiesUtil.substituteProperty(DOMUtil.getAttr(prop, "value"), null));
+      }
+      return properties;
+    }
+    catch (XPathExpressionException e) {
+      log.warn("Error parsing solr.xml: " + e.getMessage());
+      return null;
     }
   }
 
@@ -247,24 +251,9 @@
     }
   }
 
-  @Override
-  public String getDefaultCoreName() {
-    return "collection1";
-  }
-
-  @Override
-  public boolean isPersistent() {
-    return true;
-  }
-
-  @Override
-  protected String getShardHandlerFactoryConfigPath() {
-    return "solr/shardHandlerFactory";
-  }
-
-  @Override
-  public String getAdminPath() {
-    return DEFAULT_CORE_ADMIN_PATH;
+  public PluginInfo getShardHandlerFactoryPluginInfo() {
+    Node node = config.getNode("solr/shardHandlerFactory", false);
+    return (node == null) ? null : new PluginInfo(node, "shardHandlerFactory", false, true);
   }
 
   @Override
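
loadProperties above collects the <property name="..." value="..."/> children of the root <solr> element, substituting system properties into each value. Given a solr.xml containing, say, <property name="myHost" value="${host:localhost}"/> (fragment hypothetical), the parsed result seen from inside the class would be:

    Properties p = loadProperties(config);
    String host = p.getProperty("myHost"); // "localhost" unless -Dhost=... overrides it
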
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSolrXmlOld.java b/solr/core/src/java/org/apache/solr/core/ConfigSolrXmlOld.java
deleted file mode 100644
index 6d57f31..0000000
--- a/solr/core/src/java/org/apache/solr/core/ConfigSolrXmlOld.java
+++ /dev/null
@@ -1,284 +0,0 @@
-package org.apache.solr.core;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.util.DOMUtil;
-import org.apache.solr.util.PropertiesUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-
-/**
- *
- */
-public class ConfigSolrXmlOld extends ConfigSolr {
-
-  protected static Logger log = LoggerFactory.getLogger(ConfigSolrXmlOld.class);
-
-  private NodeList coreNodes = null;
-  
-  private final CoresLocator persistor;
-
-  public static final String DEFAULT_DEFAULT_CORE_NAME = "collection1";
-
-  @Override
-  protected String getShardHandlerFactoryConfigPath() {
-    return "solr/cores/shardHandlerFactory";
-  }
-
-  public ConfigSolrXmlOld(Config config, String originalXML) {
-    super(config);
-    try {
-      checkForIllegalConfig();
-      fillPropMap();
-      initCoreList();
-      this.persistor = isPersistent() ? new SolrXMLCoresLocator(originalXML, this)
-                                      : new SolrXMLCoresLocator.NonPersistingLocator(originalXML, this);
-    }
-    catch (IOException e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
-    }
-  }
-
-  @Override
-  public CoresLocator getCoresLocator() {
-    return this.persistor;
-  }
-  
-  private void checkForIllegalConfig() throws IOException {
-    // Do sanity checks - we don't want to find new style
-    // config
-    failIfFound("solr/str[@name='adminHandler']");
-    failIfFound("solr/int[@name='coreLoadThreads']");
-    failIfFound("solr/str[@name='coreRootDirectory']");
-    failIfFound("solr/solrcloud/int[@name='distribUpdateConnTimeout']");
-    failIfFound("solr/solrcloud/int[@name='distribUpdateSoTimeout']");
-    failIfFound("solr/solrcloud/str[@name='host']");
-    failIfFound("solr/solrcloud/str[@name='hostContext']");
-    failIfFound("solr/solrcloud/int[@name='hostPort']");
-    failIfFound("solr/solrcloud/int[@name='leaderVoteWait']");
-    failIfFound("solr/solrcloud/int[@name='genericCoreNodeNames']");
-    failIfFound("solr/str[@name='managementPath']");
-    failIfFound("solr/str[@name='sharedLib']");
-    failIfFound("solr/str[@name='shareSchema']");
-    failIfFound("solr/int[@name='transientCacheSize']");
-    failIfFound("solr/solrcloud/int[@name='zkClientTimeout']");
-    failIfFound("solr/solrcloud/int[@name='zkHost']");
-    
-    failIfFound("solr/logging/str[@name='class']");
-    failIfFound("solr/logging/str[@name='enabled']");
-    
-    failIfFound("solr/logging/watcher/int[@name='size']");
-    failIfFound("solr/logging/watcher/int[@name='threshold']");
-  }
-  
-  private void failIfFound(String xPath) {
-
-    if (config.getVal(xPath, false) != null) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Should not have found " + xPath +
-          " solr.xml may be a mix of old and new style formats.");
-    }
-  }
-
-  @Override
-  public boolean isPersistent() {
-    return config.getBool("solr/@persistent", false);
-  }
-
-  @Override
-  public String getDefaultCoreName() {
-    return get(CfgProp.SOLR_CORES_DEFAULT_CORE_NAME, DEFAULT_DEFAULT_CORE_NAME);
-  }
-  
-  private void fillPropMap() {
-    storeConfigPropertyAsInt(CfgProp.SOLR_CORELOADTHREADS, "solr/@coreLoadThreads");
-    storeConfigPropertyAsString(CfgProp.SOLR_SHAREDLIB, "solr/@sharedLib");
-    storeConfigPropertyAsString(CfgProp.SOLR_ZKHOST, "solr/@zkHost");
-    storeConfigPropertyAsString(CfgProp.SOLR_LOGGING_CLASS, "solr/logging/@class");
-    storeConfigPropertyAsBoolean(CfgProp.SOLR_LOGGING_ENABLED, "solr/logging/@enabled");
-    storeConfigPropertyAsInt(CfgProp.SOLR_LOGGING_WATCHER_SIZE, "solr/logging/watcher/@size");
-    storeConfigPropertyAsString(CfgProp.SOLR_LOGGING_WATCHER_THRESHOLD, "solr/logging/watcher/@threshold");
-    storeConfigPropertyAsString(CfgProp.SOLR_ADMINHANDLER, "solr/cores/@adminHandler");
-    storeConfigPropertyAsString(CfgProp.SOLR_COLLECTIONSHANDLER, "solr/cores/@collectionsHandler");
-    storeConfigPropertyAsString(CfgProp.SOLR_INFOHANDLER, "solr/cores/@infoHandler");
-    storeConfigPropertyAsInt(CfgProp.SOLR_DISTRIBUPDATECONNTIMEOUT, "solr/cores/@distribUpdateConnTimeout");
-    storeConfigPropertyAsInt(CfgProp.SOLR_DISTRIBUPDATESOTIMEOUT, "solr/cores/@distribUpdateSoTimeout");
-    storeConfigPropertyAsInt(CfgProp.SOLR_MAXUPDATECONNECTIONS, "solr/cores/@maxUpdateConnections");
-    storeConfigPropertyAsInt(CfgProp.SOLR_MAXUPDATECONNECTIONSPERHOST, "solr/cores/@maxUpdateConnectionsPerHost");
-    storeConfigPropertyAsString(CfgProp.SOLR_HOST, "solr/cores/@host");
-    storeConfigPropertyAsString(CfgProp.SOLR_HOSTCONTEXT, "solr/cores/@hostContext");
-    storeConfigPropertyAsString(CfgProp.SOLR_HOSTPORT, "solr/cores/@hostPort");
-    storeConfigPropertyAsInt(CfgProp.SOLR_LEADERVOTEWAIT, "solr/cores/@leaderVoteWait");
-    storeConfigPropertyAsBoolean(CfgProp.SOLR_GENERICCORENODENAMES, "solr/cores/@genericCoreNodeNames");
-    storeConfigPropertyAsBoolean(CfgProp.SOLR_AUTOREPLICAFAILOVERBADNODEEXPIRATION, "solr/cores/@autoReplicaFailoverBadNodeExpiration");
-    storeConfigPropertyAsBoolean(CfgProp.SOLR_AUTOREPLICAFAILOVERWAITAFTEREXPIRATION, "solr/cores/@autoReplicaFailoverWaitAfterExpiration");
-    storeConfigPropertyAsBoolean(CfgProp.SOLR_AUTOREPLICAFAILOVERWORKLOOPDELAY, "solr/cores/@autoReplicaFailoverWorkLoopDelay");
-    storeConfigPropertyAsString(CfgProp.SOLR_ZKACLPROVIDER, "solr/cores/@zkACLProvider");
-    storeConfigPropertyAsString(CfgProp.SOLR_ZKCREDENTIALSPROVIDER, "solr/cores/@zkCredentialsProvider");
-    storeConfigPropertyAsString(CfgProp.SOLR_MANAGEMENTPATH, "solr/cores/@managementPath");
-    storeConfigPropertyAsBoolean(CfgProp.SOLR_SHARESCHEMA, "solr/cores/@shareSchema");
-    storeConfigPropertyAsInt(CfgProp.SOLR_TRANSIENTCACHESIZE, "solr/cores/@transientCacheSize");
-    storeConfigPropertyAsInt(CfgProp.SOLR_ZKCLIENTTIMEOUT, "solr/cores/@zkClientTimeout");
-    storeConfigPropertyAsString(CfgProp.SOLR_CONFIGSETBASEDIR, "solr/cores/@configSetBaseDir");
-
-    // These have no counterpart in 5.0, asking, for any of these in Solr 5.0
-    // will result in an error being
-    // thrown.
-    storeConfigPropertyAsString(CfgProp.SOLR_CORES_DEFAULT_CORE_NAME, "solr/cores/@defaultCoreName");
-    storeConfigPropertyAsString(CfgProp.SOLR_PERSISTENT, "solr/@persistent");
-    storeConfigPropertyAsString(CfgProp.SOLR_ADMINPATH, "solr/cores/@adminPath");
-  }
-
-  private void storeConfigPropertyAsInt(CfgProp key, String xmlPath) {
-    String valueAsString = config.getVal(xmlPath, false);
-    if (StringUtils.isNotBlank(valueAsString)) {
-      propMap.put(key, Integer.parseInt(valueAsString));
-    } else {
-      propMap.put(key, null);
-    }
-  }
-
-  private void storeConfigPropertyAsBoolean(CfgProp key, String xmlPath) {
-    String valueAsString = config.getVal(xmlPath, false);
-    if (StringUtils.isNotBlank(valueAsString)) {
-      propMap.put(key, Boolean.parseBoolean(valueAsString));
-    } else {
-      propMap.put(key, null);
-    }
-  }
-
-  private void storeConfigPropertyAsString(CfgProp key, String xmlPath) {
-    propMap.put(key, config.getVal(xmlPath, false));
-  }
-
-  private void initCoreList() throws IOException {
-    
-    coreNodes = (NodeList) config.evaluate("solr/cores/core",
-        XPathConstants.NODESET);
-    // Check a couple of error conditions
-    Set<String> names = new HashSet<>(); // for duplicate names
-    Map<String,String> dirs = new HashMap<>(); // for duplicate
-                                                            // data dirs.
-    
-    for (int idx = 0; idx < coreNodes.getLength(); ++idx) {
-      Node node = coreNodes.item(idx);
-      String name = DOMUtil.getAttr(node, CoreDescriptor.CORE_NAME, null);
-
-      String dataDir = DOMUtil.getAttr(node, CoreDescriptor.CORE_DATADIR, null);
-      if (name != null) {
-        if (!names.contains(name)) {
-          names.add(name);
-        } else {
-          String msg = String.format(Locale.ROOT,
-              "More than one core defined for core named %s", name);
-          log.error(msg);
-        }
-      }
-
-      String instDir = DOMUtil.getAttr(node, CoreDescriptor.CORE_INSTDIR, null);
-
-      if (dataDir != null) {
-        String absData = null;
-        File dataFile = new File(dataDir);
-        if (dataFile.isAbsolute()) {
-          absData = dataFile.getCanonicalPath();
-        } else if (instDir != null) {
-          File instFile = new File(instDir);
-          absData = new File(instFile, dataDir).getCanonicalPath();
-        }
-        if (absData != null) {
-          if (!dirs.containsKey(absData)) {
-            dirs.put(absData, name);
-          } else {
-            String msg = String
-                .format(
-                    Locale.ROOT,
-                    "More than one core points to data dir %s. They are in %s and %s",
-                    absData, dirs.get(absData), name);
-            log.warn(msg);
-          }
-        }
-      }
-    }
-    
-  }
-
-  public List<String> getAllCoreNames() {
-    List<String> ret = new ArrayList<>();
-    
-    synchronized (coreNodes) {
-      for (int idx = 0; idx < coreNodes.getLength(); ++idx) {
-        Node node = coreNodes.item(idx);
-        ret.add(DOMUtil.getAttr(node, CoreDescriptor.CORE_NAME, null));
-      }
-    }
-    
-    return ret;
-  }
-
-  public String getProperty(String coreName, String property, String defaultVal) {
-    
-    synchronized (coreNodes) {
-      for (int idx = 0; idx < coreNodes.getLength(); ++idx) {
-        Node node = coreNodes.item(idx);
-        if (coreName.equals(DOMUtil.getAttr(node, CoreDescriptor.CORE_NAME,
-            null))) {
-          String propVal = DOMUtil.getAttr(node, property);
-          if (propVal == null)
-            propVal = defaultVal;
-          return propVal;
-        }
-      }
-    }
-    return defaultVal;
-    
-  }
-
-  public Properties getCoreProperties(String coreName) {
-    synchronized (coreNodes) {
-      for (int idx = 0; idx < coreNodes.getLength(); idx++) {
-        Node node = coreNodes.item(idx);
-        if (coreName.equals(DOMUtil.getAttr(node, CoreDescriptor.CORE_NAME, null))) {
-          try {
-            return readProperties(node);
-          } catch (XPathExpressionException e) {
-            SolrException.log(log, e);
-          }
-        }
-      }
-    }
-    return new Properties();
-  }
-}
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index be7d66a..949f9a7 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -17,21 +17,8 @@
 
 package org.apache.solr.core;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Properties;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
 import org.apache.solr.common.SolrException;
@@ -51,8 +38,20 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import static com.google.common.base.Preconditions.checkNotNull;
 
 
 /**
@@ -102,9 +101,14 @@
   protected final CoresLocator coresLocator;
   
   private String hostName;
+
   private final JarRepository jarRepository = new JarRepository(this);
-  
-  private Map<String ,SolrRequestHandler> containerHandlers = new HashMap<>();
+
+  public static final String CORES_HANDLER_PATH = "/admin/cores";
+  public static final String COLLECTIONS_HANDLER_PATH = "/admin/collections";
+  public static final String INFO_HANDLER_PATH = "/admin/info";
+
+  private Map<String, SolrRequestHandler> containerHandlers = new HashMap<>();
 
   public SolrRequestHandler getRequestHandler(String path) {
     return RequestHandlerBase.getRequestHandler(path, containerHandlers);
@@ -112,7 +116,6 @@
 
   public Map<String, SolrRequestHandler> getRequestHandlers(){
     return this.containerHandlers;
-
   }
 
  // private ClientConnectionManager clientConnectionManager = new PoolingClientConnectionManager();
@@ -137,7 +140,7 @@
    * @see #load()
    */
   public CoreContainer(SolrResourceLoader loader) {
-    this(loader, ConfigSolr.fromSolrHome(loader, loader.getInstanceDir()));
+    this(ConfigSolr.fromSolrHome(loader, loader.getInstanceDir()));
   }
 
   /**
@@ -154,19 +157,15 @@
    * Create a new CoreContainer using the given SolrResourceLoader,
    * configuration and CoresLocator.  The container's cores are
    * not loaded.
-   * @param loader the SolrResourceLoader
    * @param config a ConfigSolr representation of this container's configuration
    * @see #load()
    */
-  public CoreContainer(SolrResourceLoader loader, ConfigSolr config) {
-    this.loader = checkNotNull(loader);
-    this.solrHome = loader.getInstanceDir();
-    this.cfg = checkNotNull(config);
-    this.coresLocator = config.getCoresLocator();
+  public CoreContainer(ConfigSolr config) {
+    this(config, config.getCoresLocator());
   }
 
-  public CoreContainer(SolrResourceLoader loader, ConfigSolr config, CoresLocator locator) {
-    this.loader = checkNotNull(loader);
+  public CoreContainer(ConfigSolr config, CoresLocator locator) {
+    this.loader = config.getSolrResourceLoader();
     this.solrHome = loader.getInstanceDir();
     this.cfg = checkNotNull(config);
     this.coresLocator = locator;
@@ -194,8 +193,13 @@
    */
   public static CoreContainer createAndLoad(String solrHome, File configFile) {
     SolrResourceLoader loader = new SolrResourceLoader(solrHome);
-    CoreContainer cc = new CoreContainer(loader, ConfigSolr.fromFile(loader, configFile));
-    cc.load();
+    CoreContainer cc = new CoreContainer(ConfigSolr.fromFile(loader, configFile));
+    try {
+      cc.load();
+    } catch (Exception e) {
+      cc.shutdown();
+      throw e;
+    }
     return cc;
   }
   
@@ -238,15 +242,15 @@
     zkSys.initZooKeeper(this, solrHome, cfg);
 
     collectionsHandler = createHandler(cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
-    containerHandlers.put("/admin/collections" , collectionsHandler);
+    containerHandlers.put(COLLECTIONS_HANDLER_PATH, collectionsHandler);
     infoHandler        = createHandler(cfg.getInfoHandlerClass(), InfoHandler.class);
-    containerHandlers.put("/admin/info" , infoHandler);
+    containerHandlers.put(INFO_HANDLER_PATH, infoHandler);
     coreAdminHandler   = createHandler(cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class);
-    containerHandlers.put(cfg.getAdminPath() , coreAdminHandler);
+    containerHandlers.put(CORES_HANDLER_PATH, coreAdminHandler);
 
     coreConfigService = cfg.createCoreConfigService(loader, zkSys.getZkController());
 
-    containerProperties = cfg.getSolrProperties("solr");
+    containerProperties = cfg.getSolrProperties();
 
     // setup executor to load cores in parallel
     // do not limit the size of the executor in zk mode since cores may try and wait for each other.
@@ -341,10 +345,9 @@
     }
 
     try {
-      coreAdminHandler.shutdown();
+      if (coreAdminHandler != null) coreAdminHandler.shutdown();
     } catch (Exception e) {
-      log.warn("Error shutting down CoreAdminHandler. Continuing to close CoreContainer.");
-      e.printStackTrace();
+      log.warn("Error shutting down CoreAdminHandler. Continuing to close CoreContainer.", e);
     }
 
     try {
@@ -519,11 +522,15 @@
 
       return core;
 
-    }
-    catch (Exception e) {
+    } catch (Exception e) {
       coreInitFailures.put(dcore.getName(), new CoreLoadFailure(dcore, e));
       log.error("Error creating core [{}]: {}", dcore.getName(), e.getMessage(), e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to create core [" + dcore.getName() + "]", e);
+    } catch (Throwable t) {
+      SolrException e = new SolrException(ErrorCode.SERVER_ERROR, "JVM Error creating core [" + dcore.getName() + "]: " + t.getMessage(), t);
+      log.error("Error creating core [{}]: {}", dcore.getName(), t.getMessage(), t);
+      coreInitFailures.put(dcore.getName(), new CoreLoadFailure(dcore, e));
+      throw t;
     }
 
   }
@@ -591,8 +598,6 @@
    */
   public void reload(String name) {
 
-    name = checkDefault(name);
-
     SolrCore core = solrCores.getCoreFromAnyList(name, false);
     if (core == null)
       throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "No such core: " + name );
@@ -615,11 +620,6 @@
 
   }
 
-  //5.0 remove all checkDefaults?
-  private String checkDefault(String name) {
-    return (null == name || name.isEmpty()) ? getDefaultCoreName() : name;
-  } 
-
   /**
    * Swaps two SolrCore descriptors.
    */
@@ -627,8 +627,6 @@
     if( n0 == null || n1 == null ) {
       throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Can not swap unnamed cores." );
     }
-    n0 = checkDefault(n0);
-    n1 = checkDefault(n1);
     solrCores.swap(n0, n1);
 
     coresLocator.swap(this, solrCores.getCoreDescriptor(n0), solrCores.getCoreDescriptor(n1));
@@ -654,8 +652,6 @@
    */
   public void unload(String name, boolean deleteIndexDir, boolean deleteDataDir, boolean deleteInstanceDir) {
 
-    name = checkDefault(name);
-
     // check for core-init errors first
     CoreLoadFailure loadFailure = coreInitFailures.remove(name);
     if (loadFailure != null) {
@@ -707,7 +703,6 @@
     try (SolrCore core = getCore(name)) {
       if (core != null) {
         registerCore(toName, core, true);
-        name = checkDefault(name);
         SolrCore old = solrCores.remove(name);
         coresLocator.rename(this, old.getCoreDescriptor(), core.getCoreDescriptor());
       }
@@ -745,8 +740,6 @@
    */
   public SolrCore getCore(String name) {
 
-    name = checkDefault(name);
-
     // Do this in two phases since we don't want to lock access to the cores over a load.
     SolrCore core = solrCores.getCoreFromAnyList(name, true);
 
@@ -813,26 +806,6 @@
     return infoHandler;
   }
 
-  // ---------------- Multicore self related methods ---------------
-
-  /**
-   * the default core name, or null if there is no default core name
-   */
-  public String getDefaultCoreName() {
-    return cfg.getDefaultCoreName();
-  }
-
-  // all of the following properties aren't synchronized
-  // but this should be OK since they normally won't be changed rapidly
-  @Deprecated
-  public boolean isPersistent() {
-    return cfg.isPersistent();
-  }
-  
-  public String getAdminPath() {
-    return cfg.getAdminPath();
-  }
-  
   public String getHostName() {
     return this.hostName;
   }
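
createAndLoad now pairs load() with a cleanup path: if loading throws, the container is shut down before the exception propagates, so a bad solr.xml no longer leaks the core-load executor or ZooKeeper connection. Used from embedding code the shape is (paths hypothetical):

    SolrResourceLoader loader = new SolrResourceLoader("/var/solr");
    CoreContainer cc = new CoreContainer(ConfigSolr.fromSolrHome(loader, "/var/solr"));
    try {
      cc.load();
    } catch (Exception e) {
      cc.shutdown(); // otherwise background threads could keep the JVM alive
      throw e;
    }
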
diff --git a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
index d5e7aae..157ce0d 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreDescriptor.java
@@ -23,7 +23,7 @@
 import org.apache.solr.cloud.CloudDescriptor;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.util.IOUtils;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.util.PropertiesUtil;
 
 import java.io.File;
diff --git a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
index 3742e7f..a25d99b 100644
--- a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
+++ b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
@@ -18,8 +18,9 @@
  */
 
 import com.google.common.collect.Lists;
+
 import org.apache.solr.common.SolrException;
-import org.apache.solr.util.IOUtils;
+import org.apache.solr.common.util.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
index 04fe662..a4484f8 100644
--- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
@@ -21,6 +21,7 @@
 
 import java.io.IOException;
 import java.net.URI;
+import java.net.URL;
 import java.net.URLEncoder;
 import java.util.Locale;
 
@@ -37,6 +38,7 @@
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.store.blockcache.BlockCache;
 import org.apache.solr.store.blockcache.BlockDirectory;
@@ -47,11 +49,10 @@
 import org.apache.solr.store.hdfs.HdfsDirectory;
 import org.apache.solr.store.hdfs.HdfsLockFactory;
 import org.apache.solr.util.HdfsUtil;
-import org.apache.solr.util.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class HdfsDirectoryFactory extends CachingDirectoryFactory {
+public class HdfsDirectoryFactory extends CachingDirectoryFactory implements SolrInfoMBean {
   public static Logger LOG = LoggerFactory
       .getLogger(HdfsDirectoryFactory.class);
   
@@ -60,7 +61,7 @@
   public static final String BLOCKCACHE_ENABLED = "solr.hdfs.blockcache.enabled";
   public static final String BLOCKCACHE_GLOBAL = "solr.hdfs.blockcache.global";
   public static final String BLOCKCACHE_READ_ENABLED = "solr.hdfs.blockcache.read.enabled";
-  public static final String BLOCKCACHE_WRITE_ENABLED = "solr.hdfs.blockcache.write.enabled";
+  public static final String BLOCKCACHE_WRITE_ENABLED = "solr.hdfs.blockcache.write.enabled"; // currently buggy and disabled
   
   public static final String NRTCACHINGDIRECTORY_ENABLE = "solr.hdfs.nrtcachingdirectory.enable";
   public static final String NRTCACHINGDIRECTORY_MAXMERGESIZEMB = "solr.hdfs.nrtcachingdirectory.maxmergesizemb";
@@ -86,16 +87,22 @@
   public static Metrics metrics;
   private static Boolean kerberosInit;
   
+  private static final class MetricsHolder {
+    // Lazy initialization holder class idiom [Java Concurrency in Practice, Goetz, 16.6]:
+    // the Metrics instance is not created until MetricsHolder is first referenced.
+    public static final Metrics metrics = new Metrics();
+  }
+
   @Override
   public void init(NamedList args) {
     params = SolrParams.toSolrParams(args);
-    this.hdfsDataDir = params.get(HDFS_HOME);
+    this.hdfsDataDir = getConfig(HDFS_HOME, null);
     if (this.hdfsDataDir != null && this.hdfsDataDir.length() == 0) {
       this.hdfsDataDir = null;
     } else {
       LOG.info(HDFS_HOME + "=" + this.hdfsDataDir);
     }
-    boolean kerberosEnabled = params.getBool(KERBEROS_ENABLED, false);
+    boolean kerberosEnabled = getConfig(KERBEROS_ENABLED, false);
     LOG.info("Solr Kerberos Authentication "
         + (kerberosEnabled ? "enabled" : "disabled"));
     if (kerberosEnabled) {
@@ -125,32 +132,27 @@
 
   @Override
   protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
+    assert params != null : "init must be called before create";
     LOG.info("creating directory factory for path {}", path);
     Configuration conf = getConf();
     
     if (metrics == null) {
-      metrics = new Metrics(conf);
+      metrics = MetricsHolder.metrics;
     }
     
-    boolean blockCacheEnabled = params.getBool(BLOCKCACHE_ENABLED, true);
-    boolean blockCacheGlobal = params.getBool(BLOCKCACHE_GLOBAL, false); // default to false for back compat
-    boolean blockCacheReadEnabled = params.getBool(BLOCKCACHE_READ_ENABLED, true);
-    boolean blockCacheWriteEnabled = params.getBool(BLOCKCACHE_WRITE_ENABLED, false);
-    
-    if (blockCacheWriteEnabled) {
-      LOG.warn("Using " + BLOCKCACHE_WRITE_ENABLED + " is currently buggy and can result in readers seeing a corrupted view of the index.");
-    }
+    boolean blockCacheEnabled = getConfig(BLOCKCACHE_ENABLED, true);
+    boolean blockCacheGlobal = getConfig(BLOCKCACHE_GLOBAL, false); // default to false for back compat
+    boolean blockCacheReadEnabled = getConfig(BLOCKCACHE_READ_ENABLED, true);
     
     final Directory dir;
     if (blockCacheEnabled && dirContext != DirContext.META_DATA) {
-      int numberOfBlocksPerBank = params.getInt(NUMBEROFBLOCKSPERBANK, 16384);
+      int numberOfBlocksPerBank = getConfig(NUMBEROFBLOCKSPERBANK, 16384);
       
       int blockSize = BlockDirectory.BLOCK_SIZE;
       
-      int bankCount = params.getInt(BLOCKCACHE_SLAB_COUNT, 1);
+      int bankCount = getConfig(BLOCKCACHE_SLAB_COUNT, 1);
       
-      boolean directAllocation = params.getBool(
-          BLOCKCACHE_DIRECT_MEMORY_ALLOCATION, true);
+      boolean directAllocation = getConfig(BLOCKCACHE_DIRECT_MEMORY_ALLOCATION, true);
       
       int slabSize = numberOfBlocksPerBank * blockSize;
       LOG.info(
@@ -161,8 +163,8 @@
           new Object[] {slabSize, bankCount,
               ((long) bankCount * (long) slabSize)});
       
-      int bufferSize = params.getInt("solr.hdfs.blockcache.bufferstore.buffersize", 128);
-      int bufferCount = params.getInt("solr.hdfs.blockcache.bufferstore.buffercount", 128 * 128);
+      int bufferSize = getConfig("solr.hdfs.blockcache.bufferstore.buffersize", 128);
+      int bufferCount = getConfig("solr.hdfs.blockcache.bufferstore.buffercount", 128 * 128);
       
       BlockCache blockCache = getBlockDirectoryCache(numberOfBlocksPerBank,
           blockSize, bankCount, directAllocation, slabSize,
@@ -170,25 +172,51 @@
       
       Cache cache = new BlockDirectoryCache(blockCache, path, metrics, blockCacheGlobal);
       HdfsDirectory hdfsDirectory = new HdfsDirectory(new Path(path), lockFactory, conf);
-      dir = new BlockDirectory(path, hdfsDirectory, cache, null,
-          blockCacheReadEnabled, blockCacheWriteEnabled);
+      dir = new BlockDirectory(path, hdfsDirectory, cache, null, blockCacheReadEnabled, false);
     } else {
       dir = new HdfsDirectory(new Path(path), lockFactory, conf);
     }
     
-    boolean nrtCachingDirectory = params.getBool(NRTCACHINGDIRECTORY_ENABLE, true);
+    boolean nrtCachingDirectory = getConfig(NRTCACHINGDIRECTORY_ENABLE, true);
     if (nrtCachingDirectory) {
-      double nrtCacheMaxMergeSizeMB = params.getInt(
-          NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 16);
-      double nrtCacheMaxCacheMB = params.getInt(NRTCACHINGDIRECTORY_MAXCACHEMB,
-          192);
+      double nrtCacheMaxMergeSizeMB = getConfig(NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 16);
+      double nrtCacheMaxCacheMB = getConfig(NRTCACHINGDIRECTORY_MAXCACHEMB, 192);
       
-      return new NRTCachingDirectory(dir, nrtCacheMaxMergeSizeMB,
-          nrtCacheMaxCacheMB);
+      return new NRTCachingDirectory(dir, nrtCacheMaxMergeSizeMB, nrtCacheMaxCacheMB);
     }
     return dir;
   }
 
+  boolean getConfig(String name, boolean defaultValue) {
+    Boolean value = params.getBool(name);
+    if (value == null) {
+      String sysValue = System.getProperty(name);
+      if (sysValue != null) {
+        value = Boolean.valueOf(sysValue);
+      }
+    }
+    return value == null ? defaultValue : value;
+  }
+  
+  int getConfig(String name, int defaultValue) {
+    Integer value = params.getInt(name);
+    if (value == null) {
+      String sysValue = System.getProperty(name);
+      if (sysValue != null) {
+        value = Integer.parseInt(sysValue);
+      }
+    }
+    return value == null ? defaultValue : value;
+  }
+
+  String getConfig(String name, String defaultValue) {
+    String value = params.get(name);
+    if (value == null) {
+      value = System.getProperty(name);
+    }
+    return value == null ? defaultValue : value;
+  }
+  
   private BlockCache getBlockDirectoryCache(int numberOfBlocksPerBank, int blockSize, int bankCount,
       boolean directAllocation, int slabSize, int bufferSize, int bufferCount, boolean staticBlockCache) {
     if (!staticBlockCache) {
@@ -245,7 +273,7 @@
   
   private Configuration getConf() {
     Configuration conf = new Configuration();
-    confDir = params.get(CONFIG_DIRECTORY, null);
+    confDir = getConfig(CONFIG_DIRECTORY, null);
     HdfsUtil.addHdfsResources(conf, confDir);
     return conf;
   }
@@ -322,12 +350,12 @@
   }
   
   private void initKerberos() {
-    String keytabFile = params.get(KERBEROS_KEYTAB, "").trim();
+    String keytabFile = getConfig(KERBEROS_KEYTAB, "").trim();
     if (keytabFile.length() == 0) {
       throw new IllegalArgumentException(KERBEROS_KEYTAB + " required because "
           + KERBEROS_ENABLED + " set to true");
     }
-    String principal = params.get(KERBEROS_PRINCIPAL, "");
+    String principal = getConfig(KERBEROS_PRINCIPAL, "");
     if (principal.length() == 0) {
       throw new IllegalArgumentException(KERBEROS_PRINCIPAL
           + " required because " + KERBEROS_ENABLED + " set to true");
@@ -359,4 +387,45 @@
       }
     }
   }
+
+  // SolrInfoMBean methods
+
+  @Override
+  public String getName() {
+    return getClass().getSimpleName() + "BlockCache";
+  }
+
+  @Override
+  public String getVersion() {
+    return SolrCore.version;
+  }
+
+  @Override
+  public String getDescription() {
+    return "Provides metrics for the HdfsDirectoryFactory BlockCache.";
+  }
+
+  @Override
+  public Category getCategory() {
+    return Category.CACHE;
+  }
+
+  @Override
+  public String getSource() {
+    return null;
+  }
+
+  @Override
+  public URL[] getDocs() {
+    return null;
+  }
+
+  @Override
+  public NamedList<?> getStatistics() {
+    if (metrics == null) {
+      return new NamedList<Object>();
+    }
+
+    return metrics.getStatistics();
+  }
 }
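
Each getConfig overload resolves a setting in the same order: explicit <directoryFactory> parameter first, then a JVM system property of the same name, then the supplied default. That lets a deployment flip a setting without editing solrconfig.xml (flag value hypothetical):

    // started with: java -Dsolr.hdfs.blockcache.slab.count=4 ...
    int bankCount = getConfig(BLOCKCACHE_SLAB_COUNT, 1); // resolves to 4 in that JVM, 1 otherwise
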
diff --git a/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java b/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java
index f48c9da..036d108 100644
--- a/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java
+++ b/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java
@@ -21,6 +21,8 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.Directory;
 import org.apache.solr.update.SolrIndexWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.*;
@@ -43,6 +45,8 @@
  * @see org.apache.lucene.index.IndexDeletionPolicy
  */
 public final class IndexDeletionPolicyWrapper extends IndexDeletionPolicy {
+  private static final Logger LOG = LoggerFactory.getLogger(IndexDeletionPolicyWrapper.class.getName());
+
   private final IndexDeletionPolicy deletionPolicy;
   private volatile Map<Long, IndexCommit> solrVersionVsCommits = new ConcurrentHashMap<>();
   private final Map<Long, Long> reserves = new ConcurrentHashMap<>();
@@ -82,7 +86,11 @@
 
       // this is the common success case: the older time didn't exist, or
       // came before the new time.
-      if (previousTime == null || previousTime <= timeToSet) break;
+      if (previousTime == null || previousTime <= timeToSet) {
+        LOG.debug("Commit point reservation for generation {} set to {} (requested reserve time of {})",
+            indexGen, timeToSet, reserveTime);
+        break;
+      }
 
       // At this point, we overwrote a longer reservation, so we want to restore the older one.
       // the problem is that an even longer reservation may come in concurrently
diff --git a/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java b/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java
index c43310a..7576bfe3 100644
--- a/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/IndexReaderFactory.java
@@ -60,9 +60,8 @@
   /**
    * Creates a new IndexReader instance using the given IndexWriter.
    * <p>
-   * This is used for opening the initial reader in NRT mode ({@code nrtMode=true}
-   * in solrconfig.xml)
-   * 
+   * This is used for opening the initial reader in NRT mode
+   *
    * @param writer IndexWriter
    * @param core {@link SolrCore} instance where this reader will be used. NOTE:
    * this SolrCore instance may not be fully configured yet, but basic things like
diff --git a/solr/core/src/java/org/apache/solr/core/InitParams.java b/solr/core/src/java/org/apache/solr/core/InitParams.java
index 6aec640..424b98a 100644
--- a/solr/core/src/java/org/apache/solr/core/InitParams.java
+++ b/solr/core/src/java/org/apache/solr/core/InitParams.java
@@ -65,7 +65,8 @@
   private static boolean matchPath(String path, String name){
     List<String> pathSplit = StrUtils.splitSmart(path, '/');
     List<String> nameSplit = StrUtils.splitSmart(name, '/');
-    for (int i = 0; i < nameSplit.size(); i++) {
+    int i = 0;
+    for (; i < nameSplit.size(); i++) {
       String s = nameSplit.get(i);
       String ps = pathSplit.size()>i ?  pathSplit.get(i) :null;
       if(ps == null) return false;
@@ -74,7 +75,8 @@
       if("**".equals(ps)) return true;
       return false;
     }
-    return true;
+    String ps = pathSplit.size() > i ? pathSplit.get(i) : null;
+    return "*".equals(ps) || "**".equals(ps);
 
   }
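
The rewrite changes what happens when the path spec has more segments than the handler name: previously the loop fell off the end of nameSplit and returned true unconditionally; now the spec's extra segment must be a wildcard. Illustrative calls (inferred from the code above, not from this patch's tests):

    matchPath("/query/*", "/query");     // true  - the trailing "*" (or "**") absorbs the extra segment
    matchPath("/query/debug", "/query"); // false - previously matched by falling out of the loop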
 
diff --git a/solr/core/src/java/org/apache/solr/core/JarRepository.java b/solr/core/src/java/org/apache/solr/core/JarRepository.java
index 33db0eb..25ba7c0 100644
--- a/solr/core/src/java/org/apache/solr/core/JarRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/JarRepository.java
@@ -23,8 +23,11 @@
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.zip.ZipEntry;
@@ -48,6 +51,18 @@
  */
 public class JarRepository {
   public static Logger log = LoggerFactory.getLogger(JarRepository.class);
+
+  static final Random RANDOM;
+  static {
+    // We try to make things reproducible in the context of our tests by initializing the random instance
+    // based on the current seed
+    String seed = System.getProperty("tests.seed");
+    if (seed == null) {
+      RANDOM = new Random();
+    } else {
+      RANDOM = new Random(seed.hashCode());
+    }
+  }
   
   private final CoreContainer coreContainer;
   
@@ -71,8 +86,10 @@
         ClusterState cs = this.coreContainer.getZkController().getZkStateReader().getClusterState();
         DocCollection coll = cs.getCollectionOrNull(CollectionsHandler.SYSTEM_COLL);
         if (coll == null) throw new SolrException(SERVICE_UNAVAILABLE, ".system collection not available");
-        Slice slice = coll.getActiveSlices().iterator().next();
-        if (slice == null) throw new SolrException(SERVICE_UNAVAILABLE, ".no active slices for .system collection");
+        ArrayList<Slice> slices = new ArrayList<>(coll.getActiveSlices());
+        if (slices.isEmpty()) throw new SolrException(SERVICE_UNAVAILABLE, ".no active slices for .system collection");
+        Collections.shuffle(slices, RANDOM); //do load balancing
+        Slice slice = slices.get(0);
         Replica replica = slice.getReplicas().iterator().next();
         if (replica == null) throw new SolrException(SERVICE_UNAVAILABLE, ".no active replica available for .system collection");
         String url = replica.getStr(BASE_URL_PROP) + "/.system/blob/" + key + "?wt=filestream";
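
Shuffling the active slices spreads blob fetches across the .system collection while the tests.seed property keeps test runs deterministic. A minimal sketch of the idea (pickOne is a hypothetical helper, not part of the patch):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.Random;

    public class ShuffleBalanceSketch {
      // Seed from the test runner when present, so failures reproduce.
      static final Random RANDOM;
      static {
        String seed = System.getProperty("tests.seed");
        RANDOM = (seed == null) ? new Random() : new Random(seed.hashCode());
      }

      // Shuffle a copy and take the head: cheap random load balancing.
      static <T> T pickOne(List<T> candidates) {
        List<T> copy = new ArrayList<>(candidates);
        Collections.shuffle(copy, RANDOM);
        return copy.get(0);
      }
    }
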
diff --git a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
index cea9768..840cb0d 100644
--- a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
+++ b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.core;
 
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrConfig.JmxConfiguration;
@@ -279,7 +280,9 @@
           }
         }
       } catch (Exception e) {
-        LOG.warn("Could not getStatistics on info bean {}", infoBean.getName(), e);
+        // don't log issue if the core is closing
+        if (!(SolrException.getRootCause(e) instanceof AlreadyClosedException))
+          LOG.warn("Could not getStatistics on info bean {}", infoBean.getName(), e);
       }
 
       MBeanAttributeInfo[] attrInfoArr = attrInfoList
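
The guard above inspects the root cause so a core that is simply shutting down no longer logs a scary warning. A stand-in sketch of that check (SolrException.getRootCause is the real helper; rootCause here approximates it):

    import org.apache.lucene.store.AlreadyClosedException;

    public class RootCauseSketch {
      // Walk the cause chain to its end, guarding against cycles.
      static Throwable rootCause(Throwable t) {
        Throwable r = t;
        while (r.getCause() != null && r.getCause() != r) r = r.getCause();
        return r;
      }

      // Only warn when the failure is not a routine core shutdown.
      static boolean shouldWarn(Exception e) {
        return !(rootCause(e) instanceof AlreadyClosedException);
      }
    }
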
diff --git a/solr/core/src/java/org/apache/solr/core/PluginInfo.java b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
index 67fe6ed..82b0b23 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginInfo.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
@@ -40,7 +40,7 @@
   public final List<PluginInfo> children;
   private boolean isFromSolrConfig;
 
-  public PluginInfo(String type, Map<String, String> attrs ,NamedList initArgs, List<PluginInfo> children) {
+  public PluginInfo(String type, Map<String, String> attrs, NamedList initArgs, List<PluginInfo> children) {
     this.type = type;
     this.name = attrs.get(NAME);
     this.className = attrs.get(CLASS_NAME);
@@ -63,12 +63,15 @@
 
   public PluginInfo(String type, Map<String,Object> map) {
     LinkedHashMap m = new LinkedHashMap<>(map);
-    NamedList nl = new NamedList();
-    for (String s : asList(DEFAULTS, APPENDS, INVARIANTS)) if (m.get(s) != null) nl.add(s, map.remove(s));
+    initArgs = new NamedList();
+    for (Map.Entry<String, Object> entry : map.entrySet()) {
+      Object value = entry.getValue();
+      if (value instanceof Map) value = new NamedList((Map) value);
+      initArgs.add(entry.getKey(), value);
+    }
     this.type = type;
     this.name = (String) m.get(NAME);
     this.className = (String) m.get(CLASS_NAME);
-    this.initArgs = nl;
     attributes = unmodifiableMap(m);
     this.children =  Collections.<PluginInfo>emptyList();
     isFromSolrConfig = true;
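
The rewritten constructor copies every map entry into initArgs and wraps nested maps as NamedLists, instead of only lifting defaults/appends/invariants. A sketch of that conversion (toInitArgs is an illustrative name):

    import java.util.Map;
    import org.apache.solr.common.util.NamedList;

    public class InitArgsSketch {
      // Every entry becomes an init arg; nested maps become NamedLists,
      // mirroring what the new PluginInfo constructor does.
      static NamedList toInitArgs(Map<String, Object> map) {
        NamedList args = new NamedList();
        for (Map.Entry<String, Object> e : map.entrySet()) {
          Object v = e.getValue();
          if (v instanceof Map) v = new NamedList((Map) v);
          args.add(e.getKey(), v);
        }
        return args;
      }
    }
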
diff --git a/solr/core/src/java/org/apache/solr/core/RequestHandlers.java b/solr/core/src/java/org/apache/solr/core/RequestHandlers.java
index 1e15363..18339e5 100644
--- a/solr/core/src/java/org/apache/solr/core/RequestHandlers.java
+++ b/solr/core/src/java/org/apache/solr/core/RequestHandlers.java
@@ -433,14 +433,14 @@
         return;
       }
 
-      version = _pluginInfo.attributes.get("version");
-      if (version == null) {
+      if (_pluginInfo.attributes.get("version") == null ){
         errMsg = "ERROR 'lib' attribute must be accompanied with version also";
         unrecoverable = true;
         _handler = this;
         log.error(errMsg);
         return;
       }
+      version = String.valueOf(_pluginInfo.attributes.get("version"));
       classLoader = new MemClassLoader(this);
     }
 
@@ -494,7 +494,7 @@
     public void close() throws Exception {
       super.close();
       if (_closed) return;
-      classLoader.releaseJar();
+      if(classLoader != null) classLoader.releaseJar();
       _closed = true;
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/core/RequestParams.java b/solr/core/src/java/org/apache/solr/core/RequestParams.java
index 1540755..a1050be 100644
--- a/solr/core/src/java/org/apache/solr/core/RequestParams.java
+++ b/solr/core/src/java/org/apache/solr/core/RequestParams.java
@@ -23,6 +23,7 @@
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.LinkedHashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.solr.cloud.ZkSolrResourceLoader;
@@ -58,7 +59,7 @@
         Map.Entry e = (Map.Entry) o;
         if (e.getValue() instanceof Map) {
           Map value = (Map) e.getValue();
-          Map copy = new LinkedHashMap<>(value);
+          Map copy = getMapCopy(value);
           Map meta = (Map) copy.remove("");
           this.paramsets.put((String) e.getKey(), new VersionedParams(Collections.unmodifiableMap(copy) ,meta));
         }
@@ -67,6 +68,32 @@
     this.znodeVersion = znodeVersion;
   }
 
+  private static Map getMapCopy(Map value) {
+    Map copy = new LinkedHashMap<>();
+    for (Object o1 : value.entrySet()) {
+      Map.Entry entry = (Map.Entry) o1;
+      if("".equals( entry.getKey())){
+        copy.put(entry.getKey(),entry.getValue());
+        continue;
+      }
+      if (entry.getValue() != null) {
+        if (entry.getValue() instanceof List) {
+          List l = (List) entry.getValue();
+          String[] sarr = new String[l.size()];
+          for (int i = 0; i < l.size(); i++) {
+            if (l.get(i) != null) sarr[i] = String.valueOf(l.get(i));
+          }
+          copy.put(entry.getKey(), sarr);
+        } else {
+          copy.put(entry.getKey(), String.valueOf(entry.getValue()));
+        }
+      } else {
+        copy.put(entry.getKey(), entry.getValue());
+      }
+    }
+    return copy;
+  }
+
   public VersionedParams getParams(String name){
     return paramsets.get(name);
   }
@@ -77,7 +104,7 @@
 
   @Override
   public Map<String, Object> toMap() {
-    return getMapWithVersion(data,znodeVersion);
+    return getMapWithVersion(data, znodeVersion);
   }
 
   public static Map<String, Object> getMapWithVersion(Map<String, Object> data, int znodeVersion) {
@@ -135,9 +162,9 @@
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
       }
 
-    } else if(requestParams == null) {
+    } else {
       Object[] o = getMapAndVersion(loader, RequestParams.RESOURCE);
-      requestParams = new RequestParams((Map) o[0],(Integer)o[1]);
+      requestParams = new RequestParams((Map) o[0], (Integer) o[1]);
     }
 
     return requestParams;
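
getMapCopy normalizes paramset values so they behave like ordinary request params: lists become String[], scalars become Strings, and the "" meta entry passes through untouched. A condensed sketch of the same normalization (class and method names are illustrative):

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class ParamCopySketch {
      static Map<String, Object> normalize(Map<String, Object> value) {
        Map<String, Object> out = new LinkedHashMap<>();
        for (Map.Entry<String, Object> e : value.entrySet()) {
          Object v = e.getValue();
          if ("".equals(e.getKey()) || v == null) {
            out.put(e.getKey(), v);           // meta entry or null: pass through
          } else if (v instanceof List) {
            List<?> l = (List<?>) v;
            String[] arr = new String[l.size()];
            for (int i = 0; i < l.size(); i++) {
              if (l.get(i) != null) arr[i] = String.valueOf(l.get(i));
            }
            out.put(e.getKey(), arr);         // multi-valued param
          } else {
            out.put(e.getKey(), String.valueOf(v)); // single-valued param
          }
        }
        return out;
      }
    }
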
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index 78025b8..b63e1f9 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -193,7 +193,11 @@
       defaultIndexConfig = mainIndexConfig = null;
       indexConfigPrefix = "indexConfig";
     }
-    nrtMode = getBool(indexConfigPrefix+"/nrtMode", true);
+    assertWarnOrFail("The <nrtMode> config has been discontinued and NRT mode is always used by Solr." +
+        " This config will be removed in future versions.",
+        getNode(indexConfigPrefix + "/nrtMode", false) == null,
+        true);
+
     // Parse indexConfig section, using mainIndex as backup in case old config is used
     indexConfig = new SolrIndexConfig(this, "indexConfig", mainIndexConfig);
 
@@ -419,7 +423,6 @@
   public final int queryResultWindowSize;
   public final int queryResultMaxDocsCached;
   public final boolean enableLazyFieldLoading;
-  public final boolean nrtMode;
   // DocSet
   public final float hashSetInverseLoadFactor;
   public final int hashDocSetMaxSize;
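
The nrtMode removal leans on assertWarnOrFail to reject the discontinued config node. A stand-in sketch of that contract, under the assumption that the real helper (in Solr's Config class) warns or throws depending on its fail flag:

    import org.apache.solr.common.SolrException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class WarnOrFailSketch {
      private static final Logger log = LoggerFactory.getLogger(WarnOrFailSketch.class);

      // When the condition does not hold: fail hard if failCondition is
      // set, otherwise just warn and continue.
      static void assertWarnOrFail(String reason, boolean assertCondition, boolean failCondition) {
        if (assertCondition) return;
        if (failCondition) throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, reason);
        log.warn(reason);
      }
    }
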
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index dbb967a..6a21c50 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -17,6 +17,47 @@
 
 package org.apache.solr.core;
 
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.lang.reflect.Constructor;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.NoSuchFileException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.StringTokenizer;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.xml.parsers.ParserConfigurationException;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.DirectoryReader;
@@ -38,6 +79,7 @@
 import org.apache.solr.common.params.CommonParams.EchoParamStyle;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.DirectoryFactory.DirContext;
@@ -97,7 +139,6 @@
 import org.apache.solr.update.processor.UpdateRequestProcessorChain;
 import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
 import org.apache.solr.util.DefaultSolrThreadFactory;
-import org.apache.solr.util.IOUtils;
 import org.apache.solr.util.PropertiesInputStream;
 import org.apache.solr.util.RefCounted;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
@@ -109,46 +150,6 @@
 import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;
 
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.lang.reflect.Constructor;
-import java.net.URL;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.NoSuchFileException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.IdentityHashMap;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.locks.ReentrantLock;
-
 /**
  *
  */
@@ -441,7 +442,7 @@
     solrCoreState.increfSolrCoreState();
     SolrCore currentCore;
     boolean indexDirChange = !getNewIndexDir().equals(getIndexDir());
-    if (indexDirChange || !coreConfig.getSolrConfig().nrtMode) {
+    if (indexDirChange) {
       // the directory is changing, don't pass on state
       currentCore = null;
     } else {
@@ -462,13 +463,6 @@
   }
 
 
-  // gets a non-caching searcher
-  public SolrIndexSearcher newSearcher(String name) throws IOException {
-    return new SolrIndexSearcher(this, getNewIndexDir(), getLatestSchema(), getSolrConfig().indexConfig, 
-                                 name, false, directoryFactory);
-  }
-
-
    private void initDirectoryFactory() {
     DirectoryFactory dirFactory;
     PluginInfo info = solrConfig.getPluginInfo(DirectoryFactory.class.getName());
@@ -542,7 +536,7 @@
         log.warn(logid+"Solr index directory '" + new File(indexDir) + "' doesn't exist."
                 + " Creating new index...");
 
-        SolrIndexWriter writer = SolrIndexWriter.create("SolrCore.initIndex", indexDir, getDirectoryFactory(), true, 
+        SolrIndexWriter writer = SolrIndexWriter.create(this, "SolrCore.initIndex", indexDir, getDirectoryFactory(), true, 
                                                         getLatestSchema(), solrConfig.indexConfig, solrDelPolicy, codec);
         writer.close();
       }
@@ -852,13 +846,7 @@
 
             @Override
             public DirectoryReader call() throws Exception {
-              if(getSolrConfig().nrtMode) {
-                // if in NRT mode, need to open from the previous writer
-                return indexReaderFactory.newReader(iw, core);
-              } else {
-                // if not NRT, need to create a new reader from the directory
-                return indexReaderFactory.newReader(iw.getDirectory(), core);
-              }
+              return indexReaderFactory.newReader(iw, core);
             }
           };
         }
@@ -1500,7 +1488,6 @@
 
     SolrIndexSearcher tmp;
     RefCounted<SolrIndexSearcher> newestSearcher = null;
-    boolean nrt = solrConfig.nrtMode && updateHandlerReopens;
 
     openSearcherLock.lock();
     try {
@@ -1509,7 +1496,7 @@
       String newIndexDirFile = null;
 
       // if it's not a normal near-realtime update, check that paths haven't changed.
-      if (!nrt) {
+      if (!updateHandlerReopens) {
         indexDirFile = getDirectoryFactory().normalize(getIndexDir());
         newIndexDirFile = getDirectoryFactory().normalize(newIndexDir);
       }
@@ -1521,7 +1508,7 @@
         }
       }
 
-      if (newestSearcher != null && (nrt || indexDirFile.equals(newIndexDirFile))) {
+      if (newestSearcher != null && (updateHandlerReopens || indexDirFile.equals(newIndexDirFile))) {
 
         DirectoryReader newReader;
         DirectoryReader currentReader = newestSearcher.get().getRawReader();
@@ -1531,12 +1518,11 @@
         RefCounted<IndexWriter> writer = getUpdateHandler().getSolrCoreState()
             .getIndexWriter(null);
         try {
-          if (writer != null && solrConfig.nrtMode) {
+          if (writer != null) {
             // if in NRT mode, open from the writer
             newReader = DirectoryReader.openIfChanged(currentReader, writer.get(), true);
           } else {
             // verbose("start reopen without writer, reader=", currentReader);
-            // if not in NRT mode, just re-open the reader
             newReader = DirectoryReader.openIfChanged(currentReader);
             // verbose("reopen result", newReader);
           }
@@ -1583,7 +1569,7 @@
           DirectoryReader newReader = newReaderCreator.call();
           tmp = new SolrIndexSearcher(this, newIndexDir, getLatestSchema(), 
               (realtime ? "realtime":"main"), newReader, true, !realtime, true, directoryFactory);
-        } else if (solrConfig.nrtMode) {
+        } else {
           RefCounted<IndexWriter> writer = getUpdateHandler().getSolrCoreState().getIndexWriter(this);
           DirectoryReader newReader = null;
           try {
@@ -1593,12 +1579,6 @@
           }
           tmp = new SolrIndexSearcher(this, newIndexDir, getLatestSchema(),
               (realtime ? "realtime":"main"), newReader, true, !realtime, true, directoryFactory);
-        } else {
-         // normal open that happens at startup
-        // verbose("non-reopen START:");
-        tmp = new SolrIndexSearcher(this, newIndexDir, getLatestSchema(), getSolrConfig().indexConfig,
-                                    "main", true, directoryFactory);
-        // verbose("non-reopen DONE: searcher=",tmp);
         }
       }
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index 93553ba..332a2cf 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -18,6 +18,7 @@
 package org.apache.solr.core;
 
 import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 import org.apache.lucene.analysis.util.TokenizerFactory;
@@ -26,7 +27,6 @@
 import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.util.IOUtils;
-import org.apache.solr.common.ResourceLoader;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.handler.admin.CoreAdminHandler;
 import org.apache.solr.handler.component.SearchComponent;
@@ -519,41 +519,11 @@
     return newInstance(name, expectedType, empty);
   }
 
-  public <T> T newInstance(String cname, Class<T> expectedType, String ... subpackages) {
-    Class<? extends T> clazz = findClass(cname, expectedType, subpackages);
-    if( clazz == null ) {
-      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
-          "Can not find class: "+cname + " in " + classLoader);
-    }
-    
-    T obj = null;
-    try {
-      obj = clazz.newInstance();
-    } 
-    catch (Exception e) {
-      throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
-          "Error instantiating class: '" + clazz.getName()+"'", e);
-    }
+  private static final Class[] NO_CLASSES = new Class[0];
+  private static final Object[] NO_OBJECTS = new Object[0];
 
-    if (!live) {
-      if( obj instanceof SolrCoreAware ) {
-        assertAwareCompatibility( SolrCoreAware.class, obj );
-        waitingForCore.add( (SolrCoreAware)obj );
-      }
-      if (org.apache.solr.util.plugin.ResourceLoaderAware.class.isInstance(obj)) {
-        log.warn("Class [{}] uses org.apache.solr.util.plugin.ResourceLoaderAware " +
-            "which is deprecated. Change to org.apache.lucene.analysis.util.ResourceLoaderAware.", cname);
-      }
-      if( obj instanceof ResourceLoaderAware ) {
-        assertAwareCompatibility( ResourceLoaderAware.class, obj );
-        waitingForResources.add( (ResourceLoaderAware)obj );
-      }
-      if (obj instanceof SolrInfoMBean){
-        //TODO: Assert here?
-        infoMBeans.add((SolrInfoMBean) obj);
-      }
-    }
-    return obj;
+  public <T> T newInstance(String cname, Class<T> expectedType, String ... subpackages) {
+    return newInstance(cname, expectedType, subpackages, NO_CLASSES, NO_OBJECTS);
   }
 
   public CoreAdminHandler newAdminHandlerInstance(final CoreContainer coreContainer, String cname, String ... subpackages) {
@@ -576,10 +546,6 @@
     if (!live) {
       //TODO: Does SolrCoreAware make sense here since in a multi-core context
       // which core are we talking about ?
-      if (org.apache.solr.util.plugin.ResourceLoaderAware.class.isInstance(obj)) {
-        log.warn("Class [{}] uses org.apache.solr.util.plugin.ResourceLoaderAware " +
-            "which is deprecated. Change to org.apache.lucene.analysis.util.ResourceLoaderAware.", cname);
-      }
       if( obj instanceof ResourceLoaderAware ) {
         assertAwareCompatibility( ResourceLoaderAware.class, obj );
         waitingForResources.add( (ResourceLoaderAware)obj );
@@ -603,8 +569,13 @@
 
       Constructor<? extends T> constructor = clazz.getConstructor(params);
       obj = constructor.newInstance(args);
-    }
-    catch (Exception e) {
+
+    } catch (Error err) {
+      log.error("Loading Class " + cName + " ("+clazz.getName() + ") triggered serious java error: "
+                + err.getClass().getName(), err);
+      throw err;
+
+    } catch (Exception e) {
       throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
           "Error instantiating class: '" + clazz.getName()+"'", e);
     }
@@ -614,10 +585,6 @@
         assertAwareCompatibility( SolrCoreAware.class, obj );
         waitingForCore.add( (SolrCoreAware)obj );
       }
-      if (org.apache.solr.util.plugin.ResourceLoaderAware.class.isInstance(obj)) {
-        log.warn("Class [{}] uses org.apache.solr.util.plugin.ResourceLoaderAware " +
-            "which is deprecated. Change to org.apache.lucene.analysis.util.ResourceLoaderAware.", cName);
-      }
       if( obj instanceof ResourceLoaderAware ) {
         assertAwareCompatibility( ResourceLoaderAware.class, obj );
         waitingForResources.add( (ResourceLoaderAware)obj );
@@ -829,10 +796,10 @@
 
   public static void persistConfLocally(SolrResourceLoader loader, String resourceName, byte[] content) {
     // Persist locally
-    File managedSchemaFile = new File(loader.getConfigDir(), resourceName);
+    File confFile = new File(loader.getConfigDir(), resourceName);
     OutputStreamWriter writer = null;
     try {
-      File parentDir = managedSchemaFile.getParentFile();
+      File parentDir = confFile.getParentFile();
       if ( ! parentDir.isDirectory()) {
         if ( ! parentDir.mkdirs()) {
           final String msg = "Can't create managed schema directory " + parentDir.getAbsolutePath();
@@ -840,19 +807,19 @@
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
         }
       }
-      final FileOutputStream out = new FileOutputStream(managedSchemaFile);
+      final FileOutputStream out = new FileOutputStream(confFile);
       out.write(content);
-      log.info("Upgraded to managed schema at " + managedSchemaFile.getPath());
+      log.info("Written confile " + resourceName);
     } catch (IOException e) {
-      final String msg = "Error persisting managed schema " + managedSchemaFile;
+      final String msg = "Error persisting conf file " + resourceName;
       log.error(msg, e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg, e);
     } finally {
       org.apache.commons.io.IOUtils.closeQuietly(writer);
       try {
-        FileUtils.sync(managedSchemaFile);
+        FileUtils.sync(confFile);
       } catch (IOException e) {
-        final String msg = "Error syncing the managed schema file " + managedSchemaFile;
+        final String msg = "Error syncing conf file " + resourceName;
         log.error(msg, e);
       }
     }
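
The consolidation funnels the no-arg newInstance through the constructor-based overload, so the SolrCoreAware/ResourceLoaderAware bookkeeping lives in one place. A reduced sketch of that delegation (awareness checks and Solr error wrapping omitted):

    import java.lang.reflect.Constructor;

    public class FactorySketch {
      private static final Class[] NO_CLASSES = new Class[0];
      private static final Object[] NO_OBJECTS = new Object[0];

      // No-arg creation is just the general case with empty arrays.
      static <T> T newInstance(Class<? extends T> clazz) throws Exception {
        return newInstance(clazz, NO_CLASSES, NO_OBJECTS);
      }

      static <T> T newInstance(Class<? extends T> clazz, Class[] params, Object[] args) throws Exception {
        Constructor<? extends T> constructor = clazz.getConstructor(params);
        return constructor.newInstance(args);
      }
    }
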
diff --git a/solr/core/src/java/org/apache/solr/core/SolrXMLCoresLocator.java b/solr/core/src/java/org/apache/solr/core/SolrXMLCoresLocator.java
deleted file mode 100644
index bd70523..0000000
--- a/solr/core/src/java/org/apache/solr/core/SolrXMLCoresLocator.java
+++ /dev/null
@@ -1,240 +0,0 @@
-package org.apache.solr.core;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import com.google.common.collect.ImmutableList;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Writes any changes in core definitions to this instance's solr.xml
- */
-public class SolrXMLCoresLocator implements CoresLocator {
-
-  private static final Logger logger = LoggerFactory.getLogger(SolrXMLCoresLocator.class);
-
-  private final String solrXmlTemplate;
-  private final ConfigSolrXmlOld cfg;
-
-  /** Core name to use if a core definition has no name */
-  public static final String DEFAULT_CORE_NAME = "collection1";
-
-  /**
-   * Create a new SolrXMLCoresLocator
-   * @param originalXML   the original content of the solr.xml file
-   * @param cfg           the CoreContainer's config object
-   */
-  public SolrXMLCoresLocator(String originalXML, ConfigSolrXmlOld cfg) {
-    this.solrXmlTemplate = buildTemplate(originalXML);
-    this.cfg = cfg;
-  }
-
-  private static Pattern POPULATED_CORES_TAG
-      = Pattern.compile("^(.*<cores[^>]*>)(.*)(</cores>.*)$", Pattern.DOTALL);
-  private static Pattern EMPTY_CORES_TAG
-      = Pattern.compile("^(.*<cores[^>]*)/>(.*)$", Pattern.DOTALL);
-
-  private static Pattern SHARD_HANDLER_TAG
-      = Pattern.compile("(<shardHandlerFactory[^>]*>.*</shardHandlerFactory>)|(<shardHandlerFactory[^>]*/>)",
-                          Pattern.DOTALL);
-
-  private static String CORES_PLACEHOLDER = "{{CORES_PLACEHOLDER}}";
-
-  // Package-private for testing
-  // We replace the existing <cores></cores> contents with a template pattern
-  // that we can later replace with the up-to-date core definitions.  We also
-  // need to extract the <shardHandlerFactory> section, as, annoyingly, it's
-  // kept inside <cores/>.
-  static String buildTemplate(String originalXML) {
-
-    String shardHandlerConfig = "";
-    Matcher shfMatcher = SHARD_HANDLER_TAG.matcher(originalXML);
-    if (shfMatcher.find()) {
-      shardHandlerConfig = shfMatcher.group(0);
-    }
-
-    Matcher popMatcher = POPULATED_CORES_TAG.matcher(originalXML);
-    if (popMatcher.matches()) {
-      return new StringBuilder(popMatcher.group(1))
-          .append(CORES_PLACEHOLDER).append(shardHandlerConfig).append(popMatcher.group(3)).toString();
-    }
-
-    // Self-closing <cores/> tag gets expanded to <cores></cores>
-    Matcher emptyMatcher = EMPTY_CORES_TAG.matcher(originalXML);
-    if (emptyMatcher.matches())
-      return new StringBuilder(emptyMatcher.group(1))
-          .append(">").append(CORES_PLACEHOLDER).append("</cores>")
-          .append(emptyMatcher.group(2)).toString();
-
-    // If there's no <cores> tag at all, add one at the end of the file
-    return originalXML.replace("</solr>", "<cores>" + CORES_PLACEHOLDER + "</cores></solr>");
-  }
-
-  // protected access for testing
-  protected String buildSolrXML(List<CoreDescriptor> cds) {
-    StringBuilder builder = new StringBuilder();
-    for (CoreDescriptor cd : cds) {
-      builder.append(buildCoreTag(cd));
-    }
-    return solrXmlTemplate.replace(CORES_PLACEHOLDER, builder.toString());
-  }
-
-  public static final String NEWLINE = System.getProperty("line.separator");
-  public static final String INDENT = "    ";
-
-  /**
-   * Serialize a coredescriptor as a String containing an XML &lt;core> tag.
-   * @param cd the CoreDescriptor
-   * @return an XML representation of the CoreDescriptor
-   */
-  protected static String buildCoreTag(CoreDescriptor cd) {
-
-    StringBuilder builder = new StringBuilder(NEWLINE).append(INDENT).append("<core");
-    for (Map.Entry<Object, Object> entry : cd.getPersistableStandardProperties().entrySet()) {
-      builder.append(" ").append(entry.getKey()).append("=\"").append(entry.getValue()).append("\"");
-    }
-
-    Properties userProperties = cd.getPersistableUserProperties();
-    if (userProperties.isEmpty()) {
-      return builder.append("/>").append(NEWLINE).toString();
-    }
-
-    builder.append(">").append(NEWLINE);
-    for (Map.Entry<Object, Object> entry : userProperties.entrySet()) {
-      builder.append(INDENT).append(INDENT)
-          .append("<property name=\"").append(entry.getKey()).append("\" value=\"")
-          .append(entry.getValue()).append("\"/>").append(NEWLINE);
-    }
-
-    return builder.append("</core>").append(NEWLINE).toString();
-
-  }
-
-  @Override
-  public synchronized final void persist(CoreContainer cc, CoreDescriptor... coreDescriptors) {
-    List<CoreDescriptor> cds = new ArrayList<>(cc.getCoreDescriptors().size() + coreDescriptors.length);
-    
-    cds.addAll(cc.getCoreDescriptors());
-    cds.addAll(Arrays.asList(coreDescriptors));
-
-    doPersist(buildSolrXML(cds));
-  }
-
-  protected void doPersist(String xml) {
-    File file = new File(cfg.config.getResourceLoader().getInstanceDir(), ConfigSolr.SOLR_XML_FILE);
-    Writer writer = null;
-    FileOutputStream fos = null;
-    try {
-      fos = new FileOutputStream(file);
-      writer = new OutputStreamWriter(fos, StandardCharsets.UTF_8);
-      writer.write(xml);
-      writer.close();
-      logger.info("Persisted core descriptions to {}", file.getAbsolutePath());
-    } catch (IOException e) {
-      logger.error("Couldn't persist core descriptions to {} : {}",
-          file.getAbsolutePath(), e);
-    } finally {
-      IOUtils.closeQuietly(writer);
-      IOUtils.closeQuietly(fos);
-    }
-  }
-
-  @Override
-  public void create(CoreContainer cc, CoreDescriptor... coreDescriptors) {
-    this.persist(cc, coreDescriptors);
-  }
-
-  @Override
-  public void delete(CoreContainer cc, CoreDescriptor... coreDescriptors) {
-    // coreDescriptors is kind of a useless param - we persist the current state off cc
-    this.persist(cc);
-  }
-
-  @Override
-  public void rename(CoreContainer cc, CoreDescriptor oldCD, CoreDescriptor newCD) {
-    // we don't need those params, we just write out the current cc state
-    this.persist(cc);
-  }
-
-  @Override
-  public void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2) {
-    this.persist(cc);
-  }
-
-  @Override
-  public List<CoreDescriptor> discover(CoreContainer cc) {
-
-    ImmutableList.Builder<CoreDescriptor> listBuilder = ImmutableList.builder();
-
-    for (String coreName : cfg.getAllCoreNames()) {
-
-      String name = cfg.getProperty(coreName, CoreDescriptor.CORE_NAME, DEFAULT_CORE_NAME);
-      String instanceDir = cfg.getProperty(coreName, CoreDescriptor.CORE_INSTDIR, "");
-
-      Properties coreProperties = new Properties();
-      for (String propName : CoreDescriptor.standardPropNames) {
-        String propValue = cfg.getProperty(coreName, propName, "");
-        if (StringUtils.isNotEmpty(propValue))
-          coreProperties.setProperty(propName, propValue);
-      }
-      coreProperties.putAll(cfg.getCoreProperties(coreName));
-
-      listBuilder.add(new CoreDescriptor(cc, name, instanceDir, coreProperties));
-    }
-
-    return listBuilder.build();
-  }
-
-  // for testing
-  String getTemplate() {
-    return solrXmlTemplate;
-  }
-
-  public static class NonPersistingLocator extends SolrXMLCoresLocator {
-
-    public NonPersistingLocator(String originalXML, ConfigSolrXmlOld cfg) {
-      super(originalXML, cfg);
-      this.xml = originalXML;
-    }
-
-    @Override
-    public void doPersist(String xml) {
-      this.xml = xml;
-    }
-
-    public String xml;
-
-  }
-
-}
diff --git a/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
index d67ac0e..198e15b 100644
--- a/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
@@ -28,7 +28,6 @@
 import org.apache.lucene.store.NRTCachingDirectory;
 import org.apache.lucene.store.NativeFSLockFactory;
 import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
 import org.apache.lucene.store.SimpleFSLockFactory;
 import org.apache.lucene.store.SingleInstanceLockFactory;
 import org.apache.solr.common.SolrException;
@@ -113,8 +112,7 @@
    * carefully - some Directory wrappers will
    * cache files for example.
    * 
-   * This implementation works with two wrappers:
-   * NRTCachingDirectory and RateLimitedDirectoryWrapper.
+   * This implementation works with NRTCachingDirectory.
    * 
    * You should first {@link Directory#sync(java.util.Collection)} any file that will be 
    * moved or avoid cached files through settings.
@@ -143,13 +141,11 @@
     super.move(fromDir, toDir, fileName, ioContext);
   }
 
-  // special hack to work with NRTCachingDirectory and RateLimitedDirectoryWrapper
+  // special hack to work with NRTCachingDirectory
   private Directory getBaseDir(Directory dir) {
     Directory baseDir;
     if (dir instanceof NRTCachingDirectory) {
       baseDir = ((NRTCachingDirectory)dir).getDelegate();
-    } else if (dir instanceof RateLimitedDirectoryWrapper) {
-      baseDir = ((RateLimitedDirectoryWrapper)dir).getDelegate();
     } else {
       baseDir = dir;
     }
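
With RateLimitedDirectoryWrapper gone, NRTCachingDirectory is the only wrapper that must be unwrapped before raw file moves. A sketch of the remaining unwrapping (baseDir is an illustrative name):

    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.NRTCachingDirectory;

    public class UnwrapSketch {
      // Moves must target the real filesystem directory, not the cache.
      static Directory baseDir(Directory dir) {
        return (dir instanceof NRTCachingDirectory)
            ? ((NRTCachingDirectory) dir).getDelegate()
            : dir;
      }
    }
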
diff --git a/solr/core/src/java/org/apache/solr/core/ZkContainer.java b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
index 5f17910..692a3c2 100644
--- a/solr/core/src/java/org/apache/solr/core/ZkContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
@@ -180,16 +180,12 @@
         log.error("Could not connect to ZooKeeper", e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
             "", e);
-      } catch (IOException e) {
-        log.error("", e);
-        throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
-            "", e);
-      } catch (KeeperException e) {
+      } catch (IOException | KeeperException e) {
         log.error("", e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
             "", e);
       }
-      
+
 
     }
     this.zkController = zkController;
diff --git a/solr/core/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java
deleted file mode 100644
index ceb2251..0000000
--- a/solr/core/src/java/org/apache/solr/handler/BinaryUpdateRequestHandler.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler;
-
-import org.apache.solr.common.util.NamedList;
-
-/**
- * Update handler which uses the JavaBin format
- *
- * @see org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec
- * @see org.apache.solr.common.util.JavaBinCodec
- * 
- * use {@link UpdateRequestHandler}
- */
-@Deprecated
-public class BinaryUpdateRequestHandler extends UpdateRequestHandler {
-
-  @Override
-  public void init(NamedList args) {
-    super.init(args);
-    setAssumeContentType("application/javabin");
-    log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
-  }
-
-  @Override
-  public String getDescription() {
-    return "Add/Update multiple documents with javabin format";
-  }
-}
diff --git a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
index abe22f5..e92d529 100644
--- a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java
@@ -23,6 +23,7 @@
 import java.nio.ByteBuffer;
 import java.security.MessageDigest;
 import java.text.MessageFormat;
+import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
@@ -37,6 +38,7 @@
 import org.apache.lucene.search.TopFieldDocs;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.UpdateParams;
@@ -51,6 +53,7 @@
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.search.QParser;
 import org.apache.solr.update.AddUpdateCommand;
+import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.processor.UpdateRequestProcessor;
 import org.apache.solr.update.processor.UpdateRequestProcessorChain;
 import org.apache.solr.util.SimplePostTool;
@@ -58,6 +61,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.util.Collections.singletonMap;
 import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
 
 public class BlobHandler extends RequestHandlerBase  implements PluginInfoInitialized{
@@ -85,10 +89,12 @@
       }
       String err = SolrConfigHandler.validateName(blobName);
       if(err!=null){
+        log.warn("no blob name");
         rsp.add("error", err);
         return;
       }
       if(req.getContentStreams() == null )  {
+        log.warn("no content stream");
         rsp.add("error","No stream");
         return;
       }
@@ -108,6 +114,7 @@
               "q", "md5:" + md5,
               "fl", "id,size,version,timestamp,blobName")),
               rsp);
+          log.warn("duplicate entry for blob :"+blobName);
           return;
         }
 
@@ -122,14 +129,19 @@
         }
         version++;
         String id = blobName+"/"+version;
-        indexMap(req, makeMap(
+        Map<String, Object> doc = makeMap(
             "id", id,
             "md5", md5,
             "blobName", blobName,
             "version", version,
             "timestamp", new Date(),
             "size", payload.limit(),
-            "blob", payload));
+            "blob", payload);
+        verifyWithRealtimeGet(blobName, version, req, doc);
+        log.info(MessageFormat.format("New blob inserting {0}, size {1}, md5 {2}", doc.get("id"), payload.limit(), md5));
+        indexMap(req, rsp, doc);
+        log.info(" Successfully Added and committed a blob with id {} and size {} ",id, payload.limit());
+
         break;
       }
 
@@ -187,24 +199,44 @@
 
         req.forward(null,
             new MapSolrParams((Map) makeMap(
-                "q", MessageFormat.format(q,blobName,version),
-                "fl", "id,size,version,timestamp,blobName",
+                "q", MessageFormat.format(q, blobName, version),
+                "fl", "id,size,version,timestamp,blobName,md5",
                 "sort", "version desc"))
-            ,rsp);
+            , rsp);
       }
     }
   }
 
-  public static void indexMap(SolrQueryRequest req, Map<String, Object> doc) throws IOException {
+  private void verifyWithRealtimeGet(String blobName, long version, SolrQueryRequest req, Map<String, Object> doc) {
+    for (;;) {
+      SolrQueryResponse response = new SolrQueryResponse();
+      String id = blobName + "/" + version;
+      req.forward("/get", new MapSolrParams(singletonMap("id", id)), response);
+      if (response.getValues().get("doc") == null) {
+        // ensure that the version does not exist
+        return;
+      } else {
+        log.info("id {} already exists, trying next", id);
+        version++;
+        doc.put("version", version);
+        id = blobName + "/" + version;
+        doc.put("id", id);
+      }
+    }
+  }
+
+  public static void indexMap(SolrQueryRequest req, SolrQueryResponse rsp, Map<String, Object> doc) throws IOException {
     SolrInputDocument solrDoc = new SolrInputDocument();
     for (Map.Entry<String, Object> e : doc.entrySet()) solrDoc.addField(e.getKey(),e.getValue());
     UpdateRequestProcessorChain processorChain = req.getCore().getUpdateProcessingChain(req.getParams().get(UpdateParams.UPDATE_CHAIN));
-    UpdateRequestProcessor processor = processorChain.createProcessor(req,null);
+    UpdateRequestProcessor processor = processorChain.createProcessor(req, rsp);
     AddUpdateCommand cmd = new AddUpdateCommand(req);
-    cmd.commitWithin =1;
     cmd.solrDoc = solrDoc;
+    log.info("Adding doc "+doc);
     processor.processAdd(cmd);
-
+    log.info("committing doc"+doc);
+    processor.processCommit(new CommitUpdateCommand(req, false));
   }
 
   @Override
@@ -245,7 +277,11 @@
       "<updateHandler class='solr.DirectUpdateHandler2'>\n" +
       "  <updateLog>\n" +
       "    <str name='dir'>${solr.ulog.dir:}</str>\n" +
-      "  </updateLog>\n" +
+      "  </updateLog>\n     " +
+      "  <autoCommit> \n" +
+      "       <maxDocs>1</maxDocs> \n" +
+      "       <openSearcher>true</openSearcher> \n" +
+      "  </autoCommit>" +
       "</updateHandler>\n" +
       "<requestHandler name='standard' class='solr.StandardRequestHandler' default='true' />\n" +
       "<requestHandler name='/analysis/field' startup='lazy' class='solr.FieldAnalysisRequestHandler' />\n" +
diff --git a/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
index 43669e2..c78d4f6 100644
--- a/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/DumpRequestHandler.java
@@ -54,6 +54,12 @@
       }
     }
 
+    if (Boolean.TRUE.equals(req.getParams().getBool("getdefaults"))) {
+      NamedList def = (NamedList) initArgs.get(PluginInfo.DEFAULTS);
+      rsp.add("getdefaults", def);
+    }
+
     if(Boolean.TRUE.equals( req.getParams().getBool("initArgs"))) rsp.add("initArgs", initArgs);
         
     // Write the streams...
diff --git a/solr/core/src/java/org/apache/solr/handler/JsonUpdateRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/NotFoundRequestHandler.java
similarity index 64%
rename from solr/core/src/java/org/apache/solr/handler/JsonUpdateRequestHandler.java
rename to solr/core/src/java/org/apache/solr/handler/NotFoundRequestHandler.java
index 8c60ad4..2eead9b 100644
--- a/solr/core/src/java/org/apache/solr/handler/JsonUpdateRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/NotFoundRequestHandler.java
@@ -1,3 +1,5 @@
+package org.apache.solr.handler;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -15,30 +17,21 @@
  * limitations under the License.
  */
 
-package org.apache.solr.handler;
-
-import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
 
 /**
- * use {@link UpdateRequestHandler}
+ * Does nothing other than showing a 404 message
  */
-@Deprecated
-public class JsonUpdateRequestHandler extends UpdateRequestHandler {
-
+public class NotFoundRequestHandler extends RequestHandlerBase {
   @Override
-  public void init(NamedList args) {
-    super.init(args);
-    setAssumeContentType("application/json");
-    // log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
+  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
+    throw new SolrException(SolrException.ErrorCode.NOT_FOUND, req.getContext().get("path") + " is not found");
   }
 
-  //////////////////////// SolrInfoMBeans methods //////////////////////
-
   @Override
   public String getDescription() {
-    return "Add documents with JSON";
+    return "No Operation";
   }
 }
-
-
-
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 6237561..9b1c963 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -99,7 +99,7 @@
  * @since solr 1.4
  */
 public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAware {
-  
+
   private static final Logger LOG = LoggerFactory.getLogger(ReplicationHandler.class.getName());
   SolrCore core;
 
@@ -212,7 +212,7 @@
       doSnapShoot(new ModifiableSolrParams(solrParams), rsp, req);
       rsp.add(STATUS, OK_STATUS);
     } else if (command.equalsIgnoreCase(CMD_DELETE_BACKUP)) {
-      deleteSnapshot(new ModifiableSolrParams(solrParams), rsp, req);
+      deleteSnapshot(new ModifiableSolrParams(solrParams));
       rsp.add(STATUS, OK_STATUS);
     } else if (command.equalsIgnoreCase(CMD_FETCH_INDEX)) {
       String masterUrl = solrParams.get(MASTER_URL);
@@ -272,7 +272,7 @@
     }
   }
 
-  private void deleteSnapshot(ModifiableSolrParams params, SolrQueryResponse rsp, SolrQueryRequest req) {
+  private void deleteSnapshot(ModifiableSolrParams params) {
     String name = params.get("name");
     if(name == null) {
       throw new SolrException(ErrorCode.BAD_REQUEST, "Missing mandatory param: name");
@@ -329,11 +329,16 @@
     if (!snapPullLock.tryLock())
       return false;
     try {
-      tempSnapPuller = snapPuller;
       if (masterUrl != null) {
+        if (tempSnapPuller != null && tempSnapPuller != snapPuller) {
+          tempSnapPuller.destroy();
+        }
+        
         NamedList<Object> nl = solrParams.toNamedList();
         nl.remove(SnapPuller.POLL_INTERVAL);
         tempSnapPuller = new SnapPuller(nl, this, core);
+      } else {
+        tempSnapPuller = snapPuller;
       }
       return tempSnapPuller.fetchLatestIndex(core, forceReplication);
     } catch (Exception e) {
@@ -572,7 +577,7 @@
     NamedList list = super.getStatistics();
     if (core != null) {
       list.add("indexSize", NumberUtils.readableSize(getIndexSize()));
-      CommitVersionInfo vInfo = getIndexVersion();
+      CommitVersionInfo vInfo = (core != null && !core.isClosed()) ? getIndexVersion() : null;
       list.add("indexVersion", null == vInfo ? 0 : vInfo.version);
       list.add(GENERATION, null == vInfo ? 0 : vInfo.generation);
 
@@ -788,8 +793,8 @@
     } else if (clzz == List.class) {
       String ss[] = s.split(",");
       List<String> l = new ArrayList<>();
-      for (int i = 0; i < ss.length; i++) {
-        l.add(new Date(Long.valueOf(ss[i])).toString());
+      for (String s1 : ss) {
+        l.add(new Date(Long.valueOf(s1)).toString());
       }
       nl.add(key, l);
     } else {
@@ -1001,6 +1006,9 @@
         if (snapPuller != null) {
           snapPuller.destroy();
         }
+        if (tempSnapPuller != null && tempSnapPuller != snapPuller) {
+          tempSnapPuller.destroy();
+        }
       }
 
       @Override
@@ -1174,6 +1182,7 @@
           offset = offset == -1 ? 0 : offset;
           int read = (int) Math.min(buf.length, filelen - offset);
           in.readBytes(buf, 0, read);
+
           fos.writeInt(read);
           if (useChecksum) {
             checksum.reset();
@@ -1182,6 +1191,7 @@
           }
           fos.write(buf, 0, read);
           fos.flush();
+          LOG.debug("Wrote {} bytes for file {}", offset + read, fileName);
 
           //Pause if necessary
           maxBytesBeforePause += read;
@@ -1231,8 +1241,8 @@
       FileInputStream inputStream = null;
       try {
         initWrite();
-  
-        //if if is a conf file read from config diectory
+
+        //if it is a conf file, read from the config directory
         File file = new File(core.getResourceLoader().getConfigDir(), cfileName);
 
         if (file.exists() && file.canRead()) {
@@ -1356,7 +1366,7 @@
    * Boolean param for tests that can be specified when using 
    * {@link #CMD_FETCH_INDEX} to force the current request to block until 
    * the fetch is complete.  <b>NOTE:</b> This param is not advised for 
-   * non-test code, since the the durration of the fetch for non-trivial
+   * non-test code, since the duration of the fetch for non-trivial
    * indexes will likeley cause the request to time out.
    *
    * @lucene.internal
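
The streaming code touched above frames every packet as a length prefix, an optional checksum, then the payload. A sketch of that framing (DataOutputStream stands in for Solr's FastOutputStream; Adler32 matches the checksum the handler uses):

    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.zip.Adler32;
    import java.util.zip.Checksum;

    public class ChunkWriterSketch {
      static void writeChunk(DataOutputStream out, byte[] buf, int read, boolean useChecksum) throws IOException {
        out.writeInt(read);                   // length prefix
        if (useChecksum) {
          Checksum checksum = new Adler32();
          checksum.update(buf, 0, read);
          out.writeLong(checksum.getValue()); // checksum of this chunk
        }
        out.write(buf, 0, read);              // payload
        out.flush();
      }
    }
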
diff --git a/solr/core/src/java/org/apache/solr/handler/SnapPuller.java b/solr/core/src/java/org/apache/solr/handler/SnapPuller.java
index eac7376..f167ca7 100644
--- a/solr/core/src/java/org/apache/solr/handler/SnapPuller.java
+++ b/solr/core/src/java/org/apache/solr/handler/SnapPuller.java
@@ -20,6 +20,8 @@
 import org.apache.http.client.HttpClient;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
@@ -32,7 +34,6 @@
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.FastInputStream;
 import org.apache.solr.common.util.NamedList;
@@ -73,10 +74,12 @@
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
@@ -176,14 +179,11 @@
     httpClientParams.set(HttpClientUtil.PROP_BASIC_AUTH_PASS, httpBasicAuthPassword);
     httpClientParams.set(HttpClientUtil.PROP_ALLOW_COMPRESSION, useCompression);
 
-    HttpClient httpClient = HttpClientUtil.createClient(httpClientParams, core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getConnectionManager());
-
-    return httpClient;
+    return HttpClientUtil.createClient(httpClientParams, core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getConnectionManager());
   }
 
   public SnapPuller(final NamedList initArgs, final ReplicationHandler handler, final SolrCore sc) {
     solrCore = sc;
-    final SolrParams params = SolrParams.toSolrParams(initArgs);
     String masterUrl = (String) initArgs.get(MASTER_URL);
     if (masterUrl == null)
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
@@ -246,19 +246,16 @@
     params.set(CommonParams.WT, "javabin");
     params.set(CommonParams.QT, "/replication");
     QueryRequest req = new QueryRequest(params);
-    HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient); //XXX modify to use shardhandler
-    NamedList rsp;
-    try {
+
+    // TODO modify to use shardhandler
+    try (HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient)) {
       client.setSoTimeout(60000);
       client.setConnectionTimeout(15000);
       
-      rsp = client.request(req);
+      return client.request(req);
     } catch (SolrServerException e) {
       throw new SolrException(ErrorCode.SERVER_ERROR, e.getMessage(), e);
-    } finally {
-      client.shutdown();
     }
-    return rsp;
   }
 
   /**
@@ -271,8 +268,9 @@
     params.set(CommonParams.WT, "javabin");
     params.set(CommonParams.QT, "/replication");
     QueryRequest req = new QueryRequest(params);
-    HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient);  //XXX modify to use shardhandler
-    try {
+
+    // TODO modify to use shardhandler
+    try (HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient)) {
       client.setSoTimeout(60000);
       client.setConnectionTimeout(15000);
       NamedList response = client.request(req);
@@ -291,8 +289,6 @@
 
     } catch (SolrServerException e) {
       throw new IOException(e);
-    } finally {
-      client.shutdown();
     }
   }
 
@@ -407,13 +403,41 @@
         }
         
         if (!isFullCopyNeeded) {
-          // rollback - and do it before we download any files
-          // so we don't remove files we thought we didn't need
-          // to download later
-          solrCore.getUpdateHandler().getSolrCoreState()
-          .closeIndexWriter(core, true);
+          // a searcher might be using some flushed but not committed segments
+          // because of soft commits (which open a searcher on IW's data)
+          // so we need to close the existing searcher on the last commit
+          // and wait until we are able to clean up all unused lucene files
+          if (solrCore.getCoreDescriptor().getCoreContainer().isZooKeeperAware()) {
+            solrCore.closeSearcher();
+          }
+
+          // rollback and reopen index writer and wait until all unused files
+          // are successfully deleted
+          solrCore.getUpdateHandler().newIndexWriter(true);
+          RefCounted<IndexWriter> writer = solrCore.getUpdateHandler().getSolrCoreState().getIndexWriter(null);
+          try {
+            IndexWriter indexWriter = writer.get();
+            int c = 0;
+            indexWriter.deleteUnusedFiles();
+            while (hasUnusedFiles(indexDir, commit)) {
+              indexWriter.deleteUnusedFiles();
+              LOG.info("Sleeping for 1000ms to wait for unused lucene index files to be delete-able");
+              Thread.sleep(1000);
+              c++;
+              if (c >= 30)  {
+                LOG.warn("SnapPuller unable to cleanup unused lucene index files so we must do a full copy instead");
+                isFullCopyNeeded = true;
+                break;
+              }
+            }
+            if (c > 0)  {
+              LOG.info("SnapPuller slept for " + (c * 1000) + "ms for unused lucene index files to be delete-able");
+            }
+          } finally {
+            writer.decref();
+          }
+          solrCore.getUpdateHandler().getSolrCoreState().closeIndexWriter(core, true);
         }
-        
         boolean reloadCore = false;
         
         try {
@@ -490,7 +514,7 @@
             solrCore.getUpdateHandler().newIndexWriter(isFullCopyNeeded);
           }
           
-          openNewSearcherAndUpdateCommitPoint(isFullCopyNeeded);
+          openNewSearcherAndUpdateCommitPoint();
         }
         
         replicationStartTime = 0;
@@ -544,6 +568,24 @@
     }
   }
 
+  private boolean hasUnusedFiles(Directory indexDir, IndexCommit commit) throws IOException {
+    Set<String> currentFiles = new HashSet<>();
+    String segmentsFileName = commit.getSegmentsFileName();
+    SegmentInfos infos = SegmentInfos.readCommit(indexDir, segmentsFileName);
+    for (SegmentCommitInfo info : infos.asList()) {
+      Set<String> files = info.info.files(); // All files that belong to this segment
+      currentFiles.addAll(files);
+    }
+    String[] allFiles = indexDir.listAll();
+    for (String file : allFiles) {
+      if (!file.equals(segmentsFileName) && !currentFiles.contains(file) && !file.endsWith(".lock")) {
+        LOG.info("Found unused file: " + file);
+        return true;
+      }
+    }
+    return false;
+  }
+
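
hasUnusedFiles compares the files referenced by the replicated commit point against everything present in the directory. The same check can be run standalone against any index; a minimal sketch, assuming a local FSDirectory path (hypothetical) and using DirectoryReader.listCommits() to obtain the commit:

    Directory dir = FSDirectory.open(Paths.get("/var/solr/data/index")); // hypothetical path
    List<IndexCommit> commits = DirectoryReader.listCommits(dir);
    IndexCommit latest = commits.get(commits.size() - 1);
    // getFileNames() includes the commit's own segments_N file
    Set<String> referenced = new HashSet<>(latest.getFileNames());
    for (String f : dir.listAll()) {
      if (!referenced.contains(f) && !f.endsWith(".lock")) {
        System.out.println("unused: " + f); // a candidate for deleteUnusedFiles()
      }
    }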
   private volatile Exception fsyncException;
 
   /**
@@ -651,9 +693,7 @@
     List<String> l = new ArrayList<>();
     if (str != null && str.length() != 0) {
       String[] ss = str.split(",");
-      for (int i = 0; i < ss.length; i++) {
-        l.add(ss[i]);
-      }
+      Collections.addAll(l, ss);
     }
     sb.append(replicationTime);
     if (!l.isEmpty()) {
@@ -666,7 +706,7 @@
     return sb;
   }
 
-  private void openNewSearcherAndUpdateCommitPoint(boolean isFullCopyNeeded) throws IOException {
+  private void openNewSearcherAndUpdateCommitPoint() throws IOException {
     SolrQueryRequest req = new LocalSolrQueryRequest(solrCore,
         new ModifiableSolrParams());
     
@@ -678,9 +718,7 @@
       if (waitSearcher[0] != null) {
         try {
           waitSearcher[0].get();
-        } catch (InterruptedException e) {
-          SolrException.log(LOG, e);
-        } catch (ExecutionException e) {
+        } catch (InterruptedException | ExecutionException e) {
           SolrException.log(LOG, e);
         }
       }
@@ -703,8 +741,7 @@
   private String createTempindexDir(SolrCore core, String tmpIdxDirName) {
     // TODO: there should probably be a DirectoryFactory#concatPath(parent, name)
     // or something
-    String tmpIdxDir = core.getDataDir() + tmpIdxDirName;
-    return tmpIdxDir;
+    return core.getDataDir() + tmpIdxDirName;
   }
 
   private void reloadCore() {
@@ -731,7 +768,7 @@
 
   private void downloadConfFiles(List<Map<String, Object>> confFilesToDownload, long latestGeneration) throws Exception {
     LOG.info("Starting download of configuration files from master: " + confFilesToDownload);
-    confFilesDownloaded = Collections.synchronizedList(new ArrayList<Map<String, Object>>());
+    confFilesDownloaded = Collections.synchronizedList(new ArrayList<>());
     File tmpconfDir = new File(solrCore.getResourceLoader().getConfigDir(), "conf." + getDateAsStr(new Date()));
     try {
       boolean status = tmpconfDir.mkdirs();
@@ -822,7 +859,7 @@
    * Copy a file by the File#renameTo() method. If it fails, it is considered a failure
    * <p/>
    */
-  private boolean moveAFile(Directory tmpIdxDir, Directory indexDir, String fname, List<String> copiedfiles) {
+  private boolean moveAFile(Directory tmpIdxDir, Directory indexDir, String fname) {
     LOG.debug("Moving file: {}", fname);
     boolean success = false;
     try {
@@ -856,7 +893,6 @@
       }
     }
     String segmentsFile = null;
-    List<String> movedfiles = new ArrayList<>();
     for (Map<String, Object> f : filesDownloaded) {
       String fname = (String) f.get(NAME);
       // the segments file must be copied last
@@ -868,12 +904,11 @@
         segmentsFile = fname;
         continue;
       }
-      if (!moveAFile(tmpIdxDir, indexDir, fname, movedfiles)) return false;
-      movedfiles.add(fname);
+      if (!moveAFile(tmpIdxDir, indexDir, fname)) return false;
     }
     //copy the segments file last
     if (segmentsFile != null) {
-      if (!moveAFile(tmpIdxDir, indexDir, segmentsFile, movedfiles)) return false;
+      if (!moveAFile(tmpIdxDir, indexDir, segmentsFile)) return false;
     }
     return true;
   }
@@ -899,7 +934,7 @@
   private void copyTmpConfFiles2Conf(File tmpconfDir) {
     boolean status = false;
     File confDir = new File(solrCore.getResourceLoader().getConfigDir());
-    for (File file : makeTmpConfDirFileList(tmpconfDir, new ArrayList<File>())) {
+    for (File file : makeTmpConfDirFileList(tmpconfDir, new ArrayList<>())) {
       File oldFile = new File(confDir, file.getPath().substring(tmpconfDir.getPath().length(), file.getPath().length()));
       if (!oldFile.getParentFile().exists()) {
         status = oldFile.getParentFile().mkdirs();
@@ -1111,7 +1146,7 @@
       return null;
     tmp = new HashMap<>(tmp);
     if (tmpFileFetcher != null)
-      tmp.put("bytesDownloaded", tmpFileFetcher.bytesDownloaded);
+      tmp.put("bytesDownloaded", tmpFileFetcher.getBytesDownloaded());
     return tmp;
   }
 
@@ -1132,58 +1167,53 @@
     }
   }
 
+  private interface FileInterface {
+    void sync() throws IOException;
+    void write(byte[] buf, int packetSize) throws IOException;
+    void close() throws Exception;
+    void delete() throws Exception;
+  }
+
   /**
    * The class acts as a client for ReplicationHandler.FileStream. It understands the protocol of wt=filestream
    *
    * @see org.apache.solr.handler.ReplicationHandler.DirectoryFileStream
    */
-  private class DirectoryFileFetcher {
-    boolean includeChecksum = true;
-
-    Directory copy2Dir;
-
-    String fileName;
-
-    String saveAs;
-
-    long size;
-
-    long bytesDownloaded = 0;
-
-    byte[] buf = new byte[1024 * 1024];
-
-    Checksum checksum;
-
-    int errorCount = 0;
-
+  private class FileFetcher {
+    private final FileInterface file;
+    private boolean includeChecksum = true;
+    private String fileName;
+    private String saveAs;
     private boolean isConf;
-
-    private boolean aborted = false;
-
     private Long indexGen;
 
-    private IndexOutput outStream;
+    private long size;
+    private long bytesDownloaded = 0;
+    private byte[] buf = new byte[1024 * 1024];
+    private Checksum checksum;
+    private int errorCount = 0;
+    private boolean aborted = false;
 
-    DirectoryFileFetcher(Directory tmpIndexDir, Map<String, Object> fileDetails, String saveAs,
+    FileFetcher(FileInterface file, Map<String, Object> fileDetails, String saveAs,
                 boolean isConf, long latestGen) throws IOException {
-      this.copy2Dir = tmpIndexDir;
+      this.file = file;
       this.fileName = (String) fileDetails.get(NAME);
       this.size = (Long) fileDetails.get(SIZE);
       this.isConf = isConf;
       this.saveAs = saveAs;
-
       indexGen = latestGen;
-      
-      outStream = copy2Dir.createOutput(saveAs, DirectoryFactory.IOCONTEXT_NO_CACHE);
-
       if (includeChecksum)
         checksum = new Adler32();
     }
 
+    public long getBytesDownloaded() {
+      return bytesDownloaded;
+    }
+
     /**
     * The main method which downloads the file
      */
-    void fetchFile() throws Exception {
+    public void fetchFile() throws Exception {
       try {
         while (true) {
           final FastInputStream is = getStream();
@@ -1202,12 +1232,12 @@
         }
       } finally {
         cleanup();
-        //if cleanup suceeds . The file is downloaded fully. do an fsync
+        //if cleanup succeeds, the file is downloaded fully; do an fsync
         fsyncService.submit(new Runnable(){
           @Override
           public void run() {
             try {
-              copy2Dir.sync(Collections.singleton(saveAs));
+              file.sync();
             } catch (IOException e) {
               fsyncException = e;
             }
@@ -1231,7 +1261,7 @@
           //read the size of the packet
           int packetSize = readInt(intbytes);
           if (packetSize <= 0) {
-            LOG.warn("No content received for file: " + currentFile);
+            LOG.warn("No content received for file: {}", fileName);
             return NO_CONTENT;
           }
           if (buf.length < packetSize)
@@ -1249,45 +1279,45 @@
             checksum.update(buf, 0, packetSize);
             long checkSumClient = checksum.getValue();
             if (checkSumClient != checkSumServer) {
-              LOG.error("Checksum not matched between client and server for: " + currentFile);
+              LOG.error("Checksum not matched between client and server for file: {}", fileName);
               //if checksum is wrong it is a problem return for retry
               return 1;
             }
           }
           //if everything is fine, write down the packet to the file
-          writeBytes(packetSize);
+          file.write(buf, packetSize);
           bytesDownloaded += packetSize;
+          LOG.debug("Fetched and wrote {} bytes of file: {}", bytesDownloaded, fileName);
           if (bytesDownloaded >= size)
             return 0;
-          //errorcount is always set to zero after a successful packet
+          //errorCount is always set to zero after a successful packet
           errorCount = 0;
         }
       } catch (ReplicationHandlerException e) {
         throw e;
       } catch (Exception e) {
-        LOG.warn("Error in fetching packets ", e);
-        //for any failure , increment the error count
+        LOG.warn("Error in fetching file: {} (downloaded {} of {} bytes)",
+            fileName, bytesDownloaded, size, e);
+        //for any failure, increment the error count
         errorCount++;
-        //if it fails for the same pacaket for   MAX_RETRIES fail and come out
+        //if the same packet fails more than MAX_RETRIES times, give up and bail out
         if (errorCount > MAX_RETRIES) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                  "Fetch failed for file:" + fileName, e);
+              "Failed to fetch file: " + fileName +
+                  " (downloaded " + bytesDownloaded + " of " + size + " bytes" +
+                  ", error count: " + errorCount + " > " + MAX_RETRIES + ")", e);
         }
         return ERR;
       }
     }
 
-    protected void writeBytes(int packetSize) throws IOException {
-      outStream.writeBytes(buf, 0, packetSize);
-    }
-
     /**
      * The webcontainer flushes the data only after it fills the buffer size. So, all data has to be read as readFully()
     * otherwise it fails. So read everything as bytes and then extract an integer out of it
      */
     private int readInt(byte[] b) {
       return (((b[0] & 0xff) << 24) | ((b[1] & 0xff) << 16)
-              | ((b[2] & 0xff) << 8) | (b[3] & 0xff));
+          | ((b[2] & 0xff) << 8) | (b[3] & 0xff));
 
     }
 
@@ -1296,9 +1326,9 @@
      */
     private long readLong(byte[] b) {
       return (((long) (b[0] & 0xff)) << 56) | (((long) (b[1] & 0xff)) << 48)
-              | (((long) (b[2] & 0xff)) << 40) | (((long) (b[3] & 0xff)) << 32)
-              | (((long) (b[4] & 0xff)) << 24) | ((b[5] & 0xff) << 16)
-              | ((b[6] & 0xff) << 8) | ((b[7] & 0xff));
+          | (((long) (b[2] & 0xff)) << 40) | (((long) (b[3] & 0xff)) << 32)
+          | (((long) (b[4] & 0xff)) << 24) | ((b[5] & 0xff) << 16)
+          | ((b[6] & 0xff) << 8) | ((b[7] & 0xff));
 
     }
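
readInt and readLong reassemble big-endian values from raw bytes; the & 0xff mask is what keeps negative bytes from sign-extending before the OR. A worked example:

    byte[] b = { 0x00, 0x00, 0x01, 0x02 };
    int v = ((b[0] & 0xff) << 24) | ((b[1] & 0xff) << 16)
        | ((b[2] & 0xff) << 8) | (b[3] & 0xff);
    // v == (1 << 8) | 2 == 258
    // without the mask, (byte) 0x80 would widen to 0xffffff80 and corrupt the result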
 
@@ -1307,30 +1337,30 @@
      */
     private void cleanup() {
       try {
-        outStream.close();
-      } catch (Exception e) {/* noop */
-          LOG.error("Error closing the file stream: "+ this.saveAs ,e);
+        file.close();
+      } catch (Exception e) {/* no-op */
+        LOG.error("Error closing file: {}", this.saveAs, e);
       }
       if (bytesDownloaded != size) {
         //if the download is not complete then
         //delete the file being downloaded
         try {
-          copy2Dir.deleteFile(saveAs);
+          file.delete();
         } catch (Exception e) {
-          LOG.error("Error deleting file in cleanup" + e.getMessage());
+          LOG.error("Error deleting file: {}", this.saveAs, e);
         }
-        //if the failure is due to a user abort it is returned nomally else an exception is thrown
+        //if the failure is due to a user abort, return normally; otherwise throw an exception
         if (!aborted)
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                  "Unable to download " + fileName + " completely. Downloaded "
-                          + bytesDownloaded + "!=" + size);
+              "Unable to download " + fileName + " completely. Downloaded "
+                  + bytesDownloaded + "!=" + size);
       }
     }
 
     /**
      * Open a new stream using HttpClient
      */
-    FastInputStream getStream() throws IOException {
+    private FastInputStream getStream() throws IOException {
 
       ModifiableSolrParams params = new ModifiableSolrParams();
 
@@ -1346,7 +1376,7 @@
         params.set(FILE, fileName);
       }
       if (useInternal) {
-        params.set(COMPRESSION, "true"); 
+        params.set(COMPRESSION, "true");
       }
       //use checksum
       if (this.includeChecksum) {
@@ -1354,18 +1384,18 @@
       }
       //wt=filestream this is a custom protocol
       params.set(CommonParams.WT, FILE_STREAM);
-        // This happen if there is a failure there is a retry. the offset=<sizedownloaded> ensures that
-        // the server starts from the offset
+      // This happens when a failure triggers a retry; the offset=<sizedownloaded> ensures
+      // that the server resumes from that offset
       if (bytesDownloaded > 0) {
         params.set(OFFSET, Long.toString(bytesDownloaded));
       }
-      
+
 
       NamedList response;
       InputStream is = null;
-      
-      HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient, null);  //XXX use shardhandler
-      try {
+
+      // TODO use shardhandler
+      try (HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient, null)) {
         client.setSoTimeout(60000);
         client.setConnectionTimeout(15000);
         QueryRequest req = new QueryRequest(params);
@@ -1379,326 +1409,134 @@
         //close stream on error
         IOUtils.closeQuietly(is);
         throw new IOException("Could not download file '" + fileName + "'", e);
-      } finally {
-        client.shutdown();
       }
     }
   }
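
The per-packet integrity check in fetchPackets uses Adler32 on both ends: the server sends its digest ahead of each packet, and the client recomputes it over the received bytes and returns the packet for retry on a mismatch. The client side in isolation, with serverChecksum standing in for the value read via readLong():

    Checksum checksum = new Adler32();
    checksum.reset();
    checksum.update(buf, 0, packetSize);          // digest exactly the packet bytes
    if (checksum.getValue() != serverChecksum) {  // serverChecksum: sent by the master
      return 1;                                   // mismatch -> caller retries the packet
    }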
-  
-  /**
-   * The class acts as a client for ReplicationHandler.FileStream. It understands the protocol of wt=filestream
-   *
-   * @see org.apache.solr.handler.ReplicationHandler.LocalFsFileStream
-   */
-  private class LocalFsFileFetcher {
-    boolean includeChecksum = true;
 
+  private class DirectoryFile implements FileInterface {
+    private final String saveAs;
+    private Directory copy2Dir;
+    private IndexOutput outStream;
+
+    DirectoryFile(Directory tmpIndexDir, String saveAs) throws IOException {
+      this.saveAs = saveAs;
+      this.copy2Dir = tmpIndexDir;
+      outStream = copy2Dir.createOutput(this.saveAs, DirectoryFactory.IOCONTEXT_NO_CACHE);
+    }
+
+    public void sync() throws IOException {
+      copy2Dir.sync(Collections.singleton(saveAs));
+    }
+
+    public void write(byte[] buf, int packetSize) throws IOException {
+      outStream.writeBytes(buf, 0, packetSize);
+    }
+
+    public void close() throws Exception {
+      outStream.close();
+    }
+
+    public void delete() throws Exception {
+      copy2Dir.deleteFile(saveAs);
+    }
+  }
+
+  private class DirectoryFileFetcher extends FileFetcher {
+    DirectoryFileFetcher(Directory tmpIndexDir, Map<String, Object> fileDetails, String saveAs,
+                boolean isConf, long latestGen) throws IOException {
+      super(new DirectoryFile(tmpIndexDir, saveAs), fileDetails, saveAs, isConf, latestGen);
+    }
+  }
+
+  private class LocalFsFile implements FileInterface {
     private File copy2Dir;
 
-    String fileName;
-
-    String saveAs;
-
-    long size;
-
-    long bytesDownloaded = 0;
-
     FileChannel fileChannel;
-    
     private FileOutputStream fileOutputStream;
-
-    byte[] buf = new byte[1024 * 1024];
-
-    Checksum checksum;
-
     File file;
 
-    int errorCount = 0;
-
-    private boolean isConf;
-
-    private boolean aborted = false;
-
-    private Long indexGen;
-
-    // TODO: could do more code sharing with DirectoryFileFetcher
-    LocalFsFileFetcher(File dir, Map<String, Object> fileDetails, String saveAs,
-                boolean isConf, long latestGen) throws IOException {
+    LocalFsFile(File dir, String saveAs) throws IOException {
       this.copy2Dir = dir;
-      this.fileName = (String) fileDetails.get(NAME);
-      this.size = (Long) fileDetails.get(SIZE);
-      this.isConf = isConf;
-      this.saveAs = saveAs;
-
-      indexGen = latestGen;
 
       this.file = new File(copy2Dir, saveAs);
-      
+
       File parentDir = this.file.getParentFile();
       if( ! parentDir.exists() ){
         if ( ! parentDir.mkdirs() ) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                                  "Failed to create (sub)directory for file: " + saveAs);
+              "Failed to create (sub)directory for file: " + saveAs);
         }
       }
-      
+
       this.fileOutputStream = new FileOutputStream(file);
       this.fileChannel = this.fileOutputStream.getChannel();
-
-      if (includeChecksum)
-        checksum = new Adler32();
     }
 
-    /**
-     * The main method which downloads file
-     */
-    void fetchFile() throws Exception {
-      try {
-        while (true) {
-          final FastInputStream is = getStream();
-          int result;
-          try {
-            //fetch packets one by one in a single request
-            result = fetchPackets(is);
-            if (result == 0 || result == NO_CONTENT) {
-              return;
-            }
-            //if there is an error continue. But continue from the point where it got broken
-          } finally {
-            IOUtils.closeQuietly(is);
-          }
-        }
-      } finally {
-        cleanup();
-        //if cleanup suceeds . The file is downloaded fully. do an fsync
-        fsyncService.submit(new Runnable(){
-          @Override
-          public void run() {
-            try {
-              FileUtils.sync(file);
-            } catch (IOException e) {
-              fsyncException = e;
-            }
-          }
-        });
-      }
+    public void sync() throws IOException {
+      FileUtils.sync(file);
     }
 
-    private int fetchPackets(FastInputStream fis) throws Exception {
-      byte[] intbytes = new byte[4];
-      byte[] longbytes = new byte[8];
-      try {
-        while (true) {
-          if (stop) {
-            stop = false;
-            aborted = true;
-            throw new ReplicationHandlerException("User aborted replication");
-          }
-          long checkSumServer = -1;
-          fis.readFully(intbytes);
-          //read the size of the packet
-          int packetSize = readInt(intbytes);
-          if (packetSize <= 0) {
-            LOG.warn("No content received for file: " + currentFile);
-            return NO_CONTENT;
-          }
-          if (buf.length < packetSize)
-            buf = new byte[packetSize];
-          if (checksum != null) {
-            //read the checksum
-            fis.readFully(longbytes);
-            checkSumServer = readLong(longbytes);
-          }
-          //then read the packet of bytes
-          fis.readFully(buf, 0, packetSize);
-          //compare the checksum as sent from the master
-          if (includeChecksum) {
-            checksum.reset();
-            checksum.update(buf, 0, packetSize);
-            long checkSumClient = checksum.getValue();
-            if (checkSumClient != checkSumServer) {
-              LOG.error("Checksum not matched between client and server for: " + currentFile);
-              //if checksum is wrong it is a problem return for retry
-              return 1;
-            }
-          }
-          //if everything is fine, write down the packet to the file
-          fileChannel.write(ByteBuffer.wrap(buf, 0, packetSize));
-          bytesDownloaded += packetSize;
-          if (bytesDownloaded >= size)
-            return 0;
-          //errorcount is always set to zero after a successful packet
-          errorCount = 0;
-        }
-      } catch (ReplicationHandlerException e) {
-        throw e;
-      } catch (Exception e) {
-        LOG.warn("Error in fetching packets ", e);
-        //for any failure , increment the error count
-        errorCount++;
-        //if it fails for the same pacaket for   MAX_RETRIES fail and come out
-        if (errorCount > MAX_RETRIES) {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                  "Fetch failed for file:" + fileName, e);
-        }
-        return ERR;
-      }
+    public void write(byte[] buf, int packetSize) throws IOException {
+      fileChannel.write(ByteBuffer.wrap(buf, 0, packetSize));
     }
 
-    /**
-     * The webcontainer flushes the data only after it fills the buffer size. So, all data has to be read as readFully()
-     * other wise it fails. So read everything as bytes and then extract an integer out of it
-     */
-    private int readInt(byte[] b) {
-      return (((b[0] & 0xff) << 24) | ((b[1] & 0xff) << 16)
-              | ((b[2] & 0xff) << 8) | (b[3] & 0xff));
-
+    public void close() throws Exception {
+      //close the FileOutputStream (which also closes the Channel)
+      fileOutputStream.close();
     }
 
-    /**
-     * Same as above but to read longs from a byte array
-     */
-    private long readLong(byte[] b) {
-      return (((long) (b[0] & 0xff)) << 56) | (((long) (b[1] & 0xff)) << 48)
-              | (((long) (b[2] & 0xff)) << 40) | (((long) (b[3] & 0xff)) << 32)
-              | (((long) (b[4] & 0xff)) << 24) | ((b[5] & 0xff) << 16)
-              | ((b[6] & 0xff) << 8) | ((b[7] & 0xff));
-
-    }
-
-    /**
-     * cleanup everything
-     */
-    private void cleanup() {
-      try {
-        //close the FileOutputStream (which also closes the Channel)
-        fileOutputStream.close();
-      } catch (Exception e) {/* noop */
-          LOG.error("Error closing the file stream: "+ this.saveAs ,e);
-      }
-      if (bytesDownloaded != size) {
-        //if the download is not complete then
-        //delete the file being downloaded
-        try {
-          Files.delete(file.toPath());
-        } catch (SecurityException e) {
-          LOG.error("Error deleting file in cleanup" + e.getMessage());
-        } catch (Throwable other) {
-          // TODO: should this class care if a file couldnt be deleted?
-          // this just emulates previous behavior, where only SecurityException would be handled.
-        }
-        //if the failure is due to a user abort it is returned nomally else an exception is thrown
-        if (!aborted)
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                  "Unable to download " + fileName + " completely. Downloaded "
-                          + bytesDownloaded + "!=" + size);
-      }
-    }
-
-    /**
-     * Open a new stream using HttpClient
-     */
-    FastInputStream getStream() throws IOException {
-
-      ModifiableSolrParams params = new ModifiableSolrParams();
-
-//    //the method is command=filecontent
-      params.set(COMMAND, CMD_GET_FILE);
-      params.set(GENERATION, Long.toString(indexGen));
-      params.set(CommonParams.QT, "/replication");
-      //add the version to download. This is used to reserve the download
-      if (isConf) {
-        //set cf instead of file for config file
-        params.set(CONF_FILE_SHORT, fileName);
-      } else {
-        params.set(FILE, fileName);
-      }
-      if (useInternal) {
-        params.set(COMPRESSION, "true"); 
-      }
-      //use checksum
-      if (this.includeChecksum) {
-        params.set(CHECKSUM, true);
-      }
-      //wt=filestream this is a custom protocol
-      params.set(CommonParams.WT, FILE_STREAM);
-        // This happen if there is a failure there is a retry. the offset=<sizedownloaded> ensures that
-        // the server starts from the offset
-      if (bytesDownloaded > 0) {
-        params.set(OFFSET, Long.toString(bytesDownloaded));
-      }
-      
-
-      NamedList response;
-      InputStream is = null;
-      HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient, null);  //XXX use shardhandler
-      try {
-        client.setSoTimeout(60000);
-        client.setConnectionTimeout(15000);
-        QueryRequest req = new QueryRequest(params);
-        response = client.request(req);
-        is = (InputStream) response.get("stream");
-        if(useInternal) {
-          is = new InflaterInputStream(is);
-        }
-        return new FastInputStream(is);
-      } catch (Exception e) {
-        //close stream on error
-        IOUtils.closeQuietly(is);
-        throw new IOException("Could not download file '" + fileName + "'", e);
-      } finally {
-        client.shutdown();
-      }
+    public void delete() throws Exception {
+      Files.delete(file.toPath());
     }
   }
-  
+
+  private class LocalFsFileFetcher extends FileFetcher {
+    LocalFsFileFetcher(File dir, Map<String, Object> fileDetails, String saveAs,
+                boolean isConf, long latestGen) throws IOException {
+      super(new LocalFsFile(dir, saveAs), fileDetails, saveAs, isConf, latestGen);
+    }
+  }
+
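
After this refactor, DirectoryFileFetcher and LocalFsFileFetcher are just thin bindings of FileFetcher to a FileInterface, so a new download destination needs only one small class. A hypothetical third implementation, e.g. an in-memory sink for tests (not part of this patch):

    private class InMemoryFile implements FileInterface {
      private final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      public void sync() { /* nothing to fsync for heap storage */ }
      public void write(byte[] buf, int packetSize) { bytes.write(buf, 0, packetSize); }
      public void close() { /* no underlying resource to release */ }
      public void delete() { bytes.reset(); }
    }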
   NamedList getDetails() throws IOException, SolrServerException {
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(COMMAND, CMD_DETAILS);
     params.set("slave", false);
     params.set(CommonParams.QT, "/replication");
-    HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient); //XXX use shardhandler
-    NamedList rsp;
-    try {
+
+    // TODO use shardhandler
+    try (HttpSolrClient client = new HttpSolrClient(masterUrl, myHttpClient)) {
       client.setSoTimeout(60000);
       client.setConnectionTimeout(15000);
       QueryRequest request = new QueryRequest(params);
-      rsp = client.request(request);
-    } finally {
-      client.shutdown();
+      return client.request(request);
     }
-    return rsp;
   }
 
   static Integer readInterval(String interval) {
     if (interval == null)
       return null;
     int result = 0;
-    if (interval != null) {
-      Matcher m = INTERVAL_PATTERN.matcher(interval.trim());
-      if (m.find()) {
-        String hr = m.group(1);
-        String min = m.group(2);
-        String sec = m.group(3);
-        result = 0;
-        try {
-          if (sec != null && sec.length() > 0)
-            result += Integer.parseInt(sec);
-          if (min != null && min.length() > 0)
-            result += (60 * Integer.parseInt(min));
-          if (hr != null && hr.length() > 0)
-            result += (60 * 60 * Integer.parseInt(hr));
-          result *= 1000;
-        } catch (NumberFormatException e) {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                  INTERVAL_ERR_MSG);
-        }
-      } else {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                INTERVAL_ERR_MSG);
+    Matcher m = INTERVAL_PATTERN.matcher(interval.trim());
+    if (m.find()) {
+      String hr = m.group(1);
+      String min = m.group(2);
+      String sec = m.group(3);
+      result = 0;
+      try {
+        if (sec != null && sec.length() > 0)
+          result += Integer.parseInt(sec);
+        if (min != null && min.length() > 0)
+          result += (60 * Integer.parseInt(min));
+        if (hr != null && hr.length() > 0)
+          result += (60 * 60 * Integer.parseInt(hr));
+        result *= 1000;
+      } catch (NumberFormatException e) {
+        throw new SolrException(ErrorCode.SERVER_ERROR, INTERVAL_ERR_MSG);
       }
-
+    } else {
+      throw new SolrException(ErrorCode.SERVER_ERROR, INTERVAL_ERR_MSG);
     }
+
     return result;
   }
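
readInterval parses an hh:mm:ss string into milliseconds by summing the captured groups (group 1 hours, group 2 minutes, group 3 seconds). Worked through for "01:30:15":

    // "01:30:15" -> 15 + 60*30 + 3600*1 = 5415 seconds -> 5415000 ms
    int result = 15;          // group(3), seconds
    result += 60 * 30;        // group(2), minutes
    result += 60 * 60 * 1;    // group(1), hours
    result *= 1000;           // result == 5415000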
 
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 752ed7c..cc2e05d 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -40,6 +40,7 @@
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.ConfigOverlay;
 import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.PluginsRegistry;
 import org.apache.solr.core.RequestParams;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrResourceLoader;
@@ -99,7 +100,7 @@
 
     private void handleGET() {
       if(parts.size() == 1) {
-        resp.add("solrConfig", req.getCore().getSolrConfig().toMap());
+        resp.add("config", getConfigDetails());
       } else {
         if(ConfigOverlay.NAME.equals(parts.get(1))){
           resp.add(ConfigOverlay.NAME, req.getCore().getSolrConfig().getOverlay().toMap());
@@ -118,12 +119,27 @@
           }
 
         } else {
-          Map<String, Object> m = req.getCore().getSolrConfig().toMap();
-          resp.add("solrConfig", ZkNodeProps.makeMap(parts.get(1),m.get(parts.get(1))));
+          Map<String, Object> m = getConfigDetails();
+          resp.add("config", ZkNodeProps.makeMap(parts.get(1),m.get(parts.get(1))));
         }
       }
     }
 
+    private Map<String, Object> getConfigDetails() {
+      Map<String, Object> map = req.getCore().getSolrConfig().toMap();
+      Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE);
+      if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>());
+      List<PluginInfo> plugins = PluginsRegistry.getHandlers(req.getCore());
+      for (PluginInfo plugin : plugins) {
+        if (SolrRequestHandler.TYPE.equals(plugin.type)) {
+          if (!reqHandlers.containsKey(plugin.name)) {
+            reqHandlers.put(plugin.name, plugin.toMap());
+          }
+        }
+      }
+      return map;
+    }
+
 
     private void handlePOST() throws IOException {
       Iterable<ContentStream> streams = req.getContentStreams();
@@ -236,11 +252,16 @@
 
       SolrResourceLoader loader = req.getCore().getResourceLoader();
       if (loader instanceof ZkSolrResourceLoader) {
-        ZkController.persistConfigResourceToZooKeeper(loader,params.getZnodeVersion(),
-            RequestParams.RESOURCE,params.toByteArray(),true);
+        ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader) loader;
+        if (ops.isEmpty()) {
+          ZkController.touchConfDir(zkLoader);
+        } else {
+          ZkController.persistConfigResourceToZooKeeper(zkLoader, params.getZnodeVersion(),
+              RequestParams.RESOURCE, params.toByteArray(), true);
+        }
 
       } else {
-        SolrResourceLoader.persistConfLocally(loader, ConfigOverlay.RESOURCE_NAME, params.toByteArray());
+        SolrResourceLoader.persistConfLocally(loader, RequestParams.RESOURCE, params.toByteArray());
         req.getCore().getSolrConfig().refreshRequestParams();
       }
 
@@ -278,7 +299,7 @@
 
     SolrResourceLoader loader = req.getCore().getResourceLoader();
     if (loader instanceof ZkSolrResourceLoader) {
-      ZkController.persistConfigResourceToZooKeeper(loader,overlay.getZnodeVersion(),
+      ZkController.persistConfigResourceToZooKeeper((ZkSolrResourceLoader) loader,overlay.getZnodeVersion(),
           ConfigOverlay.RESOURCE_NAME,overlay.toByteArray(),true);
 
     } else {
diff --git a/solr/core/src/java/org/apache/solr/handler/XmlUpdateRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/XmlUpdateRequestHandler.java
deleted file mode 100644
index d6f1f49..0000000
--- a/solr/core/src/java/org/apache/solr/handler/XmlUpdateRequestHandler.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler;
-
-import org.apache.solr.common.util.NamedList;
-
-/**
- * Add documents to solr using the STAX XML parser.
- * 
- * use {@link UpdateRequestHandler}
- */
-@Deprecated
-public class XmlUpdateRequestHandler extends UpdateRequestHandler {
-
-  @Override
-  public void init(NamedList args) {
-    super.init(args);
-    setAssumeContentType("application/xml");
-    log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
-  }
-
-  //////////////////////// SolrInfoMBeans methods //////////////////////
-
-  @Override
-  public String getDescription() {
-    return "Add documents with XML";
-  }
-}
-
-
-
diff --git a/solr/core/src/java/org/apache/solr/handler/XsltUpdateRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/XsltUpdateRequestHandler.java
deleted file mode 100644
index 1bfdfaa..0000000
--- a/solr/core/src/java/org/apache/solr/handler/XsltUpdateRequestHandler.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler;
-
-import org.apache.solr.common.util.NamedList;
-
-/**
- * Add documents to solr using the STAX XML parser, transforming it with XSLT first
- * 
- * use {@link UpdateRequestHandler}
- */
-@Deprecated
-public class XsltUpdateRequestHandler extends UpdateRequestHandler {
-
-  @Override
-  public void init(NamedList args) {
-    super.init(args);
-    setAssumeContentType("application/xml");
-    log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
-  }
-
-  //////////////////////// SolrInfoMBeans methods //////////////////////
-
-  @Override
-  public String getDescription() {
-    return "Add documents with XML, transforming with XSLT first";
-  }
-}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index c6e84c8..e82c6a6 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -17,66 +17,7 @@
  * limitations under the License.
  */
 
-import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.ASYNC;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.COLL_CONF;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.CREATE_NODE_SET;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARD_UNIQUE;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.CREATE_NODE_SET_SHUFFLE;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.ONLY_ACTIVE_NODES;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.ONLY_IF_DOWN;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.REPLICATION_FACTOR;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.REQUESTID;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
-import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
-import static org.apache.solr.common.cloud.ZkStateReader.ACTIVE;
-import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.ELECTION_NODE_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.LEADER_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.REJOIN_AT_HEAD_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
-import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
-import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_AT_ONCE_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_WAIT_SECONDS_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.NODE_NAME_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.STATE_PROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.BALANCESHARDUNIQUE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CLUSTERPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATEALIAS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATESHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEALIAS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICAPROP;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.MIGRATE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.OVERSEERSTATUS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REBALANCELEADERS;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.RELOAD;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.REMOVEROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-
+import com.google.common.collect.ImmutableSet;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrResponse;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -118,7 +59,65 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.ImmutableSet;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.ASYNC;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.COLL_CONF;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.CREATE_NODE_SET;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.CREATE_NODE_SET_SHUFFLE;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.ONLY_ACTIVE_NODES;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.ONLY_IF_DOWN;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.REPLICATION_FACTOR;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.REQUESTID;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARD_UNIQUE;
+import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
+import static org.apache.solr.common.cloud.ZkStateReader.ACTIVE;
+import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.ELECTION_NODE_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.LEADER_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_AT_ONCE_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_WAIT_SECONDS_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.NODE_NAME_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.REJOIN_AT_HEAD_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.STATE_PROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDROLE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.BALANCESHARDUNIQUE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.CLUSTERPROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATEALIAS;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATESHARD;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEALIAS;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICA;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICAPROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.MIGRATE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.OVERSEERSTATUS;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.REBALANCELEADERS;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.RELOAD;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.REMOVEROLE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD;
 
 public class CollectionsHandler extends RequestHandlerBase {
   protected static Logger log = LoggerFactory.getLogger(CollectionsHandler.class);
@@ -773,17 +772,15 @@
     ZkNodeProps leaderProps = clusterState.getLeader(collection, shard);
     ZkCoreNodeProps nodeProps = new ZkCoreNodeProps(leaderProps);
     
-    HttpSolrClient server = new HttpSolrClient(nodeProps.getBaseUrl());
-    try {
-      server.setConnectionTimeout(15000);
-      server.setSoTimeout(60000);
+    try (HttpSolrClient client = new HttpSolrClient(nodeProps.getBaseUrl())) {
+      client.setConnectionTimeout(15000);
+      client.setSoTimeout(60000);
       RequestSyncShard reqSyncShard = new CoreAdminRequest.RequestSyncShard();
       reqSyncShard.setCollection(collection);
       reqSyncShard.setShard(shard);
       reqSyncShard.setCoreName(nodeProps.getCoreName());
-      server.request(reqSyncShard);
-    } finally {
-      server.shutdown();
+      client.request(reqSyncShard);
     }
   }
   
@@ -853,6 +850,9 @@
          DocCollection.STATE_FORMAT,
          AUTO_ADD_REPLICAS,
         "router.");
+    if (props.get(DocCollection.STATE_FORMAT) == null) {
+      props.put(DocCollection.STATE_FORMAT, "2");
+    }
 
     if(SYSTEM_COLL.equals(name)){
      //We must always create a system collection with only a single shard
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
index 3687828..85e8be8 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CoreAdminHandler.java
@@ -17,25 +17,8 @@
 
 package org.apache.solr.handler.admin;
 
-import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 import org.apache.commons.lang.StringUtils;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -67,7 +50,6 @@
 import org.apache.solr.core.DirectoryFactory;
 import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrXMLCoresLocator;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
@@ -86,8 +68,24 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import static org.apache.solr.common.cloud.DocCollection.DOC_ROUTER;
 
 /**
  *
@@ -586,12 +584,7 @@
       
       // only write out the descriptor if the core is successfully created
       coreContainer.getCoresLocator().create(coreContainer, dcore);
-      
-      if (coreContainer.getCoresLocator() instanceof SolrXMLCoresLocator) {
-        // hack - in this case we persist once more because a core create race might
-        // have dropped entries.
-        coreContainer.getCoresLocator().create(coreContainer);
-      }
+
       rsp.add("core", core.getName());
     }
     catch (Exception ex) {
@@ -694,7 +687,6 @@
     }
     try {
       if (cname == null) {
-        rsp.add("defaultCoreName", coreContainer.getDefaultCoreName());
         for (String name : coreContainer.getAllCoreNames()) {
           status.add(name, getCoreStatus(coreContainer, name, isIndexInfoNeeded));
         }
@@ -1111,7 +1103,6 @@
       CoreDescriptor desc = cores.getUnloadedCoreDescriptor(cname);
       if (desc != null) {
         info.add("name", desc.getName());
-        info.add("isDefaultCore", desc.getName().equals(cores.getDefaultCoreName()));
         info.add("instanceDir", desc.getInstanceDir());
         // None of the following are guaranteed to be present in a not-yet-loaded core.
         String tmp = desc.getDataDir();
@@ -1126,7 +1117,6 @@
       try (SolrCore core = cores.getCore(cname)) {
         if (core != null) {
           info.add("name", core.getName());
-          info.add("isDefaultCore", core.getName().equals(cores.getDefaultCoreName()));
           info.add("instanceDir", normalizePath(core.getResourceLoader().getInstanceDir()));
           info.add("dataDir", normalizePath(core.getDataDir()));
           info.add("config", core.getConfigResource());
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/EditFileRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/EditFileRequestHandler.java
deleted file mode 100644
index 3560176..0000000
--- a/solr/core/src/java/org/apache/solr/handler/admin/EditFileRequestHandler.java
+++ /dev/null
@@ -1,340 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler.admin;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.solr.cloud.ZkController;
-import org.apache.solr.cloud.ZkSolrResourceLoader;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.util.ContentStream;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.Config;
-import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.CoreDescriptor;
-import org.apache.solr.core.SolrConfig;
-import org.apache.solr.core.SolrCore;
-import org.apache.solr.handler.RequestHandlerBase;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.RawResponseWriter;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.zookeeper.KeeperException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.xml.sax.InputSource;
-
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Set;
-
-/**
- * This handler uses the RawResponseWriter to give client access to
- * files inside ${solr.home}/conf
- * <p/>
- * If you want to selectively restrict access some configuration files, you can list
- * these files in the hidden invariants.  For example to hide
- * synonyms.txt and anotherfile.txt, you would register:
- * <p/>
- * <pre>
- * &lt;requestHandler name="/admin/fileupdate" class="org.apache.solr.handler.admin.EditFileRequestHandler" &gt;
- *   &lt;lst name="defaults"&gt;
- *    &lt;str name="echoParams"&gt;explicit&lt;/str&gt;
- *   &lt;/lst&gt;
- *   &lt;lst name="invariants"&gt;
- *    &lt;str name="hidden"&gt;synonyms.txt&lt;/str&gt;
- *    &lt;str name="hidden"&gt;anotherfile.txt&lt;/str&gt;
- *    &lt;str name="hidden"&gt;*&lt;/str&gt;
- *   &lt;/lst&gt;
- * &lt;/requestHandler&gt;
- * </pre>
- * <p/>
- * At present, there is only explicit file names (including path) or the glob '*' are supported. Variants like '*.xml'
- * are NOT supported.ere
- * <p/>
- * <p/>
- * The EditFileRequestHandler uses the {@link RawResponseWriter} (wt=raw) to return
- * file contents.  If you need to use a different writer, you will need to change
- * the registered invariant param for wt.
- * <p/>
- * If you want to override the contentType header returned for a given file, you can
- * set it directly using: CONTENT_TYPE.  For example, to get a plain text
- * version of schema.xml, try:
- * <pre>
- *   http://localhost:8983/solr/admin/fileedit?file=schema.xml&contentType=text/plain
- * </pre>
- *
- * @since solr 4.7
- *        <p/>
- *        <p/>
- *        You can use this handler to modify any files in the conf directory, e.g. solrconfig.xml
- *        or schema.xml, or even in sub-directories (e.g. velocity/error.vm) by POSTing a file. Here's an example cURL command
- *        <pre>
- *                                            curl -X POST --form "fileupload=@schema.new" 'http://localhost:8983/solr/collection1/admin/fileedit?op=write&file=schema.xml'
- *                                           </pre>
- *
- *        or
- *        <pre>
- *                                            curl -X POST --form "fileupload=@error.new" 'http://localhost:8983/solr/collection1/admin/file?op=write&file=velocity/error.vm'
- *                                           </pre>
- *
- *        For the first iteration, this is probably going to be used from the Solr admin screen.
- *
- *        NOTE: Specifying a directory or simply leaving the any "file=XXX" parameters will list the contents of a directory.
- *
- *        NOTE: <b>You must reload the core/collection for any changes made via this handler to take effect!</b>
- *
- *        NOTE: <b>If the core does not load (say schema.xml is not well formed for instance) you may be unable to replace
- *        the files with this interface.</b>
- *
- *        NOTE: <b>Leaving this handler enabled is a security risk! This handler should be disabled in all but trusted
- *        (probably development only) environments!</b>
- *
- *        Configuration files in ZooKeeper are supported.
- */
-public class EditFileRequestHandler extends RequestHandlerBase {
-
-  protected static final Logger log = LoggerFactory.getLogger(EditFileRequestHandler.class);
-
-  private final static String OP_PARAM = "op";
-  private final static String OP_WRITE = "write";
-  private final static String OP_TEST = "test";
-
-  ContentStream stream;
-  private byte[] data = null;
-  Set<String> hiddenFiles;
-
-  public EditFileRequestHandler() {
-    super();
-  }
-
-  @Override
-  public void init(NamedList args) {
-    super.init(args);
-    hiddenFiles = ShowFileRequestHandler.initHidden(invariants);
-  }
-
-  @Override
-  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp)
-      throws InterruptedException, KeeperException, IOException {
-
-    CoreContainer coreContainer = req.getCore().getCoreDescriptor().getCoreContainer();
-    String op = req.getParams().get(OP_PARAM);
-    if (OP_WRITE.equalsIgnoreCase(op) || OP_TEST.equalsIgnoreCase(op)) {
-      String fname = req.getParams().get("file", null);
-      if (fname == null) {
-        rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "No file name specified for write operation."));
-      } else {
-        fname = fname.replace('\\', '/');
-        stream = getOneInputStream(req, rsp);
-        if (stream == null) {
-          return; // Error already in rsp.
-        }
-
-        data = IOUtils.toByteArray(stream.getStream());
-
-        // If it's "solrconfig.xml", try parsing it as that object. Otherwise, if it ends in '.xml',
-        // see if it at least parses.
-        if ("solrconfig.xml".equals(fname)) {
-          try {
-            new SolrConfig("unused", new InputSource(new ByteArrayInputStream(data)));
-          } catch (Exception e) {
-            rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "Invalid solr config file: " + e.getMessage()));
-            return;
-          }
-        } else if (fname.endsWith(".xml")) { // At least do a rudimentary test, see if the thing parses.
-          try {
-            new Config(null, null, new InputSource(new ByteArrayInputStream(data)), null, false);
-          } catch (Exception e) {
-            rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "Invalid XML file: " + e.getMessage()));
-            return;
-          }
-        }
-        if (ShowFileRequestHandler.isHiddenFile(req, rsp, fname, true, hiddenFiles) == false) {
-          if (coreContainer.isZooKeeperAware()) {
-            writeToZooKeeper(req, rsp);
-          } else {
-            writeToFileSystem(req, rsp);
-          }
-        }
-      }
-    }
-  }
-
-  // write the file contained in the parameter "file=XXX" to ZooKeeper. The file may be a path, e.g.
-  // file=velocity/error.vm or file=schema.xml
-  //
-  // Important: Assumes that the file already exists in ZK, so far we aren't creating files there.
-  private void writeToZooKeeper(SolrQueryRequest req, SolrQueryResponse rsp)
-      throws KeeperException, InterruptedException, IOException {
-
-    CoreContainer coreContainer = req.getCore().getCoreDescriptor().getCoreContainer();
-    SolrZkClient zkClient = coreContainer.getZkController().getZkClient();
-
-    String adminFile = ShowFileRequestHandler.getAdminFileFromZooKeeper(req, rsp, zkClient, hiddenFiles);
-    String fname = req.getParams().get("file", null);
-    if (OP_TEST.equals(req.getParams().get(OP_PARAM))) {
-      testReloadSuccess(req, rsp);
-      return;
-    }
-    // Persist the managed schema
-    try {
-      // Assumption: the path exists
-      zkClient.setData(adminFile, data, true);
-      log.info("Saved " + fname + " to ZooKeeper successfully.");
-    } catch (KeeperException.BadVersionException e) {
-      log.error("Cannot save file: " + fname + " to Zookeeper, " +
-          "ZooKeeper error: " + e.getMessage());
-      rsp.setException(new SolrException(ErrorCode.SERVER_ERROR, "Cannot save file: " + fname + " to Zookeeper, " +
-          "ZooKeeper error: " + e.getMessage()));
-    }
-  }
-
-  // Used when POSTing the configuration files to Solr (either ZooKeeper or locally).
-  //
-  // It takes some effort to ensure that there is one (and only one) stream provided; there's no provision for
-  // more than one stream at present.
-  private ContentStream getOneInputStream(SolrQueryRequest req, SolrQueryResponse rsp) {
-    String file = req.getParams().get("file");
-    if (file == null) {
-      log.error("You must specify a file for the write operation.");
-      rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "You must specify a file for the write operation."));
-      return null;
-    }
-
-    // Now, this is truly clumsy
-    Iterable<ContentStream> streams = req.getContentStreams();
-    if (streams == null) {
-      log.error("Input stream list was null for admin file write operation.");
-      rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "Input stream list was null for admin file write operation."));
-      return null;
-    }
-    Iterator<ContentStream> iter = streams.iterator();
-    if (!iter.hasNext()) {
-      log.error("No input streams were in the list for admin file write operation.");
-      rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "No input streams were in the list for admin file write operation."));
-      return null;
-    }
-    ContentStream stream = iter.next();
-    if (iter.hasNext()) {
-      log.error("More than one input stream was found for admin file write operation.");
-      rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "More than one input stream was found for admin file write operation."));
-      return null;
-    }
-    return stream;
-  }
-
-  // Write the data passed in from the stream to the file indicated by the file=XXX parameter on the local file system
-  private void writeToFileSystem(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {
-
-    File adminFile = ShowFileRequestHandler.getAdminFileFromFileSystem(req, rsp, hiddenFiles);
-    if (adminFile == null || adminFile.isDirectory()) {
-      String fname = req.getParams().get("file", null);
-
-      if (adminFile == null) {
-        log.error("File " + fname + " was not found.");
-        rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "File " + fname + " was not found."));
-        return;
-      }
-      log.error("File " + fname + " is a directory.");
-      rsp.setException(new SolrException(ErrorCode.BAD_REQUEST, "File " + fname + " is a directory."));
-      return;
-    }
-    if (OP_TEST.equals(req.getParams().get(OP_PARAM))) {
-      testReloadSuccess(req, rsp);
-      return;
-    }
-
-    FileUtils.copyInputStreamToFile(stream.getStream(), adminFile);
-    log.info("Successfully saved file " + adminFile.getAbsolutePath() + " locally");
-  }
-
-  private boolean testReloadSuccess(SolrQueryRequest req, SolrQueryResponse rsp) {
-    // Try writing the config to a temporary core and reloading to see that we don't allow people to shoot themselves
-    // in the foot.
-    File home = null;
-    try {
-      home = new File(FileUtils.getTempDirectory(), "SOLR_5459"); // Unlikely to name a core or collection this!
-      FileUtils.writeStringToFile(new File(home, "solr.xml"), "<solr></solr>", "UTF-8"); // Use auto-discovery
-      File coll = new File(home, "SOLR_5459");
-
-      SolrCore core = req.getCore();
-      CoreDescriptor desc = core.getCoreDescriptor();
-      CoreContainer coreContainer = desc.getCoreContainer();
-
-      if (coreContainer.isZooKeeperAware()) {
-        try {
-          String confPath = ((ZkSolrResourceLoader) core.getResourceLoader()).getConfigSetZkPath();
-
-          ZkController.downloadConfigDir(coreContainer.getZkController().getZkClient(), confPath,
-              new File(coll, "conf"));
-        } catch (Exception ex) {
-          log.error("Error when attempting to download conf from ZooKeeper: " + ex.getMessage());
-          rsp.setException(new SolrException(ErrorCode.BAD_REQUEST,
-              "Error when attempting to download conf from ZooKeeper" + ex.getMessage()));
-          return false;
-        }
-      } else {
-        FileUtils.copyDirectory(new File(desc.getInstanceDir(), "conf"),
-            new File(coll, "conf"));
-      }
-
-      FileUtils.writeStringToFile(new File(coll, "core.properties"), "name=SOLR_5459", "UTF-8");
-
-      FileUtils.writeByteArrayToFile(new File(new File(coll, "conf"), req.getParams().get("file", null)), data);
-
-      return tryReloading(rsp, home);
-
-    } catch (IOException ex) {
-      log.warn("Caught IO exception when trying to verify configs. " + ex.getMessage());
-      rsp.setException(new SolrException(ErrorCode.SERVER_ERROR,
-          "Caught IO exception when trying to verify configs. " + ex.getMessage()));
-      return false;
-    }
-  }
-
-  private boolean tryReloading(SolrQueryResponse rsp, File home) {
-    CoreContainer cc = null;
-    try {
-      cc = CoreContainer.createAndLoad(home.getAbsolutePath(), new File(home, "solr.xml"));
-      if (cc.getCoreInitFailures().size() > 0) {
-        for (CoreContainer.CoreLoadFailure ex : cc.getCoreInitFailures().values()) {
-          log.error("Error when attempting to reload core: " + ex.exception.getMessage());
-          rsp.setException(new SolrException(ErrorCode.BAD_REQUEST,
-              "Error when attempting to reload core after writing config" + ex.exception.getMessage()));
-        }
-        return false;
-      }
-      return true;
-    } finally {
-      if (cc != null) {
-        cc.shutdown();
-      }
-    }
-  }
-
-  //////////////////////// SolrInfoMBeans methods //////////////////////
-
-  @Override
-  public String getDescription() {
-    return "Admin Config File -- update config files directly";
-  }
-}
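The handler removed above guarded writes by parsing uploads before persisting them (solrconfig.xml as a SolrConfig, any other *.xml as generic XML). A minimal sketch of that parse-before-persist check, using plain JAXP rather than Solr's Config class; the class and method names are illustrative:

    import java.io.ByteArrayInputStream;
    import javax.xml.parsers.DocumentBuilderFactory;

    // Hypothetical helper mirroring the removed handler's XML sanity check:
    // refuse to persist a config file that does not even parse.
    public final class XmlSanityCheck {
      public static boolean isWellFormed(byte[] data) {
        try {
          DocumentBuilderFactory f = DocumentBuilderFactory.newInstance();
          f.setNamespaceAware(true);
          f.newDocumentBuilder().parse(new ByteArrayInputStream(data));
          return true;
        } catch (Exception e) {
          return false; // reject the upload instead of writing a broken file
        }
      }
    }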
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
index 6b39a01..1a60660 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java
@@ -24,11 +24,29 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Iterator;
+import java.util.Arrays;
 
+import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.LongOpenHashSet;
+import com.carrotsearch.hppc.LongObjectOpenHashMap;
+import com.carrotsearch.hppc.cursors.IntObjectCursor;
+import com.carrotsearch.hppc.cursors.LongCursor;
+import com.carrotsearch.hppc.cursors.LongObjectCursor;
+import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.cursors.ObjectCursor;
+import com.carrotsearch.hppc.LongObjectMap;
 import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.FilterLeafReader;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.queries.TermsFilter;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.LeafCollector;
@@ -40,35 +58,37 @@
 import org.apache.lucene.search.TopDocsCollector;
 import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.uninverting.UninvertingReader;
 import org.apache.lucene.util.BitSetIterator;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.LongValues;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.params.ExpandParams;
-import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.TrieFloatField;
+import org.apache.solr.schema.TrieIntField;
+import org.apache.solr.schema.TrieLongField;
+import org.apache.solr.schema.TrieDoubleField;
+import org.apache.solr.schema.StrField;
 import org.apache.solr.search.CollapsingQParserPlugin;
 import org.apache.solr.search.DocIterator;
 import org.apache.solr.search.DocList;
 import org.apache.solr.search.DocSlice;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.QueryParsing;
+import org.apache.solr.search.SolrConstantScoreQuery;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.util.plugin.PluginInfoInitialized;
 import org.apache.solr.util.plugin.SolrCoreAware;
 
-import com.carrotsearch.hppc.IntObjectMap;
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
-import com.carrotsearch.hppc.IntOpenHashSet;
-import com.carrotsearch.hppc.cursors.IntObjectCursor;
-import com.carrotsearch.hppc.cursors.ObjectCursor;
-
 /**
  * The ExpandComponent is designed to work with the CollapsingPostFilter.
  * The CollapsingPostFilter collapses a result set on a field.
@@ -118,6 +138,7 @@
     SolrParams params = req.getParams();
 
     String field = params.get(ExpandParams.EXPAND_FIELD);
+    String hint = null;
     if (field == null) {
       List<Query> filters = rb.getFilters();
       if (filters != null) {
@@ -125,6 +146,7 @@
           if (q instanceof CollapsingQParserPlugin.CollapsingPostFilter) {
             CollapsingQParserPlugin.CollapsingPostFilter cp = (CollapsingQParserPlugin.CollapsingPostFilter) q;
             field = cp.getField();
+            hint = cp.hint;
           }
         }
       }
@@ -183,26 +205,168 @@
 
     SolrIndexSearcher searcher = req.getSearcher();
     LeafReader reader = searcher.getLeafReader();
-    SortedDocValues values = DocValues.getSorted(reader, field);
-    FixedBitSet groupBits = new FixedBitSet(values.getValueCount());
+
+    FieldType fieldType = searcher.getSchema().getField(field).getType();
+
+    SortedDocValues values = null;
+    long nullValue = 0;
+
+    if(fieldType instanceof StrField) {
+      //Get the top-level SortedDocValues
+      if(CollapsingQParserPlugin.HINT_TOP_FC.equals(hint)) {
+        Map<String, UninvertingReader.Type> mapping = new HashMap<>();
+        mapping.put(field, UninvertingReader.Type.SORTED);
+        UninvertingReader uninvertingReader = new UninvertingReader(new ReaderWrapper(searcher.getLeafReader(), field), mapping);
+        values = uninvertingReader.getSortedDocValues(field);
+      } else {
+        values = DocValues.getSorted(reader, field);
+      }
+    } else {
+      //Get the nullValue for the numeric collapse field
+      String defaultValue = searcher.getSchema().getField(field).getDefaultValue();
+      if(defaultValue != null) {
+        if(fieldType instanceof TrieIntField || fieldType instanceof TrieLongField) {
+          nullValue = Long.parseLong(defaultValue);
+        } else if(fieldType instanceof TrieFloatField){
+          nullValue = Float.floatToIntBits(Float.parseFloat(defaultValue));
+        } else if(fieldType instanceof TrieDoubleField){
+          nullValue = Double.doubleToLongBits(Double.parseDouble(defaultValue));
+        }
+      } else {
+        if(fieldType instanceof TrieFloatField){
+          nullValue = Float.floatToIntBits(0.0f);
+        } else if(fieldType instanceof TrieDoubleField){
+          nullValue = Double.doubleToLongBits(0.0d);
+        }
+      }
+    }
+
+    FixedBitSet groupBits = null;
+    LongOpenHashSet groupSet = null;
     DocList docList = rb.getResults().docList;
     IntOpenHashSet collapsedSet = new IntOpenHashSet(docList.size() * 2);
 
+    //Gather the groups for the current page of documents
     DocIterator idit = docList.iterator();
-
+    int[] globalDocs = new int[docList.size()];
+    int docsIndex = -1;
     while (idit.hasNext()) {
-      int doc = idit.nextDoc();
-      int ord = values.getOrd(doc);
-      if (ord > -1) {
-        groupBits.set(ord);
-        collapsedSet.add(doc);
+      globalDocs[++docsIndex] = idit.nextDoc();
+    }
+
+    Arrays.sort(globalDocs);
+    Query groupQuery = null;
+
+    /*
+    * This code gathers the group information for the current page.
+    */
+    List<LeafReaderContext> contexts = searcher.getTopReaderContext().leaves();
+    int currentContext = 0;
+    int currentDocBase = contexts.get(currentContext).docBase;
+    int nextDocBase = (currentContext+1)<contexts.size() ? contexts.get(currentContext+1).docBase : Integer.MAX_VALUE;
+    IntObjectOpenHashMap<BytesRef> ordBytes = null;
+    if(values != null) {
+      groupBits = new FixedBitSet(values.getValueCount());
+      MultiDocValues.OrdinalMap ordinalMap = null;
+      SortedDocValues[] sortedDocValues = null;
+      LongValues segmentOrdinalMap = null;
+      SortedDocValues currentValues = null;
+      if(values instanceof  MultiDocValues.MultiSortedDocValues) {
+        ordinalMap = ((MultiDocValues.MultiSortedDocValues)values).mapping;
+        sortedDocValues = ((MultiDocValues.MultiSortedDocValues)values).values;
+        currentValues = sortedDocValues[currentContext];
+        segmentOrdinalMap = ordinalMap.getGlobalOrds(currentContext);
+      }
+      int count = 0;
+
+      ordBytes = new IntObjectOpenHashMap<>();
+
+      for(int i=0; i<globalDocs.length; i++) {
+        int globalDoc = globalDocs[i];
+        while(globalDoc >= nextDocBase) {
+          currentContext++;
+          currentDocBase = contexts.get(currentContext).docBase;
+          nextDocBase = (currentContext+1) < contexts.size() ? contexts.get(currentContext+1).docBase : Integer.MAX_VALUE;
+          if(ordinalMap != null) {
+            currentValues = sortedDocValues[currentContext];
+            segmentOrdinalMap = ordinalMap.getGlobalOrds(currentContext);
+          }
+        }
+
+        int contextDoc = globalDoc - currentDocBase;
+        if(ordinalMap != null) {
+          int ord = currentValues.getOrd(contextDoc);
+          if(ord > -1) {
+            ++count;
+            BytesRef ref = currentValues.lookupOrd(ord);
+            ord = (int)segmentOrdinalMap.get(ord);
+            ordBytes.put(ord, BytesRef.deepCopyOf(ref));
+            groupBits.set(ord);
+            collapsedSet.add(globalDoc);
+          }
+        } else {
+          int ord = values.getOrd(globalDoc);
+          if(ord > -1) {
+            ++count;
+            BytesRef ref = values.lookupOrd(ord);
+            ordBytes.put(ord, BytesRef.deepCopyOf(ref));
+            groupBits.set(ord);
+            collapsedSet.add(globalDoc);
+          }
+        }
+      }
+
+      if(count > 0 && count < 200) {
+        try {
+          groupQuery = getGroupQuery(field, count, ordBytes);
+        } catch(Exception e) {
+          throw new IOException(e);
+        }
+      }
+    } else {
+      groupSet = new LongOpenHashSet((int)(docList.size()*1.25));
+      NumericDocValues collapseValues = contexts.get(currentContext).reader().getNumericDocValues(field);
+      int count = 0;
+      for(int i=0; i<globalDocs.length; i++) {
+        int globalDoc = globalDocs[i];
+        while(globalDoc >= nextDocBase) {
+          currentContext++;
+          currentDocBase = contexts.get(currentContext).docBase;
+          nextDocBase = currentContext+1 < contexts.size() ? contexts.get(currentContext+1).docBase : Integer.MAX_VALUE;
+          collapseValues = contexts.get(currentContext).reader().getNumericDocValues(field);
+        }
+        int contextDoc = globalDoc - currentDocBase;
+        long value = collapseValues.get(contextDoc);
+        if(value != nullValue) {
+          ++count;
+          groupSet.add(value);
+          collapsedSet.add(globalDoc);
+        }
+      }
+
+      if(count > 0 && count < 200) {
+        groupQuery = getGroupQuery(field, fieldType, count, groupSet);
       }
     }
 
     Collector collector;
     if (sort != null)
       sort = sort.rewrite(searcher);
-    GroupExpandCollector groupExpandCollector = new GroupExpandCollector(values, groupBits, collapsedSet, limit, sort);
+
+
+    Collector groupExpandCollector = null;
+
+    if(values != null) {
+      groupExpandCollector = new GroupExpandCollector(values, groupBits, collapsedSet, limit, sort);
+    } else {
+      groupExpandCollector = new NumericGroupExpandCollector(field, nullValue, groupSet, collapsedSet, limit, sort);
+    }
+
+    if(groupQuery !=  null) {
+      //Limits the results to documents that are in the same group as the documents in the page.
+      newFilters.add(groupQuery);
+    }
+
     SolrIndexSearcher.ProcessedFilter pfilter = searcher.getProcessedFilter(null, newFilters);
     if (pfilter.postFilter != null) {
       pfilter.postFilter.setLastDelegate(groupExpandCollector);
@@ -212,12 +376,11 @@
     }
 
     searcher.search(query, pfilter.filter, collector);
-    IntObjectMap groups = groupExpandCollector.getGroups();
+    LongObjectMap groups = ((GroupCollector)groupExpandCollector).getGroups();
     Map<String, DocSlice> outMap = new HashMap<>();
     CharsRefBuilder charsRef = new CharsRefBuilder();
-    FieldType fieldType = searcher.getSchema().getField(field).getType();
-    for (IntObjectCursor cursor : (Iterable<IntObjectCursor>) groups) {
-      int ord = cursor.key;
+    for (LongObjectCursor cursor : (Iterable<LongObjectCursor>) groups) {
+      long groupValue = cursor.key;
       TopDocsCollector topDocsCollector = (TopDocsCollector) cursor.value;
       TopDocs topDocs = topDocsCollector.topDocs();
       ScoreDoc[] scoreDocs = topDocs.scoreDocs;
@@ -230,10 +393,21 @@
           scores[i] = scoreDoc.score;
         }
         DocSlice slice = new DocSlice(0, docs.length, docs, scores, topDocs.totalHits, topDocs.getMaxScore());
-        final BytesRef bytesRef = values.lookupOrd(ord);
-        fieldType.indexedToReadable(bytesRef, charsRef);
-        String group = charsRef.toString();
-        outMap.put(group, slice);
+
+        if(fieldType instanceof StrField) {
+          final BytesRef bytesRef = ordBytes.get((int)groupValue);
+          fieldType.indexedToReadable(bytesRef, charsRef);
+          String group = charsRef.toString();
+          outMap.put(group, slice);
+        } else {
+          if(fieldType instanceof TrieIntField || fieldType instanceof TrieLongField ) {
+            outMap.put(Long.toString(groupValue), slice);
+          } else if(fieldType instanceof TrieFloatField) {
+            outMap.put(Float.toString(Float.intBitsToFloat((int)groupValue)), slice);
+          } else if(fieldType instanceof TrieDoubleField) {
+            outMap.put(Double.toString(Double.longBitsToDouble(groupValue)), slice);
+          }
+        }
       }
     }
 
@@ -306,32 +480,46 @@
     rb.rsp.add("expanded", expanded);
   }
 
-  private class GroupExpandCollector implements Collector {
+  private class GroupExpandCollector implements Collector, GroupCollector {
     private SortedDocValues docValues;
-    private IntObjectMap<Collector> groups;
-    private int docBase;
+    private MultiDocValues.OrdinalMap ordinalMap;
+    private SortedDocValues segmentValues;
+    private LongValues segmentOrdinalMap;
+    private MultiDocValues.MultiSortedDocValues multiSortedDocValues;
+
+    private LongObjectMap<Collector> groups;
     private FixedBitSet groupBits;
     private IntOpenHashSet collapsedSet;
 
     public GroupExpandCollector(SortedDocValues docValues, FixedBitSet groupBits, IntOpenHashSet collapsedSet, int limit, Sort sort) throws IOException {
       int numGroups = collapsedSet.size();
-      groups = new IntObjectOpenHashMap<>(numGroups * 2);
+      groups = new LongObjectOpenHashMap<>(numGroups * 2);
       DocIdSetIterator iterator = new BitSetIterator(groupBits, 0); // cost is not useful here
       int group;
       while ((group = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
-        Collector collector = (sort == null) ? TopScoreDocCollector.create(limit, true) : TopFieldCollector.create(sort, limit, false, false, false, true);
+        Collector collector = (sort == null) ? TopScoreDocCollector.create(limit) : TopFieldCollector.create(sort, limit, false, false, false);
         groups.put(group, collector);
       }
 
       this.collapsedSet = collapsedSet;
       this.groupBits = groupBits;
       this.docValues = docValues;
+      if(docValues instanceof MultiDocValues.MultiSortedDocValues) {
+        this.multiSortedDocValues = (MultiDocValues.MultiSortedDocValues)docValues;
+        this.ordinalMap = multiSortedDocValues.mapping;
+      }
     }
 
     public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
       final int docBase = context.docBase;
-      final IntObjectMap<LeafCollector> leafCollectors = new IntObjectOpenHashMap<>();
-      for (IntObjectCursor<Collector> entry : groups) {
+
+      if(ordinalMap != null) {
+        this.segmentValues = this.multiSortedDocValues.values[context.ord];
+        this.segmentOrdinalMap = ordinalMap.getGlobalOrds(context.ord);
+      }
+
+      final LongObjectMap<LeafCollector> leafCollectors = new LongObjectOpenHashMap<>();
+      for (LongObjectCursor<Collector> entry : groups) {
         leafCollectors.put(entry.key, entry.value.getLeafCollector(context));
       }
       return new LeafCollector() {
@@ -345,27 +533,139 @@
 
         @Override
         public void collect(int docId) throws IOException {
-          int doc = docId + docBase;
-          int ord = docValues.getOrd(doc);
-          if (ord > -1 && groupBits.get(ord) && !collapsedSet.contains(doc)) {
+          int globalDoc = docId + docBase;
+          int ord = -1;
+          if(ordinalMap != null) {
+            ord = segmentValues.getOrd(docId);
+            if(ord > -1) {
+              ord = (int)segmentOrdinalMap.get(ord);
+            }
+          } else {
+            ord = docValues.getOrd(globalDoc);
+          }
+
+          if (ord > -1 && groupBits.get(ord) && !collapsedSet.contains(globalDoc)) {
             LeafCollector c = leafCollectors.get(ord);
             c.collect(docId);
           }
         }
+      };
+    }
+
+    public LongObjectMap<Collector> getGroups() {
+      return groups;
+    }
+  }
+
+  private class NumericGroupExpandCollector implements Collector, GroupCollector {
+    private NumericDocValues docValues;
+
+
+    private String field;
+    private LongObjectOpenHashMap<Collector> groups;
+
+    private IntOpenHashSet collapsedSet;
+    private long nullValue;
+
+    public NumericGroupExpandCollector(String field, long nullValue, LongOpenHashSet groupSet, IntOpenHashSet collapsedSet, int limit, Sort sort) throws IOException {
+      int numGroups = collapsedSet.size();
+      this.nullValue = nullValue;
+      groups = new LongObjectOpenHashMap<>(numGroups * 2);
+      Iterator<LongCursor> iterator = groupSet.iterator();
+      while (iterator.hasNext()) {
+        LongCursor cursor = iterator.next();
+        Collector collector = (sort == null) ? TopScoreDocCollector.create(limit) : TopFieldCollector.create(sort, limit, false, false, false);
+        groups.put(cursor.value, collector);
+      }
+
+      this.field = field;
+      this.collapsedSet = collapsedSet;
+    }
+
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      final int docBase = context.docBase;
+      this.docValues = context.reader().getNumericDocValues(this.field);
+
+      final LongObjectOpenHashMap<LeafCollector> leafCollectors = new LongObjectOpenHashMap<>();
+
+      for (LongObjectCursor<Collector> entry : groups) {
+        leafCollectors.put(entry.key, entry.value.getLeafCollector(context));
+      }
+
+      return new LeafCollector() {
 
         @Override
-        public boolean acceptsDocsOutOfOrder() {
-          return false;
+        public void setScorer(Scorer scorer) throws IOException {
+          for (ObjectCursor<LeafCollector> c : leafCollectors.values()) {
+            c.value.setScorer(scorer);
+          }
+        }
+
+        @Override
+        public void collect(int docId) throws IOException {
+          long value = docValues.get(docId);
+          if (value != nullValue && leafCollectors.containsKey(value) && !collapsedSet.contains(docId + docBase)) {
+            LeafCollector c = leafCollectors.lget();
+            c.collect(docId);
+          }
         }
       };
     }
 
-    public IntObjectMap<Collector> getGroups() {
+    public LongObjectOpenHashMap<Collector> getGroups() {
       return groups;
     }
 
   }
 
+  private interface GroupCollector {
+    LongObjectMap getGroups();
+  }
+
+  private Query getGroupQuery(String fname,
+                           FieldType ft,
+                           int size,
+                           LongOpenHashSet groupSet) {
+
+    BytesRef[] bytesRefs = new BytesRef[size];
+    BytesRefBuilder term = new BytesRefBuilder();
+    Iterator<LongCursor> it = groupSet.iterator();
+    int index = -1;
+    String stringVal =  null;
+    while (it.hasNext()) {
+      LongCursor cursor = it.next();
+      if(ft instanceof TrieIntField || ft instanceof TrieLongField) {
+        stringVal = Long.toString(cursor.value);
+      } else {
+        if(ft instanceof TrieFloatField) {
+          stringVal = Float.toString(Float.intBitsToFloat((int)cursor.value));
+        } else {
+          stringVal = Double.toString(Double.longBitsToDouble(cursor.value));
+        }
+      }
+      ft.readableToIndexed(stringVal, term);
+      bytesRefs[++index] = term.toBytesRef();
+    }
+
+    return new SolrConstantScoreQuery(new TermsFilter(fname, bytesRefs));
+  }
+
+  private Query getGroupQuery(String fname,
+                              int size,
+                              IntObjectOpenHashMap<BytesRef> ordBytes) throws Exception {
+
+    BytesRef[] bytesRefs = new BytesRef[size];
+    int index = -1;
+    Iterator<IntObjectCursor<BytesRef>> it = ordBytes.iterator();
+    while (it.hasNext()) {
+      IntObjectCursor<BytesRef> cursor = it.next();
+      bytesRefs[++index] = cursor.value;
+    }
+    return new SolrConstantScoreQuery(new TermsFilter(fname, bytesRefs));
+  }
+
+
   ////////////////////////////////////////////
   ///  SolrInfoMBean
   ////////////////////////////////////////////
@@ -385,4 +685,49 @@
       throw new RuntimeException(e);
     }
   }
+
+  private class ReaderWrapper extends FilterLeafReader {
+
+    private String field;
+
+    public ReaderWrapper(LeafReader leafReader, String field) {
+      super(leafReader);
+      this.field = field;
+    }
+
+    public SortedDocValues getSortedDocValues(String field) {
+      return null;
+    }
+
+    public Object getCoreCacheKey() {
+      return in.getCoreCacheKey();
+    }
+
+    public FieldInfos getFieldInfos() {
+      Iterator<FieldInfo> it = in.getFieldInfos().iterator();
+      List<FieldInfo> newInfos = new ArrayList<>();
+      while(it.hasNext()) {
+        FieldInfo fieldInfo = it.next();
+
+        if(fieldInfo.name.equals(field)) {
+          FieldInfo f = new FieldInfo(fieldInfo.name,
+              fieldInfo.number,
+              fieldInfo.hasVectors(),
+              fieldInfo.hasNorms(),
+              fieldInfo.hasPayloads(),
+              fieldInfo.getIndexOptions(),
+              DocValuesType.NONE,
+              fieldInfo.getDocValuesGen(),
+              fieldInfo.attributes());
+          newInfos.add(f);
+
+        } else {
+          newInfos.add(fieldInfo);
+        }
+      }
+      FieldInfos infos = new FieldInfos(newInfos.toArray(new FieldInfo[newInfos.size()]));
+      return infos;
+    }
+  }
+
 }
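The core of the StrField branch added above is translating per-segment ordinals into global ordinals, so a group seen in one segment can be matched against documents in another. A condensed sketch of that translation, relying only on the MultiDocValues types this patch already uses; the helper itself is hypothetical:

    import org.apache.lucene.index.MultiDocValues;
    import org.apache.lucene.index.SortedDocValues;
    import org.apache.lucene.util.LongValues;

    // Returns the global ordinal for a doc in the given segment, or -1 if it has no value.
    static long globalOrd(MultiDocValues.MultiSortedDocValues multi, int segment, int contextDoc) {
      SortedDocValues segValues = multi.values[segment];          // per-segment dictionary
      LongValues toGlobal = multi.mapping.getGlobalOrds(segment); // segment ord -> global ord
      int segOrd = segValues.getOrd(contextDoc);
      return segOrd > -1 ? toGlobal.get(segOrd) : -1L;
    }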
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java b/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java
index e768faf..334f75a 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HighlightComponent.java
@@ -92,7 +92,6 @@
       PluginInfo pluginInfo = core.getSolrConfig().getPluginInfo(SolrHighlighter.class.getName()); //TODO deprecated configuration remove later
       if (pluginInfo != null) {
         highlighter = core.createInitInstance(pluginInfo, SolrHighlighter.class, null, DefaultSolrHighlighter.class.getName());
-        highlighter.initalize(core.getSolrConfig());
       } else {
         DefaultSolrHighlighter defHighlighter = new DefaultSolrHighlighter(core);
         defHighlighter.init(PluginInfo.EMPTY_INFO);
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
index 2e07080..88d548a 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
@@ -152,11 +152,8 @@
           if (urls.size() <= 1) {
             String url = urls.get(0);
             srsp.setShardAddress(url);
-            SolrClient client = new HttpSolrClient(url, httpClient);
-            try {
+            try (SolrClient client = new HttpSolrClient(url, httpClient)) {
               ssr.nl = client.request(req);
-            } finally {
-              client.shutdown();
             }
           } else {
             LBHttpSolrClient.Rsp rsp = httpShardHandlerFactory.makeLoadBalancedRequest(req, urls);
@@ -282,7 +279,6 @@
 
         clusterState =  zkController.getClusterState();
         String shardKeys =  params.get(ShardParams._ROUTE_);
-        if(shardKeys == null) shardKeys = params.get(ShardParams.SHARD_KEYS);//eprecated
 
         // This will be the complete list of slices we need to query for this request.
         slices = new HashMap<>();
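The try-with-resources change above works because SolrClient is Closeable in 5.0, replacing the old try/finally + shutdown() pattern. A minimal sketch; the method and URL handling are illustrative:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.common.util.NamedList;

    // close() runs automatically, even when request(...) throws.
    static NamedList<Object> send(SolrRequest req, String url) throws Exception {
      try (SolrClient client = new HttpSolrClient(url)) {
        return client.request(req);
      }
    }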
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
index b217c52..cc2d139 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
@@ -18,6 +18,8 @@
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.LBHttpSolrClient;
@@ -27,6 +29,7 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.URLUtil;
+import org.apache.solr.core.ConfigSolr;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.slf4j.Logger;
@@ -66,8 +69,8 @@
   protected HttpClient defaultClient;
   private LBHttpSolrClient loadbalancer;
   //default values:
-  int soTimeout = 0; 
-  int connectionTimeout = 0; 
+  int soTimeout = ConfigSolr.DEFAULT_DISTRIBUPDATESOTIMEOUT;
+  int connectionTimeout = ConfigSolr.DEFAULT_DISTRIBUPDATECONNTIMEOUT;
   int maxConnectionsPerHost = 20;
   int maxConnections = 10000;
   int corePoolSize = 0;
@@ -75,6 +78,7 @@
   int keepAliveTime = 5;
   int queueSize = -1;
   boolean accessPolicy = false;
+  boolean useRetries = false;
 
   private String scheme = null;
 
@@ -97,6 +101,10 @@
 
   // Configure if the threadpool favours fairness over throughput
   static final String INIT_FAIRNESS_POLICY = "fairnessPolicy";
+  
+  // Turn on retries for certain IOExceptions, many of which can happen
+  // due to connection pooling limitations / races
+  static final String USE_RETRIES = "useRetries";
 
   /**
    * Get {@link ShardHandler} that uses the default http client.
@@ -129,6 +137,7 @@
     this.keepAliveTime = getParameter(args, MAX_THREAD_IDLE_TIME, keepAliveTime);
     this.queueSize = getParameter(args, INIT_SIZE_OF_QUEUE, queueSize);
     this.accessPolicy = getParameter(args, INIT_FAIRNESS_POLICY, accessPolicy);
+    this.useRetries = getParameter(args, USE_RETRIES, useRetries);
     
     // magic sysprop to make tests reproducible: set by SolrTestCaseJ4.
     String v = System.getProperty("tests.shardhandler.randomSeed");
@@ -153,8 +162,18 @@
     clientParams.set(HttpClientUtil.PROP_MAX_CONNECTIONS, maxConnections);
     clientParams.set(HttpClientUtil.PROP_SO_TIMEOUT, soTimeout);
     clientParams.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT, connectionTimeout);
-    clientParams.set(HttpClientUtil.PROP_USE_RETRY, false);
+    if (!useRetries) {
+      clientParams.set(HttpClientUtil.PROP_USE_RETRY, false);
+    }
     this.defaultClient = HttpClientUtil.createClient(clientParams);
+    
+    // must come after createClient
+    if (useRetries) {
+      // our default retry handler will never retry on IOException if the request has been sent already,
+      // but for these read-only requests we can use the standard DefaultHttpRequestRetryHandler rules
+      ((DefaultHttpClient) this.defaultClient).setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler());
+    }
+    
     this.loadbalancer = createLoadbalancer(defaultClient);
   }
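For context on the useRetries branch above: with the HttpClient 4.x API this version uses, installing the stock retry handler looks like the sketch below. The retry count is illustrative; the second constructor argument being false means a request is never retried once its body has been sent:

    import org.apache.http.impl.client.DefaultHttpClient;
    import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;

    // Retry recoverable IOExceptions up to 3 times, but never resend a request
    // whose body already went out on the wire.
    static DefaultHttpClient clientWithRetries() {
      DefaultHttpClient client = new DefaultHttpClient();
      client.setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler(3, false));
      return client;
    }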
 
@@ -189,7 +208,7 @@
       } finally {
         
         if (loadbalancer != null) {
-          loadbalancer.shutdown();
+          loadbalancer.close();
         }
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index 3fef56c..a3520ea 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -23,6 +23,7 @@
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Scorer;
@@ -598,7 +599,8 @@
         // :TODO: would be simpler to always serialize every position of SortField[]
         if (type==SortField.Type.SCORE || type==SortField.Type.DOC) continue;
 
-        FieldComparator comparator = null;
+        FieldComparator<?> comparator = null;
+        LeafFieldComparator leafComparator = null;
         Object[] vals = new Object[nDocs];
 
         int lastIdx = -1;
@@ -621,12 +623,12 @@
 
           if (comparator == null) {
             comparator = sortField.getComparator(1,0);
-            comparator = comparator.setNextReader(currentLeaf);
+            leafComparator = comparator.getLeafComparator(currentLeaf);
           }
 
           doc -= currentLeaf.docBase;  // adjust for what segment this is in
-          comparator.setScorer(new FakeScorer(doc, score));
-          comparator.copy(0, doc);
+          leafComparator.setScorer(new FakeScorer(doc, score));
+          leafComparator.copy(0, doc);
           Object val = comparator.value(0);
           if (null != ft) val = ft.marshalSortValue(val);
           vals[position] = val;
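The comparator changes above track the Lucene 5.x split of FieldComparator: per-segment state now lives in a LeafFieldComparator obtained via getLeafComparator(...), where setNextReader(...) used to return the comparator itself. A sketch of reading one document's sort value under the new API; the helper is illustrative:

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.FieldComparator;
    import org.apache.lucene.search.LeafFieldComparator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.SortField;

    static Object sortValue(SortField sortField, LeafReaderContext leaf,
                            Scorer scorer, int segmentDoc) throws IOException {
      FieldComparator<?> comparator = sortField.getComparator(1, 0); // one slot suffices
      LeafFieldComparator leafCmp = comparator.getLeafComparator(leaf);
      leafCmp.setScorer(scorer);      // needed when the sort depends on score
      leafCmp.copy(0, segmentDoc);    // capture this doc's value into slot 0
      return comparator.value(0);
    }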
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
index 235c3e0..3c5b24f 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
@@ -36,6 +36,7 @@
 import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.FieldComparatorSource;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.SimpleFieldComparator;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermQuery;
@@ -631,7 +632,7 @@
 
   @Override
   public FieldComparator<Integer> newComparator(String fieldname, final int numHits, int sortPos, boolean reversed) throws IOException {
-    return new FieldComparator<Integer>() {
+    return new SimpleFieldComparator<Integer>() {
       private final int[] values = new int[numHits];
       private int bottomVal;
       private int topVal;
@@ -677,13 +678,13 @@
       }
 
       @Override
-      public FieldComparator setNextReader(LeafReaderContext context) throws IOException {
+      protected void doSetNextReader(LeafReaderContext context) throws IOException {
         //convert the ids to Lucene doc ids, the ordSet and termValues needs to be the same size as the number of elevation docs we have
         ordSet.clear();
         Fields fields = context.reader().fields();
-        if (fields == null) return this;
+        if (fields == null) return;
         Terms terms = fields.terms(idField);
-        if (terms == null) return this;
+        if (terms == null) return;
         termsEnum = terms.iterator(termsEnum);
         BytesRefBuilder term = new BytesRefBuilder();
         Bits liveDocs = context.reader().getLiveDocs();
@@ -701,7 +702,6 @@
             }
           }
         }
-        return this;
       }
 
       @Override
diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
index 0710f6c..64a8b61 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java
@@ -177,7 +177,7 @@
 
        int docid = searcher.getFirstMatch(new Term(idField.getName(), idBytes.get()));
        if (docid < 0) continue;
-       StoredDocument luceneDocument = searcher.doc(docid);
+       StoredDocument luceneDocument = searcher.doc(docid, rsp.getReturnFields().getLuceneFieldNames());
        SolrDocument doc = toSolrDoc(luceneDocument,  core.getLatestSchema());
        if( transformer != null ) {
          transformer.transform(doc, docid);
@@ -598,9 +598,7 @@
           // TODO: do any kind of validation here?
           updates.add(o);
 
-        } catch (SolrException e) {
-          log.warn("Exception reading log for updates", e);
-        } catch (ClassCastException e) {
+        } catch (SolrException | ClassCastException e) {
           log.warn("Exception reading log for updates", e);
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
index 47afee6..da677ac 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
@@ -17,6 +17,7 @@
  * limitations under the License.
  */
 
+import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -29,6 +30,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.lucene.search.suggest.Lookup;
 import org.apache.lucene.search.suggest.Lookup.LookupResult;
@@ -77,6 +79,9 @@
   /** SolrConfig label to identify boolean value to build suggesters on optimize */
   private static final String BUILD_ON_OPTIMIZE_LABEL = "buildOnOptimize";
   
+  /** SolrConfig label to identify boolean value to build suggesters on startup */
+  private static final String BUILD_ON_STARTUP_LABEL = "buildOnStartup";
+  
   @SuppressWarnings("unchecked")
   protected NamedList initParams;
   
@@ -128,14 +133,22 @@
               throw new RuntimeException("More than one dictionary is missing name.");
             }
           }
-          
-          // Register event listeners for this Suggester
-          core.registerFirstSearcherListener(new SuggesterListener(core, suggester, false, false));
+          boolean buildOnStartup;
+          Object buildOnStartupObj = suggesterParams.get(BUILD_ON_STARTUP_LABEL);
+          if (buildOnStartupObj == null) {
+            File storeFile = suggester.getStoreFile();
+            buildOnStartup = storeFile == null || !storeFile.exists();
+          } else {
+            buildOnStartup = Boolean.parseBoolean((String) buildOnStartupObj);
+          }
           boolean buildOnCommit = Boolean.parseBoolean((String) suggesterParams.get(BUILD_ON_COMMIT_LABEL));
           boolean buildOnOptimize = Boolean.parseBoolean((String) suggesterParams.get(BUILD_ON_OPTIMIZE_LABEL));
-          if (buildOnCommit || buildOnOptimize) {
-            LOG.info("Registering newSearcher listener for suggester: " + suggester.getName());
-            core.registerNewSearcherListener(new SuggesterListener(core, suggester, buildOnCommit, buildOnOptimize));
+          
+          if (buildOnCommit || buildOnOptimize || buildOnStartup) {
+            SuggesterListener listener = new SuggesterListener(core, suggester, buildOnCommit, buildOnOptimize, buildOnStartup, core.isReloaded());
+            LOG.info("Registering searcher listener for suggester: " + suggester.getName() + " - " + listener);
+            core.registerFirstSearcherListener(listener);
+            core.registerNewSearcherListener(listener);
           }
         }
       }
@@ -448,12 +461,23 @@
     private final SolrSuggester suggester;
     private final boolean buildOnCommit;
     private final boolean buildOnOptimize;
+    private final boolean buildOnStartup;
+    
+    // On core reload, a new searcher is opened immediately after the core is created, which would trigger
+    // a "buildOnCommit". The only event we want to trigger in that situation is "buildOnStartup", so if
+    // buildOnCommit is true and this is a core being reloaded, we skip the first time this listener
+    // is called.
+    private final AtomicLong callCount = new AtomicLong(0);
+    private final boolean isCoreReload;
+    
 
-    public SuggesterListener(SolrCore core, SolrSuggester checker, boolean buildOnCommit, boolean buildOnOptimize) {
+    public SuggesterListener(SolrCore core, SolrSuggester checker, boolean buildOnCommit, boolean buildOnOptimize, boolean buildOnStartup, boolean isCoreReload) {
       this.core = core;
       this.suggester = checker;
       this.buildOnCommit = buildOnCommit;
       this.buildOnOptimize = buildOnOptimize;
+      this.buildOnStartup = buildOnStartup;
+      this.isCoreReload = isCoreReload;
     }
 
     @Override
@@ -462,24 +486,23 @@
     @Override
     public void newSearcher(SolrIndexSearcher newSearcher,
                             SolrIndexSearcher currentSearcher) {
-      if (currentSearcher == null) {
-        // firstSearcher event
-        try {
-          LOG.info("Loading suggester index for: " + suggester.getName());
-          suggester.reload(core, newSearcher);
-        } catch (IOException e) {
-          log.error("Exception in reloading suggester index for: " + suggester.getName(), e);
+      long thisCallCount = callCount.incrementAndGet();
+      if (isCoreReload && thisCallCount == 1) {
+        LOG.info("Skipping first newSearcher call for suggester " + suggester + " in core reload");
+        return;
+      } else if (thisCallCount == 1 || (isCoreReload && thisCallCount == 2)) {
+        if (buildOnStartup) {
+          LOG.info("buildOnStartup: " + suggester.getName());
+          buildSuggesterIndex(newSearcher);
         }
       } else {
-        // newSearcher event
         if (buildOnCommit)  {
+          LOG.info("buildOnCommit: " + suggester.getName());
           buildSuggesterIndex(newSearcher);
         } else if (buildOnOptimize) {
           if (newSearcher.getIndexReader().leaves().size() == 1)  {
+            LOG.info("buildOnOptimize: " + suggester.getName());
             buildSuggesterIndex(newSearcher);
-          } else  {
-            LOG.info("Index is not optimized therefore skipping building suggester index for: " 
-                    + suggester.getName());
           }
         }
       }
@@ -488,7 +511,6 @@
 
     private void buildSuggesterIndex(SolrIndexSearcher newSearcher) {
       try {
-        LOG.info("Building suggester index for: " + suggester.getName());
         suggester.build(core, newSearcher);
       } catch (Exception e) {
         log.error("Exception in building suggester index for: " + suggester.getName(), e);
@@ -500,6 +522,14 @@
 
     @Override
     public void postSoftCommit() {}
+
+    @Override
+    public String toString() {
+      return "SuggesterListener [core=" + core + ", suggester=" + suggester
+          + ", buildOnCommit=" + buildOnCommit + ", buildOnOptimize="
+          + buildOnOptimize + ", buildOnStartup=" + buildOnStartup
+          + ", isCoreReload=" + isCoreReload + "]";
+    }
     
   }
 }
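The callCount bookkeeping above encodes a small decision table: ignore the extra searcher a core reload opens, build on the true startup event only when buildOnStartup is set, and apply the commit rule afterwards. A standalone sketch of that dispatch (buildOnOptimize elided for brevity; the class and build hook are illustrative):

    import java.util.concurrent.atomic.AtomicLong;

    final class BuildDispatch {
      private final AtomicLong callCount = new AtomicLong();
      private final boolean isCoreReload, buildOnStartup, buildOnCommit;

      BuildDispatch(boolean isCoreReload, boolean buildOnStartup, boolean buildOnCommit) {
        this.isCoreReload = isCoreReload;
        this.buildOnStartup = buildOnStartup;
        this.buildOnCommit = buildOnCommit;
      }

      void onNewSearcher(Runnable build) {
        long n = callCount.incrementAndGet();
        if (isCoreReload && n == 1) {
          return;                                // reload opens one extra searcher: skip it
        }
        if (n == 1 || (isCoreReload && n == 2)) {
          if (buildOnStartup) build.run();       // the true startup event
        } else if (buildOnCommit) {
          build.run();                           // later searchers come from commits
        }
      }
    }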
diff --git a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
index c357097..3d58134 100644
--- a/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/DefaultSolrHighlighter.java
@@ -16,18 +16,6 @@
  */
 package org.apache.solr.highlight;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.ListIterator;
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.lucene.analysis.CachingTokenFilter;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
@@ -59,7 +47,6 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.IndexSchema;
@@ -71,6 +58,18 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.Set;
+
 /**
  * 
  * @since solr 1.3
@@ -160,38 +159,7 @@
     if(boundaryScanner == null) boundaryScanner = new SimpleBoundaryScanner();
     boundaryScanners.put("", boundaryScanner);
     boundaryScanners.put(null, boundaryScanner);
-    
-    initialized = true;
-  }
-  //just for back-compat with the deprecated method
-  private boolean initialized = false;
-  @Override
-  @Deprecated
-  public void initalize( SolrConfig config) {
-    if (initialized) return;
-    SolrFragmenter frag = new GapFragmenter();
-    fragmenters.put("", frag);
-    fragmenters.put(null, frag);
 
-    SolrFormatter fmt = new HtmlFormatter();
-    formatters.put("", fmt);
-    formatters.put(null, fmt);    
-
-    SolrEncoder enc = new DefaultEncoder();
-    encoders.put("", enc);
-    encoders.put(null, enc);    
-
-    SolrFragListBuilder fragListBuilder = new SimpleFragListBuilder();
-    fragListBuilders.put( "", fragListBuilder );
-    fragListBuilders.put( null, fragListBuilder );
-    
-    SolrFragmentsBuilder fragsBuilder = new ScoreOrderFragmentsBuilder();
-    fragmentsBuilders.put( "", fragsBuilder );
-    fragmentsBuilders.put( null, fragsBuilder );
-    
-    SolrBoundaryScanner boundaryScanner = new SimpleBoundaryScanner();
-    boundaryScanners.put("", boundaryScanner);
-    boundaryScanners.put(null, boundaryScanner);
   }
 
   /**
@@ -240,16 +208,11 @@
    * @param request The SolrQueryRequest
    */
   private QueryScorer getSpanQueryScorer(Query query, String fieldName, TokenStream tokenStream, SolrQueryRequest request) {
-    boolean reqFieldMatch = request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false);
-    boolean highlightMultiTerm = request.getParams().getBool(HighlightParams.HIGHLIGHT_MULTI_TERM, true);
-    QueryScorer scorer;
-    if (reqFieldMatch) {
-      scorer = new QueryScorer(query, fieldName);
-    }
-    else {
-      scorer = new QueryScorer(query, null);
-    }
-    scorer.setExpandMultiTermQuery(highlightMultiTerm);
+    QueryScorer scorer = new QueryScorer(query,
+        request.getParams().getFieldBool(fieldName, HighlightParams.FIELD_MATCH, false) ? fieldName : null);
+    scorer.setExpandMultiTermQuery(request.getParams().getBool(HighlightParams.HIGHLIGHT_MULTI_TERM, true));
+    scorer.setUsePayloads(request.getParams().getFieldBool(fieldName, HighlightParams.PAYLOADS,
+        request.getSearcher().getLeafReader().getFieldInfos().fieldInfo(fieldName).hasPayloads()));
     return scorer;
   }
 
@@ -608,7 +571,7 @@
       if (summaries.length > 0) 
       docSummaries.add(fieldName, summaries);
     }
-    // no summeries made, copy text from alternate field
+    // no summaries made, copy text from alternate field
     if (summaries == null || summaries.length == 0) {
       alternateField( docSummaries, params, doc, fieldName );
     }
diff --git a/solr/core/src/java/org/apache/solr/highlight/PostingsSolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/PostingsSolrHighlighter.java
index 9f4eb72..82e75a8 100644
--- a/solr/core/src/java/org/apache/solr/highlight/PostingsSolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/PostingsSolrHighlighter.java
@@ -107,9 +107,6 @@
  * @lucene.experimental 
  */
 public class PostingsSolrHighlighter extends SolrHighlighter implements PluginInfoInitialized {
-
-  @Override
-  public void initalize(SolrConfig config) {}
   
   @Override
   public void init(PluginInfo info) {}
diff --git a/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java b/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java
index ac4e3d3..b6f510a 100644
--- a/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java
+++ b/solr/core/src/java/org/apache/solr/highlight/SolrHighlighter.java
@@ -16,21 +16,20 @@
  * limitations under the License.
  */
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.params.HighlightParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.SolrConfig;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.DocList;
 import org.apache.solr.util.SolrPluginUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
 
 public abstract class SolrHighlighter
 {
@@ -38,9 +37,6 @@
   public static int DEFAULT_PHRASE_LIMIT = 5000;
   public static Logger log = LoggerFactory.getLogger(SolrHighlighter.class);
 
-  @Deprecated
-  public abstract void initalize( SolrConfig config );
-
   /**
    * Check whether Highlighting is enabled for this request.
    * @param params The params controlling Highlighting
diff --git a/solr/core/src/java/org/apache/solr/parser/CharStream.java b/solr/core/src/java/org/apache/solr/parser/CharStream.java
index d45c194..7536df0 100644
--- a/solr/core/src/java/org/apache/solr/parser/CharStream.java
+++ b/solr/core/src/java/org/apache/solr/parser/CharStream.java
@@ -27,22 +27,6 @@
    */
   char readChar() throws java.io.IOException;
 
-  @Deprecated
-  /**
-   * Returns the column position of the character last read.
-   * @deprecated
-   * @see #getEndColumn
-   */
-  int getColumn();
-
-  @Deprecated
-  /**
-   * Returns the line number of the character last read.
-   * @deprecated
-   * @see #getEndLine
-   */
-  int getLine();
-
   /**
   * Returns the column number of the last character for the current token (being
   * matched after the last call to BeginToken).
diff --git a/solr/core/src/java/org/apache/solr/parser/FastCharStream.java b/solr/core/src/java/org/apache/solr/parser/FastCharStream.java
index 91dbf91..2e3b466 100644
--- a/solr/core/src/java/org/apache/solr/parser/FastCharStream.java
+++ b/solr/core/src/java/org/apache/solr/parser/FastCharStream.java
@@ -108,15 +108,6 @@
     }
   }
 
-  @Override
-  public final int getColumn() {
-    return bufferStart + bufferPosition;
-  }
-  @Override
-  public final int getLine() {
-    return 1;
-  }
-  @Override
   public final int getEndColumn() {
     return bufferStart + bufferPosition;
   }
diff --git a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
index 3b421ce..abe1bb0 100644
--- a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
+++ b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
@@ -150,13 +150,9 @@
       Query res = TopLevelQuery(null);  // pass null so we can tell later if an explicit field was provided or not
       return res!=null ? res : newBooleanQuery(false);
     }
-    catch (ParseException tme) {
+    catch (ParseException | TokenMgrError tme) {
       throw new SyntaxError("Cannot parse '" +query+ "': " + tme.getMessage(), tme);
-    }
-    catch (TokenMgrError tme) {
-      throw new SyntaxError("Cannot parse '" +query+ "': " + tme.getMessage(), tme);
-    }
-    catch (BooleanQuery.TooManyClauses tmc) {
+    } catch (BooleanQuery.TooManyClauses tmc) {
       throw new SyntaxError("Cannot parse '" +query+ "': too many boolean clauses", tmc);
     }
   }
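This hunk, like the matching one in RealTimeGetComponent above, collapses duplicated handlers with Java 7 multi-catch: exception types that share a recovery path share one catch block. A self-contained illustration with stand-in types (nothing here is a Solr API):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    final class MultiCatchDemo {
      // One handler where two identical catch blocks used to be.
      static byte[] readConfig(String path) {
        try {
          return Files.readAllBytes(Paths.get(path));
        } catch (IOException | SecurityException e) {
          throw new IllegalStateException("Cannot read '" + path + "': " + e.getMessage(), e);
        }
      }
    }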
diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
index 1273a6d..ae3b384 100644
--- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
@@ -17,26 +17,6 @@
 
 package org.apache.solr.request;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.EnumSet;
-import java.util.IdentityHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Future;
-import java.util.concurrent.FutureTask;
-import java.util.concurrent.RunnableFuture;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.Fields;
 import org.apache.lucene.index.LeafReader;
@@ -95,6 +75,26 @@
 import org.apache.solr.util.DateMathParser;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.EnumSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executor;
+import java.util.concurrent.Future;
+import java.util.concurrent.FutureTask;
+import java.util.concurrent.RunnableFuture;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
 /**
  * A class that generates simple Facet information for a request.
  *
diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java
index 8ba8e16..f551587 100644
--- a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialFieldType.java
@@ -23,7 +23,6 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
@@ -36,7 +35,6 @@
 import com.spatial4j.core.context.SpatialContext;
 import com.spatial4j.core.context.SpatialContextFactory;
 import com.spatial4j.core.distance.DistanceUtils;
-import com.spatial4j.core.io.LegacyShapeReadWriterFormat;
 import com.spatial4j.core.shape.Point;
 import com.spatial4j.core.shape.Rectangle;
 import com.spatial4j.core.shape.Shape;
@@ -59,6 +57,7 @@
 import org.apache.solr.response.TextResponseWriter;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.SpatialOptions;
+import org.apache.solr.util.DistanceUnits;
 import org.apache.solr.util.MapListener;
 import org.apache.solr.util.SpatialUtils;
 import org.slf4j.Logger;
@@ -71,7 +70,7 @@
  */
 public abstract class AbstractSpatialFieldType<T extends SpatialStrategy> extends FieldType implements SpatialQueryable {
 
-  /** A local-param with one of "none" (default), "distance", or "recipDistance". */
+  /** A local-param with one of "none" (default), "distance", "recipDistance", or one of the units supported in {@link DistanceUnits#getSupportedUnits()}. */
   public static final String SCORE_PARAM = "score";
   /** A local-param boolean that can be set to false to only return the
    * FunctionQuery (score), and thus not do filtering.
@@ -90,6 +89,10 @@
 
   private final Cache<String, T> fieldStrategyCache = CacheBuilder.newBuilder().build();
 
+  protected DistanceUnits distanceUnits;
+  @Deprecated
+  protected String units; // for back compat; hopefully null
+
   protected final Set<String> supportedScoreModes;
 
   protected AbstractSpatialFieldType() {
@@ -101,6 +104,7 @@
     set.add(NONE);
     set.add(DISTANCE);
     set.add(RECIP_DISTANCE);
+    set.addAll(DistanceUnits.getSupportedUnits());
     set.addAll(moreScoreModes);
     supportedScoreModes = Collections.unmodifiableSet(set);
   }
@@ -109,30 +113,58 @@
   protected void init(IndexSchema schema, Map<String, String> args) {
     super.init(schema, args);
 
-    String units = args.remove("units");
-    if (!"degrees".equals(units))
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-          "Must specify units=\"degrees\" on field types with class "+getClass().getSimpleName());
-
-    //replace legacy rect format with ENVELOPE
-    String wbStr = args.get("worldBounds");
-    if (wbStr != null && !wbStr.toUpperCase(Locale.ROOT).startsWith("ENVELOPE")) {
-      log.warn("Using old worldBounds format? Should use ENVELOPE(xMin, xMax, yMax, yMin).");
-      String[] parts = wbStr.split(" ");//"xMin yMin xMax yMax"
-      if (parts.length == 4) {
-        args.put("worldBounds",
-            "ENVELOPE(" + parts[0] + ", " + parts[2] + ", " + parts[3] + ", " + parts[1] + ")");
-      } //else likely eventual exception
-    }
-
     //Solr expects us to remove the parameters we've used.
     MapListener<String, String> argsWrap = new MapListener<>(args);
     ctx = SpatialContextFactory.makeSpatialContext(argsWrap, schema.getResourceLoader().getClassLoader());
     args.keySet().removeAll(argsWrap.getSeenKeys());
 
+    final String unitsErrMsg = "units parameter is deprecated, please use distanceUnits instead for field types with class " +
+        getClass().getSimpleName();
+    this.units = args.remove("units");//deprecated
+    if (units != null) {
+      if ("degrees".equals(units)) {
+        log.warn(unitsErrMsg);
+      } else {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, unitsErrMsg);
+      }
+    }
+
+    final String distanceUnitsStr = args.remove("distanceUnits");
+    if (distanceUnitsStr == null) {
+      if (units != null) {
+        this.distanceUnits = DistanceUnits.BACKCOMPAT;
+      } else {
+        this.distanceUnits = ctx.isGeo() ? DistanceUnits.KILOMETERS : DistanceUnits.DEGREES;
+      }
+    } else {
+      // If both units and distanceUnits were specified
+      if (units != null) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, unitsErrMsg);
+      }
+      this.distanceUnits = parseDistanceUnits(distanceUnitsStr);
+      if (this.distanceUnits == null)
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+            "Must specify distanceUnits as one of "+ DistanceUnits.getSupportedUnits() +
+                " on field types with class "+getClass().getSimpleName());
+    }
+
     argsParser = newSpatialArgsParser();
   }
 
+  /** If {@code str} is non-null, returns {@link org.apache.solr.util.DistanceUnits#valueOf(String)}
+   * (which returns null if not found);
+   * else returns {@link #distanceUnits} (null only before initialization in {@code init()}).
+   * @param str may be null
+   * @return may be null
+   */
+  public DistanceUnits parseDistanceUnits(String str) {
+    if (str == null) {
+      return this.distanceUnits;
+    } else {
+      return DistanceUnits.valueOf(str);
+    }
+  }
+
   protected SpatialArgsParser newSpatialArgsParser() {
     return new SpatialArgsParser() {
       @Override
@@ -192,18 +224,17 @@
   protected Shape parseShape(String str) {
     if (str.length() == 0)
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "empty string shape");
-    //In Solr trunk we only support "lat, lon" (or x y) as an additional format; in v4.0 we do the
-    // weird Circle & Rect formats too (Spatial4j LegacyShapeReadWriterFormat).
-    try {
-      Shape shape = LegacyShapeReadWriterFormat.readShapeOrNull(str, ctx);
-      if (shape != null)
-        return shape;
-      return ctx.readShapeFromWkt(str);
-    } catch (Exception e) {
-      String message = e.getMessage();
-      if (!message.contains(str))
-        message = "Couldn't parse shape '" + str + "' because: " + message;
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, message, e);
+    if (Character.isLetter(str.charAt(0))) {//WKT starts with a letter
+      try {
+        return ctx.readShapeFromWkt(str);
+      } catch (Exception e) {
+        String message = e.getMessage();
+        if (!message.contains(str))
+          message = "Couldn't parse shape '" + str + "' because: " + message;
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, message, e);
+      }
+    } else {
+      return SpatialUtils.parsePointSolrException(str, ctx);
     }
   }
 
@@ -281,7 +312,12 @@
 
   protected SpatialArgs parseSpatialArgs(QParser parser, String externalVal) {
     try {
-      return argsParser.parse(externalVal, ctx);
+      SpatialArgs args = argsParser.parse(externalVal, ctx);
+      // Convert parsed args.distErr to degrees (using distanceUnits)
+      if (args.getDistErr() != null) {
+        args.setDistErr(args.getDistErr() * distanceUnits.multiplierFromThisUnitToDegrees());
+      }
+      return args;
     } catch (SolrException e) {
       throw e;
     } catch (Exception e) {
@@ -315,6 +351,11 @@
     return new FilteredQuery(functionQuery, filter);
   }
 
+  @Override
+  public double getSphereRadius() {
+    return distanceUnits.getEarthRadius();
+  }
+
   /** The set of values supported for the score local-param. Not null. */
   public Set<String> getSupportedScoreModes() {
     return supportedScoreModes;
@@ -324,21 +365,31 @@
     if (score == null) {
       return null;
     }
-    switch (score) {
-      case NONE:
+
+    final double multiplier; // degrees-to-unit conversion for the returned distances
+
+    switch(score) {
       case "":
+      case NONE:
         return null;
-      case DISTANCE:
-        double multiplier = 1.0;//TODO support units=kilometers
-        return strategy.makeDistanceValueSource(spatialArgs.getShape().getCenter(), multiplier);
       case RECIP_DISTANCE:
         return strategy.makeRecipDistanceValueSource(spatialArgs.getShape());
+      case DISTANCE:
+        multiplier = distanceUnits.multiplierFromDegreesToThisUnit();
+        break;
       default:
+        DistanceUnits du = parseDistanceUnits(score);
+        if (du != null) {
+          multiplier = du.multiplierFromDegreesToThisUnit();
+        } else {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "'score' local-param must be one of " + supportedScoreModes);
+              "'score' local-param must be one of " + supportedScoreModes + ", it was: " + score);
     }
   }
 
+    return strategy.makeDistanceValueSource(spatialArgs.getShape().getCenter(), multiplier);
+  }
+
   /**
    * Gets the cached strategy for this field, creating it if necessary
    * via {@link #newSpatialStrategy(String)}.
@@ -368,6 +419,10 @@
     throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Sorting not supported on SpatialField: " + field.getName()+
       ", instead try sorting by query.");
   }
+
+  public DistanceUnits getDistanceUnits() {
+    return this.distanceUnits;
+  }
 }
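
The new distanceUnits plumbing above converts between degrees (the unit the Lucene spatial layer computes in) and the configured unit. A hedged sketch of the two conversion directions; the radius constant and the kilometers behavior are assumptions modeled on DistanceUnits, not code taken from it:

public class DistanceUnitSketch {
  // Assumed constants: a mean earth radius and the degrees<->km relationship.
  static final double EARTH_MEAN_RADIUS_KM = 6371.0087714;
  static final double DEG_TO_KM = EARTH_MEAN_RADIUS_KM * Math.PI / 180; // ~111.2 km per degree

  // Mirrors multiplierFromDegreesToThisUnit() for a hypothetical KILOMETERS unit,
  // the direction used when reporting distances via the score local-param.
  static double degreesToKm(double degrees) {
    return degrees * DEG_TO_KM;
  }

  // Mirrors multiplierFromThisUnitToDegrees(), the inverse direction used when
  // converting a user-supplied distErr into internal degrees.
  static double kmToDegrees(double km) {
    return km / DEG_TO_KM;
  }

  public static void main(String[] args) {
    System.out.println(degreesToKm(1.0));    // ~111.19: one degree of great circle in km
    System.out.println(kmToDegrees(111.19)); // ~1.0: round trip back to degrees
  }
}

User-facing values travel unit-to-degrees on the way in (distErr in parseSpatialArgs) and degrees-to-unit on the way out (the distance value source in getValueSourceFromSpatialArgs).
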
 
 
diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java
index 8af7e46..98efe33 100644
--- a/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/AbstractSpatialPrefixTreeFieldType.java
@@ -17,14 +17,14 @@
  * limitations under the License.
  */
 
+import java.util.Map;
+
 import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
 import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
 import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTreeFactory;
 import org.apache.lucene.spatial.query.SpatialArgsParser;
 import org.apache.solr.util.MapListener;
 
-import java.util.Map;
-
 /**
  * @see PrefixTreeStrategy
  * @lucene.experimental
@@ -42,6 +42,13 @@
   protected void init(IndexSchema schema, Map<String, String> args) {
     super.init(schema, args);
 
+    // Convert maxDistErr to degrees (based on distanceUnits) since the Lucene spatial layer works in degrees
+    if(args.containsKey(SpatialPrefixTreeFactory.MAX_DIST_ERR)) {
+      double maxDistErrOriginal = Double.parseDouble(args.get(SpatialPrefixTreeFactory.MAX_DIST_ERR));
+      args.put(SpatialPrefixTreeFactory.MAX_DIST_ERR, 
+          Double.toString(maxDistErrOriginal * distanceUnits.multiplierFromThisUnitToDegrees()));
+    }
+
     //Solr expects us to remove the parameters we've used.
     MapListener<String, String> argsWrap = new MapListener<>(args);
     grid = SpatialPrefixTreeFactory.makeSPT(argsWrap, schema.getResourceLoader().getClassLoader(), ctx);
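
To make the rescaling concrete: under the same ~111.2 km-per-degree assumption as the sketch above, maxDistErr="0.001" with distanceUnits="kilometers" would be rewritten to roughly 0.001 / 111.2 ≈ 0.000009 degrees (about one meter of precision) before SpatialPrefixTreeFactory sees it.
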
diff --git a/solr/core/src/java/org/apache/solr/schema/BBoxField.java b/solr/core/src/java/org/apache/solr/schema/BBoxField.java
index 9c7a661..f9378af 100644
--- a/solr/core/src/java/org/apache/solr/schema/BBoxField.java
+++ b/solr/core/src/java/org/apache/solr/schema/BBoxField.java
@@ -17,6 +17,12 @@
  * limitations under the License.
  */
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+
 import com.spatial4j.core.shape.Rectangle;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.queries.function.ValueSource;
@@ -27,12 +33,6 @@
 import org.apache.solr.common.SolrException;
 import org.apache.solr.search.QParser;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-
 public class BBoxField extends AbstractSpatialFieldType<BBoxStrategy> implements SchemaAware {
   private static final String PARAM_QUERY_TARGET_PROPORTION = "queryTargetProportion";
   private static final String PARAM_MIN_SIDE_LENGTH = "minSideLength";
@@ -140,6 +140,7 @@
     if (scoreParam == null) {
       return null;
     }
+
     switch (scoreParam) {
       //TODO move these to superclass after LUCENE-5804 ?
       case OVERLAP_RATIO:
@@ -160,10 +161,12 @@
             queryTargetProportion, minSideLength);
 
       case AREA:
-        return new ShapeAreaValueSource(strategy.makeShapeValueSource(), ctx, ctx.isGeo());
+        return new ShapeAreaValueSource(strategy.makeShapeValueSource(), ctx, ctx.isGeo(),
+            distanceUnits.multiplierFromDegreesToThisUnit() * distanceUnits.multiplierFromDegreesToThisUnit());
 
       case AREA2D:
-        return new ShapeAreaValueSource(strategy.makeShapeValueSource(), ctx, false);
+        return new ShapeAreaValueSource(strategy.makeShapeValueSource(), ctx, false,
+            distanceUnits.multiplierFromDegreesToThisUnit() * distanceUnits.multiplierFromDegreesToThisUnit());
 
       default:
         return super.getValueSourceFromSpatialArgs(parser, field, spatialArgs, scoreParam, strategy);
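
The multiplier is applied twice for AREA and AREA2D because area scales with the square of a linear unit conversion. Under the same assumed ~111.2 km-per-degree figure, a 1-degree by 1-degree cell is 1 deg² x (111.2)² ≈ 12,365 km².
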
diff --git a/solr/core/src/java/org/apache/solr/schema/CollationField.java b/solr/core/src/java/org/apache/solr/schema/CollationField.java
index 568fc98..aa25481 100644
--- a/solr/core/src/java/org/apache/solr/schema/CollationField.java
+++ b/solr/core/src/java/org/apache/solr/schema/CollationField.java
@@ -180,11 +180,8 @@
      input = loader.openResource(fileName);
      String rules = IOUtils.toString(input, "UTF-8");
      return new RuleBasedCollator(rules);
-    } catch (IOException e) {
-      // io error
-      throw new RuntimeException(e);
-    } catch (ParseException e) {
-      // invalid rules
+    } catch (IOException | ParseException e) {
+      // io error or invalid rules
       throw new RuntimeException(e);
     } finally {
       IOUtils.closeQuietly(input);
diff --git a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
index c0bee75..2170607 100644
--- a/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
+++ b/solr/core/src/java/org/apache/solr/schema/CurrencyField.java
@@ -836,13 +836,7 @@
           
           addRate(tmpRates, fromCurrency, toCurrency, exchangeRate);
         }
-      } catch (SAXException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e);
-      } catch (IOException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e);
-      } catch (ParserConfigurationException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e);
-      } catch (XPathExpressionException e) {
+      } catch (SAXException | XPathExpressionException | ParserConfigurationException | IOException e) {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error parsing currency config.", e);
       }
     } catch (IOException e) {
diff --git a/solr/core/src/java/org/apache/solr/schema/DateRangeField.java b/solr/core/src/java/org/apache/solr/schema/DateRangeField.java
index 7ae8a61..d6f4627 100644
--- a/solr/core/src/java/org/apache/solr/schema/DateRangeField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DateRangeField.java
@@ -54,12 +54,7 @@
 
   @Override
   protected void init(IndexSchema schema, Map<String, String> args) {
-    super.init(schema, addDegrees(args));
-  }
-
-  private Map<String, String> addDegrees(Map<String, String> args) {
-    args.put("units", "degrees");//HACK!
-    return args;
+    super.init(schema, args);
   }
 
   @Override
diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java
index ec3a365..24b4312 100644
--- a/solr/core/src/java/org/apache/solr/schema/EnumField.java
+++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java
@@ -126,13 +126,7 @@
           enumStringToIntMap.put(valueStr, i);
         }
       }
-      catch (ParserConfigurationException e) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing enums config.", e);
-      }
-      catch (SAXException e) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing enums config.", e);
-      }
-      catch (XPathExpressionException e) {
+      catch (ParserConfigurationException | XPathExpressionException | SAXException e) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing enums config.", e);
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/schema/GeoHashField.java b/solr/core/src/java/org/apache/solr/schema/GeoHashField.java
index bbe7a72..db48a80 100644
--- a/solr/core/src/java/org/apache/solr/schema/GeoHashField.java
+++ b/solr/core/src/java/org/apache/solr/schema/GeoHashField.java
@@ -17,16 +17,18 @@
 
 package org.apache.solr.schema;
 
+import java.io.IOException;
+
+import com.spatial4j.core.context.SpatialContext;
+import com.spatial4j.core.distance.DistanceUtils;
+import com.spatial4j.core.io.GeohashUtils;
+import com.spatial4j.core.shape.Point;
+import org.apache.lucene.index.StorableField;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.LiteralValueSource;
-import org.apache.lucene.index.StorableField;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.uninverting.UninvertingReader.Type;
-
-import com.spatial4j.core.context.SpatialContext;
-import com.spatial4j.core.io.GeohashUtils;
-import com.spatial4j.core.shape.Point;
 import org.apache.solr.response.TextResponseWriter;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.SolrConstantScoreQuery;
@@ -35,9 +37,6 @@
 import org.apache.solr.search.function.distance.GeohashHaversineFunction;
 import org.apache.solr.util.SpatialUtils;
 
-
-import java.io.IOException;
-
 /**
  * This is a class that represents a <a
  * href="http://en.wikipedia.org/wiki/Geohash">Geohash</a> field. The field is
@@ -94,4 +93,9 @@
     return new StrFieldSource(field.name);
   }
 
+  @Override
+  public double getSphereRadius() {
+    return DistanceUtils.EARTH_MEAN_RADIUS_KM;
+  }
+
 }
diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
index 30da7ab..2f8b764 100644
--- a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
@@ -901,37 +901,49 @@
       String msg = "copyField dest :'" + dest + "' is not an explicit field and doesn't match a dynamicField.";
       throw new SolrException(ErrorCode.SERVER_ERROR, msg);
     }
-    if (sourceIsDynamicFieldReference || sourceIsGlob) {
-      if (null != destDynamicField) { // source: glob or no-asterisk dynamic field ref; dest: dynamic field ref
+    if (sourceIsGlob) {
+      if (null != destDynamicField) { // source: glob ; dest: dynamic field ref
         registerDynamicCopyField(new DynamicCopy(source, destDynamicField, maxChars, sourceDynamicBase, destDynamicBase));
         incrementCopyFieldTargetCount(destSchemaField);
-      } else {                        // source: glob or no-asterisk dynamic field ref; dest: explicit field
+      } else {                        // source: glob ; dest: explicit field
         destDynamicField = new DynamicField(destSchemaField);
         registerDynamicCopyField(new DynamicCopy(source, destDynamicField, maxChars, sourceDynamicBase, null));
         incrementCopyFieldTargetCount(destSchemaField);
       }
-    } else {                          
-      if (null != destDynamicField) { // source: explicit field; dest: dynamic field reference
+    } else if (sourceIsDynamicFieldReference) {
+      if (null != destDynamicField) {   // source: no-asterisk dynamic field ref ; dest: dynamic field ref
+        registerDynamicCopyField(new DynamicCopy(source, destDynamicField, maxChars, sourceDynamicBase, destDynamicBase));
+        incrementCopyFieldTargetCount(destSchemaField);
+      } else {                          // source: no-asterisk dynamic field ref ; dest: explicit field
+        sourceSchemaField = getField(source);
+        registerExplicitSrcAndDestFields(source, maxChars, destSchemaField, sourceSchemaField);
+      }
+    } else {
+      if (null != destDynamicField) { // source: explicit field ; dest: dynamic field reference
         if (destDynamicField.pattern instanceof DynamicReplacement.DynamicPattern.NameEquals) {
           // Dynamic dest with no asterisk is acceptable
           registerDynamicCopyField(new DynamicCopy(source, destDynamicField, maxChars, sourceDynamicBase, destDynamicBase));
           incrementCopyFieldTargetCount(destSchemaField);
-        } else {
+        } else {                    // source: explicit field ; dest: dynamic field with an asterisk
           String msg = "copyField only supports a dynamic destination with an asterisk "
                      + "if the source also has an asterisk";
           throw new SolrException(ErrorCode.SERVER_ERROR, msg);
         }
-      } else {                        // source & dest: explicit fields 
-        List<CopyField> copyFieldList = copyFieldsMap.get(source);
-        if (copyFieldList == null) {
-          copyFieldList = new ArrayList<>();
-          copyFieldsMap.put(source, copyFieldList);
-        }
-        copyFieldList.add(new CopyField(sourceSchemaField, destSchemaField, maxChars));
-        incrementCopyFieldTargetCount(destSchemaField);
+      } else {                        // source & dest: explicit fields
+        registerExplicitSrcAndDestFields(source, maxChars, destSchemaField, sourceSchemaField);
       }
     }
   }
+
+  private void registerExplicitSrcAndDestFields(String source, int maxChars, SchemaField destSchemaField, SchemaField sourceSchemaField) {
+    List<CopyField> copyFieldList = copyFieldsMap.get(source);
+    if (copyFieldList == null) {
+      copyFieldList = new ArrayList<>();
+      copyFieldsMap.put(source, copyFieldList);
+    }
+    copyFieldList.add(new CopyField(sourceSchemaField, destSchemaField, maxChars));
+    incrementCopyFieldTargetCount(destSchemaField);
+  }
   
   private void incrementCopyFieldTargetCount(SchemaField dest) {
     copyFieldTargetCounts.put(dest, copyFieldTargetCounts.containsKey(dest) ? copyFieldTargetCounts.get(dest) + 1 : 1);
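
The extracted registerExplicitSrcAndDestFields keeps the classic get/null-check/put pattern for a map of lists. A hedged sketch of the same registration written with Java 8's Map.computeIfAbsent; the Copy class is an illustrative stand-in, not Solr's CopyField:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CopyFieldMapSketch {
  // Illustrative stand-in for Solr's CopyField; not the real type.
  static class Copy {
    final String source, dest;
    final int maxChars;
    Copy(String source, String dest, int maxChars) {
      this.source = source; this.dest = dest; this.maxChars = maxChars;
    }
  }

  final Map<String, List<Copy>> copyFieldsMap = new HashMap<>();

  // Same effect as the get/null-check/put sequence in the hunk above,
  // expressed as a single computeIfAbsent call.
  void register(String source, String dest, int maxChars) {
    copyFieldsMap.computeIfAbsent(source, k -> new ArrayList<>())
                 .add(new Copy(source, dest, maxChars));
  }
}
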
diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonType.java b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
index d29122a..3ec8eeb 100644
--- a/solr/core/src/java/org/apache/solr/schema/LatLonType.java
+++ b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
@@ -22,11 +22,13 @@
 import java.util.Map;
 import java.util.Set;
 
+import com.spatial4j.core.context.SpatialContext;
+import com.spatial4j.core.distance.DistanceUtils;
 import com.spatial4j.core.shape.Point;
-
+import com.spatial4j.core.shape.Rectangle;
 import org.apache.lucene.document.FieldType;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.StorableField;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
@@ -49,11 +51,6 @@
 import org.apache.solr.search.PostFilter;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.SpatialOptions;
-
-import com.spatial4j.core.context.SpatialContext;
-import com.spatial4j.core.distance.DistanceUtils;
-import com.spatial4j.core.shape.Rectangle;
-
 import org.apache.solr.util.SpatialUtils;
 
 
@@ -256,6 +253,11 @@
     throw new UnsupportedOperationException("LatLonType uses multiple fields.  field=" + field.getName());
   }
 
+  @Override
+  public double getSphereRadius() {
+    return DistanceUtils.EARTH_MEAN_RADIUS_KM;
+  }
+
 }
 
 class LatLonValueSource extends VectorValueSource {
diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
index a37a422..60c165c 100644
--- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
@@ -19,12 +19,13 @@
 import org.apache.commons.io.IOUtils;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.util.CharFilterFactory;
+import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
 import org.apache.lucene.analysis.util.TokenizerFactory;
 import org.apache.solr.analysis.TokenizerChain;
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrResponse;
-import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.cloud.ZkController;
@@ -47,7 +48,6 @@
 import org.apache.solr.rest.schema.FieldTypeXmlAdapter;
 import org.apache.solr.util.DefaultSolrThreadFactory;
 import org.apache.solr.util.FileUtils;
-import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.data.Stat;
@@ -58,7 +58,6 @@
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpressionException;
-
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -80,9 +79,6 @@
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
-import static java.util.Collections.singletonList;
-import static java.util.Collections.singletonMap;
-
 /** Solr-managed schema - non-user-editable, but can be mutable via internal and external REST API requests. */
 public final class ManagedIndexSchema extends IndexSchema {
 
@@ -331,9 +327,8 @@
 
     @Override
     public Integer call() throws Exception {
-      HttpSolrClient solr = new HttpSolrClient(coreUrl);
       int remoteVersion = -1;
-      try {
+      try (HttpSolrClient solr = new HttpSolrClient(coreUrl)) {
         // eventually, this loop will get killed by the ExecutorService's timeout
         while (remoteVersion == -1 || remoteVersion < expectedZkVersion) {
           try {
@@ -358,10 +353,7 @@
             }
           }
         }
-      } finally {
-        solr.shutdown();
       }
-
       return remoteVersion;
     }
 
@@ -560,34 +552,8 @@
       aware.inform(newSchema);
     
    // looks good for the add, notify ResourceLoaderAware objects
-    for (FieldType fieldType : fieldTypeList) {      
-          
-      // must inform any sub-components used in the 
-      // tokenizer chain if they are ResourceLoaderAware    
-      if (fieldType.supportsAnalyzers()) {
-        Analyzer indexAnalyzer = fieldType.getIndexAnalyzer();
-        if (indexAnalyzer != null && indexAnalyzer instanceof TokenizerChain)
-          informResourceLoaderAwareObjectsInChain((TokenizerChain)indexAnalyzer);
-        
-        Analyzer queryAnalyzer = fieldType.getQueryAnalyzer();
-        // ref comparison is correct here (vs. equals) as they may be the same
-        // object in which case, we don't need to inform twice ... however, it's
-        // actually safe to call inform multiple times on an object anyway
-        if (queryAnalyzer != null && 
-            queryAnalyzer != indexAnalyzer && 
-            queryAnalyzer instanceof TokenizerChain)
-          informResourceLoaderAwareObjectsInChain((TokenizerChain)queryAnalyzer);
-
-        // if fieldType is a TextField, it might have a multi-term analyzer
-        if (fieldType instanceof TextField) {
-          TextField textFieldType = (TextField)fieldType;
-          Analyzer multiTermAnalyzer = textFieldType.getMultiTermAnalyzer();
-          if (multiTermAnalyzer != null && multiTermAnalyzer != indexAnalyzer &&
-              multiTermAnalyzer != queryAnalyzer && multiTermAnalyzer instanceof TokenizerChain)
-            informResourceLoaderAwareObjectsInChain((TokenizerChain)multiTermAnalyzer);
-        }
-      }      
-    }
+    for (FieldType fieldType : fieldTypeList)
+      informResourceLoaderAwareObjectsForFieldType(fieldType);
 
     newSchema.refreshAnalyzers();
 
@@ -611,7 +577,39 @@
     }
 
     return newSchema;
-  }  
+  }
+
+  /**
+   * Informs ResourceLoaderAware objects in the analyzer chains used by a fieldType.
+   */
+  protected void informResourceLoaderAwareObjectsForFieldType(FieldType fieldType) {
+    // must inform any sub-components used in the
+    // tokenizer chain if they are ResourceLoaderAware
+    if (!fieldType.supportsAnalyzers())
+      return;
+
+    Analyzer indexAnalyzer = fieldType.getIndexAnalyzer();
+    if (indexAnalyzer != null && indexAnalyzer instanceof TokenizerChain)
+      informResourceLoaderAwareObjectsInChain((TokenizerChain)indexAnalyzer);
+
+    Analyzer queryAnalyzer = fieldType.getQueryAnalyzer();
+    // ref comparison is correct here (vs. equals) as they may be the same
+    // object in which case, we don't need to inform twice ... however, it's
+    // actually safe to call inform multiple times on an object anyway
+    if (queryAnalyzer != null &&
+        queryAnalyzer != indexAnalyzer &&
+        queryAnalyzer instanceof TokenizerChain)
+      informResourceLoaderAwareObjectsInChain((TokenizerChain)queryAnalyzer);
+
+    // if fieldType is a TextField, it might have a multi-term analyzer
+    if (fieldType instanceof TextField) {
+      TextField textFieldType = (TextField)fieldType;
+      Analyzer multiTermAnalyzer = textFieldType.getMultiTermAnalyzer();
+      if (multiTermAnalyzer != null && multiTermAnalyzer != indexAnalyzer &&
+          multiTermAnalyzer != queryAnalyzer && multiTermAnalyzer instanceof TokenizerChain)
+        informResourceLoaderAwareObjectsInChain((TokenizerChain)multiTermAnalyzer);
+    }
+  }
   
   @Override
   public SchemaField newField(String fieldName, String fieldType, Map<String,?> options) {
@@ -786,6 +784,11 @@
       for (SchemaAware aware : newSchema.schemaAware) {
         aware.inform(newSchema);
       }
+
+      // notify analyzers and other objects for our fieldTypes
+      for (FieldType fieldType : newSchema.fieldTypes.values())
+        informResourceLoaderAwareObjectsForFieldType(fieldType);
+
       newSchema.refreshAnalyzers();
       newSchema.schemaZkVersion = schemaZkVersion;
     } catch (SolrException e) {
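
The ManagedIndexSchema hunk above also swaps a manual finally { solr.shutdown(); } for try-with-resources. A minimal sketch of the pattern with a stand-in client class (not SolrJ's API):

public class TryWithResourcesSketch {
  // Stand-in for HttpSolrClient; any AutoCloseable behaves the same way.
  static class Client implements AutoCloseable {
    int fetchVersion() { return 42; }
    @Override public void close() { /* released even if the body throws */ }
  }

  static int getRemoteVersion() {
    // close() runs automatically when the block exits, replacing the
    // explicit finally { solr.shutdown(); } in the old code.
    try (Client solr = new Client()) {
      return solr.fetchVersion();
    }
  }
}
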
diff --git a/solr/core/src/java/org/apache/solr/schema/PointType.java b/solr/core/src/java/org/apache/solr/schema/PointType.java
index 6cba8b4..a5282a1 100644
--- a/solr/core/src/java/org/apache/solr/schema/PointType.java
+++ b/solr/core/src/java/org/apache/solr/schema/PointType.java
@@ -17,10 +17,16 @@
 
 package org.apache.solr.schema;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import com.spatial4j.core.distance.DistanceUtils;
 import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.StorableField;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.valuesource.VectorValueSource;
-import org.apache.lucene.index.StorableField;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.Query;
@@ -33,11 +39,6 @@
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.SpatialOptions;
 
-import java.io.IOException;
-import java.util.Map;
-import java.util.List;
-import java.util.ArrayList;
-
 /**
  * A point type that indexes a point in an n-dimensional space as separate fields and supports range queries.
  * See {@link LatLonType} for geo-spatial queries.
@@ -178,6 +179,7 @@
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
     }
     IndexSchema schema = parser.getReq().getSchema();
+
     if (dimension == 1){
       //TODO: Handle distance measures
       String lower = String.valueOf(point[0] - options.distance);
@@ -273,6 +275,13 @@
     return out;
   }
 
+  @Override
+  public double getSphereRadius() {
+    // This likely won't be used. You should probably be using LatLonType instead if you feel the need for this.
+    // This is here just for backward compatibility reasons.
+    return DistanceUtils.EARTH_MEAN_RADIUS_KM;
+  }
+
 }
 
 
@@ -293,4 +302,4 @@
   public String description() {
     return name()+"("+sf.getName()+")";
   }
-}
\ No newline at end of file
+}
diff --git a/solr/core/src/java/org/apache/solr/schema/RandomSortField.java b/solr/core/src/java/org/apache/solr/schema/RandomSortField.java
index cb2adbb..5d097a2 100644
--- a/solr/core/src/java/org/apache/solr/schema/RandomSortField.java
+++ b/solr/core/src/java/org/apache/solr/schema/RandomSortField.java
@@ -109,7 +109,7 @@
   private static FieldComparatorSource randomComparatorSource = new FieldComparatorSource() {
     @Override
     public FieldComparator<Integer> newComparator(final String fieldname, final int numHits, int sortPos, boolean reversed) {
-      return new FieldComparator<Integer>() {
+      return new SimpleFieldComparator<Integer>() {
         int seed;
         private final int[] values = new int[numHits];
         int bottomVal;
@@ -141,9 +141,8 @@
         }
 
         @Override
-        public FieldComparator setNextReader(LeafReaderContext context) {
+        protected void doSetNextReader(LeafReaderContext context) {
           seed = getSeed(fieldname, context);
-          return this;
         }
 
         @Override
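
The RandomSortField change tracks a Lucene API move: FieldComparator.setNextReader, which returned a (possibly different) comparator per segment, is replaced by a void doSetNextReader hook. SimpleFieldComparator is the convenience base for comparators that always reuse themselves across segments, which is why the "return this;" line disappears rather than being rewritten.
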
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaField.java b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
index cca6e51..e8feaed 100644
--- a/solr/core/src/java/org/apache/solr/schema/SchemaField.java
+++ b/solr/core/src/java/org/apache/solr/schema/SchemaField.java
@@ -96,10 +96,6 @@
   public boolean storeTermOffsets() { return (properties & STORE_TERMOFFSETS)!=0; }
   public boolean omitNorms() { return (properties & OMIT_NORMS)!=0; }
 
-  /** @deprecated Use {@link #omitTermFreqAndPositions} */
-  @Deprecated
-  public boolean omitTf() { return omitTermFreqAndPositions(); }
-
   public boolean omitTermFreqAndPositions() { return (properties & OMIT_TF_POSITIONS)!=0; }
   public boolean omitPositions() { return (properties & OMIT_POSITIONS)!=0; }
   public boolean storeOffsetsWithPositions() { return (properties & STORE_OFFSETS)!=0; }
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
index 5b1060d..5130647 100644
--- a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
+++ b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
@@ -134,7 +134,7 @@
       if (!errs.isEmpty()) return errs;
       SolrResourceLoader loader = req.getCore().getResourceLoader();
       if (loader instanceof ZkSolrResourceLoader) {
-
+        ZkSolrResourceLoader zkLoader = (ZkSolrResourceLoader) loader;
         StringWriter sw = new StringWriter();
         try {
           managedIndexSchema.persist(sw);
@@ -145,7 +145,7 @@
         }
 
         try {
-          ZkController.persistConfigResourceToZooKeeper(loader,
+          ZkController.persistConfigResourceToZooKeeper(zkLoader,
               managedIndexSchema.getSchemaZkVersion(),
               managedIndexSchema.getResourceName(),
               sw.toString().getBytes(StandardCharsets.UTF_8),
diff --git a/solr/core/src/java/org/apache/solr/schema/SpatialQueryable.java b/solr/core/src/java/org/apache/solr/schema/SpatialQueryable.java
index c014570..b4a769a 100644
--- a/solr/core/src/java/org/apache/solr/schema/SpatialQueryable.java
+++ b/solr/core/src/java/org/apache/solr/schema/SpatialQueryable.java
@@ -31,4 +31,6 @@
 public interface SpatialQueryable {
 
   public Query createSpatialQuery(QParser parser, SpatialOptions options);
+
+  public double getSphereRadius();
 }
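
getSphereRadius lets callers pick a sphere radius consistent with the field's distance unit (note the spatial field types above return either distanceUnits.getEarthRadius() or EARTH_MEAN_RADIUS_KM). For orientation, a generic haversine great-circle distance parameterized by that radius; this is the textbook formula, not Solr's implementation:

public class HaversineSketch {
  // Plain haversine distance; 'radius' plays the role of
  // SpatialQueryable.getSphereRadius(). Latitudes/longitudes in degrees,
  // output in whatever unit the radius is expressed in.
  static double haversine(double lat1, double lon1, double lat2, double lon2, double radius) {
    double dLat = Math.toRadians(lat2 - lat1);
    double dLon = Math.toRadians(lon2 - lon1);
    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
        + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
          * Math.sin(dLon / 2) * Math.sin(dLon / 2);
    return 2 * radius * Math.asin(Math.sqrt(a));
  }

  public static void main(String[] args) {
    // ~136 km between points one degree apart in both lat and lon near 45N,
    // using an assumed mean earth radius in km.
    System.out.println(haversine(45, 0, 46, 1, 6371.0087714));
  }
}
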
diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
index 7ac7fe2..6fd25db 100644
--- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
@@ -19,13 +19,28 @@
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;
 
+import com.carrotsearch.hppc.IntArrayList;
+import com.carrotsearch.hppc.IntLongOpenHashMap;
+import com.carrotsearch.hppc.LongArrayList;
+import com.carrotsearch.hppc.cursors.IntLongCursor;
 import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.uninverting.UninvertingReader;
+import org.apache.lucene.util.LongValues;
 import org.apache.lucene.queries.function.FunctionQuery;
 import org.apache.lucene.queries.function.FunctionValues;
 import org.apache.lucene.queries.function.ValueSource;
@@ -45,13 +60,13 @@
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.TrieDoubleField;
 import org.apache.solr.schema.TrieFloatField;
 import org.apache.solr.schema.TrieIntField;
 import org.apache.solr.schema.TrieLongField;
-
+import org.apache.solr.schema.StrField;
 import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.IntOpenHashSet;
 import com.carrotsearch.hppc.cursors.IntIntCursor;
 
 /**
@@ -87,8 +102,6 @@
  collapse : collapses all docs with a null value into a single group using either highest score, or min/max.
  <p/>
  The CollapsingQParserPlugin fully supports the QueryElevationComponent
-
-
  **/
 
 public class CollapsingQParserPlugin extends QParserPlugin {
@@ -97,6 +110,8 @@
   public static final String NULL_COLLAPSE = "collapse";
   public static final String NULL_IGNORE = "ignore";
   public static final String NULL_EXPAND = "expand";
+  public static final String HINT_TOP_FC = "TOP_FC";
+  public static final String HINT_MULTI_DOCVALUES = "MULTI_DOCVALUES";
 
 
   public void init(NamedList namedList) {
@@ -124,19 +139,20 @@
 
   public class CollapsingPostFilter extends ExtendedQueryBase implements PostFilter, ScoreFilter {
 
-    private String field;
+    private String collapseField;
     private String max;
     private String min;
+    public String hint;
     private boolean needsScores = true;
     private int nullPolicy;
     private Map<BytesRef, Integer> boosted;
     public static final int NULL_POLICY_IGNORE = 0;
     public static final int NULL_POLICY_COLLAPSE = 1;
     public static final int NULL_POLICY_EXPAND = 2;
-
+    private int size;
 
     public String getField(){
-      return this.field;
+      return this.collapseField;
     }
 
     public void setCache(boolean cache) {
@@ -156,7 +172,7 @@
     }
 
     public int hashCode() {
-      int hashCode = field.hashCode();
+      int hashCode = collapseField.hashCode();
       hashCode = max!=null ? hashCode+max.hashCode():hashCode;
       hashCode = min!=null ? hashCode+min.hashCode():hashCode;
       hashCode = hashCode+nullPolicy;
@@ -168,7 +184,7 @@
 
       if(o instanceof CollapsingPostFilter) {
         CollapsingPostFilter c = (CollapsingPostFilter)o;
-        if(this.field.equals(c.field) &&
+        if(this.collapseField.equals(c.collapseField) &&
            ((this.max == null && c.max == null) || (this.max != null && c.max != null && this.max.equals(c.max))) &&
            ((this.min == null && c.min == null) || (this.min != null && c.min != null && this.min.equals(c.min))) &&
            this.nullPolicy == c.nullPolicy &&
@@ -188,12 +204,15 @@
     }
 
     public CollapsingPostFilter(SolrParams localParams, SolrParams params, SolrQueryRequest request) throws IOException {
-      this.field = localParams.get("field");
-      if (this.field == null) {
+      this.collapseField = localParams.get("field");
+      if (this.collapseField == null) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Required 'field' param is missing.");
       }
       this.max = localParams.get("max");
       this.min = localParams.get("min");
+      this.hint = localParams.get("hint");
+      this.size = localParams.getInt("size", 100000); //Only used for collapsing on int fields.
+
       if(this.min != null || this.max != null) {
         this.needsScores = needsScores(params);
       }
@@ -208,7 +227,6 @@
       } else {
         throw new IOException("Invalid nullPolicy:"+nPolicy);
       }
-
     }
 
     private IntIntOpenHashMap getBoostDocs(SolrIndexSearcher indexSearcher, Map<BytesRef, Integer> boosted, Map context) throws IOException {
@@ -220,57 +238,12 @@
       try {
 
         SolrIndexSearcher searcher = (SolrIndexSearcher)indexSearcher;
-
-        SortedDocValues docValues = null;
-        FunctionQuery funcQuery = null;
-        docValues = DocValues.getSorted(searcher.getLeafReader(), this.field);
-
-        FieldType fieldType = null;
-
-        if(this.max != null) {
-          if(this.max.indexOf("(") == -1) {
-            fieldType = searcher.getSchema().getField(this.max).getType();
-          } else {
-            LocalSolrQueryRequest request = null;
-            try {
-              SolrParams params = new ModifiableSolrParams();
-              request = new LocalSolrQueryRequest(searcher.getCore(), params);
-              FunctionQParser functionQParser = new FunctionQParser(this.max, null, null,request);
-              funcQuery = (FunctionQuery)functionQParser.parse();
-            } catch (Exception e) {
-              throw new IOException(e);
-            } finally {
-              request.close();
-            }
-          }
-        }
-
-        if(this.min != null) {
-          if(this.min.indexOf("(") == -1) {
-            fieldType = searcher.getSchema().getField(this.min).getType();
-          } else {
-            LocalSolrQueryRequest request = null;
-            try {
-              SolrParams params = new ModifiableSolrParams();
-              request = new LocalSolrQueryRequest(searcher.getCore(), params);
-              FunctionQParser functionQParser = new FunctionQParser(this.min, null, null,request);
-              funcQuery = (FunctionQuery)functionQParser.parse();
-            } catch (Exception e) {
-              throw new IOException(e);
-            } finally {
-              request.close();
-            }
-          }
-        }
-
-        int maxDoc = searcher.maxDoc();
-        int leafCount = searcher.getTopReaderContext().leaves().size();
-
+        CollectorFactory collectorFactory = new CollectorFactory();
         //Deal with boosted docs.
        //We have to deal with it here rather than in the constructor
        //because the QueryElevationComponent runs after the Queries are constructed.
 
-        IntIntOpenHashMap boostDocs = null;
+        IntIntOpenHashMap boostDocsMap = null;
         Map context = null;
         SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
         if(info != null) {
@@ -281,23 +254,17 @@
           this.boosted = (Map<BytesRef, Integer>)context.get(QueryElevationComponent.BOOSTED_PRIORITY);
         }
 
-        boostDocs = getBoostDocs(searcher, this.boosted, context);
+        boostDocsMap = getBoostDocs(searcher, this.boosted, context);
+        return collectorFactory.getCollector(this.collapseField,
+                                             this.min,
+                                             this.max,
+                                             this.nullPolicy,
+                                             this.hint,
+                                             this.needsScores,
+                                             this.size,
+                                             boostDocsMap,
+                                             searcher);
 
-        if (this.min != null || this.max != null) {
-
-          return new CollapsingFieldValueCollector(maxDoc,
-                                                   leafCount,
-                                                   docValues,
-                                                   this.nullPolicy,
-                                                   max != null ? this.max : this.min,
-                                                   max != null,
-                                                   this.needsScores,
-                                                   fieldType,
-                                                   boostDocs,
-                                                   funcQuery, searcher);
-        } else {
-          return new CollapsingScoreCollector(maxDoc, leafCount, docValues, this.nullPolicy, boostDocs);
-        }
       } catch (Exception e) {
         throw new RuntimeException(e);
       }
@@ -337,6 +304,51 @@
     }
   }
 
+  private class ReaderWrapper extends FilterLeafReader {
+
+    private String field;
+
+    public ReaderWrapper(LeafReader leafReader, String field) {
+      super(leafReader);
+      this.field = field;
+    }
+
+    public SortedDocValues getSortedDocValues(String field) {
+      return null;
+    }
+
+    public Object getCoreCacheKey() {
+      return in.getCoreCacheKey();
+    }
+
+    public FieldInfos getFieldInfos() {
+      Iterator<FieldInfo> it = in.getFieldInfos().iterator();
+      List<FieldInfo> newInfos = new ArrayList<>();
+      while(it.hasNext()) {
+        FieldInfo fieldInfo = it.next();
+
+        if(fieldInfo.name.equals(field)) {
+          FieldInfo f = new FieldInfo(fieldInfo.name,
+                                      fieldInfo.number,
+                                      fieldInfo.hasVectors(),
+                                      fieldInfo.hasNorms(),
+                                      fieldInfo.hasPayloads(),
+                                      fieldInfo.getIndexOptions(),
+                                      DocValuesType.NONE,
+                                      fieldInfo.getDocValuesGen(),
+                                      fieldInfo.attributes());
+          newInfos.add(f);
+
+        } else {
+          newInfos.add(fieldInfo);
+        }
+      }
+      FieldInfos infos = new FieldInfos(newInfos.toArray(new FieldInfo[newInfos.size()]));
+      return infos;
+    }
+  }
+
+
   private class DummyScorer extends Scorer {
 
     public float score;
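
ReaderWrapper supports the new TOP_FC hint: by reporting DocValuesType.NONE for the collapse field in getFieldInfos() and returning null from getSortedDocValues(), it hides that field's per-segment doc values, so a wrapping UninvertingReader (imported above) can expose a top-level FieldCache view of the field instead. Delegating getCoreCacheKey() to the inner reader keeps that uninverted view cached against the underlying segment data.
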
@@ -372,11 +384,20 @@
   }
 
 
-  private class CollapsingScoreCollector extends DelegatingCollector {
+
+  /*
+   * Collapses on ordinal values using score to select the group head.
+   */
+
+  private class OrdScoreCollector extends DelegatingCollector {
 
     private LeafReaderContext[] contexts;
     private FixedBitSet collapsedSet;
-    private SortedDocValues values;
+    private SortedDocValues collapseValues;
+    private MultiDocValues.OrdinalMap ordinalMap;
+    private SortedDocValues segmentValues;
+    private LongValues segmentOrdinalMap;
+    private MultiDocValues.MultiSortedDocValues multiSortedDocValues;
     private int[] ords;
     private float[] scores;
     private int maxDoc;
@@ -384,36 +405,25 @@
     private float nullScore = -Float.MAX_VALUE;
     private int nullDoc;
     private FloatArrayList nullScores;
-    private IntIntOpenHashMap boostDocs;
-    private int[] boostOrds;
+    private IntArrayList boostOrds;
+    private IntArrayList boostDocs;
+    private MergeBoost mergeBoost;
+    private boolean boosts;
 
-    public CollapsingScoreCollector(int maxDoc,
-                                    int segments,
-                                    SortedDocValues values,
-                                    int nullPolicy,
-                                    IntIntOpenHashMap boostDocs) {
+    public OrdScoreCollector(int maxDoc,
+                             int segments,
+                             SortedDocValues collapseValues,
+                             int nullPolicy,
+                             IntIntOpenHashMap boostDocsMap) {
       this.maxDoc = maxDoc;
       this.contexts = new LeafReaderContext[segments];
       this.collapsedSet = new FixedBitSet(maxDoc);
-      this.boostDocs = boostDocs;
-      if(this.boostDocs != null) {
-        //Set the elevated docs now.
-        IntOpenHashSet boostG = new IntOpenHashSet();
-        Iterator<IntIntCursor> it = this.boostDocs.iterator();
-        while(it.hasNext()) {
-          IntIntCursor cursor = it.next();
-          int i = cursor.key;
-          this.collapsedSet.set(i);
-          int ord = values.getOrd(i);
-          if(ord > -1) {
-            boostG.add(ord);
-          }
-        }
-        boostOrds = boostG.toArray();
-        Arrays.sort(boostOrds);
+      this.collapseValues = collapseValues;
+      int valueCount = collapseValues.getValueCount();
+      if(collapseValues instanceof MultiDocValues.MultiSortedDocValues) {
+        this.multiSortedDocValues = (MultiDocValues.MultiSortedDocValues)collapseValues;
+        this.ordinalMap = multiSortedDocValues.mapping;
       }
-      this.values = values;
-      int valueCount = values.getValueCount();
       this.ords = new int[valueCount];
       Arrays.fill(this.ords, -1);
       this.scores = new float[valueCount];
@@ -422,24 +432,57 @@
       if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
         nullScores = new FloatArrayList();
       }
-    }
 
-    @Override
-    public boolean acceptsDocsOutOfOrder() {
-      //Documents must be sent in order to this collector.
-      return false;
+      if(boostDocsMap != null) {
+        this.boosts = true;
+        this.boostOrds = new IntArrayList();
+        this.boostDocs = new IntArrayList();
+        int[] bd = new int[boostDocsMap.size()];
+        Iterator<IntIntCursor> it =  boostDocsMap.iterator();
+        int index = -1;
+        while(it.hasNext()) {
+          IntIntCursor cursor = it.next();
+          bd[++index] = cursor.key;
+        }
+
+        Arrays.sort(bd);
+        this.mergeBoost = new MergeBoost(bd);
+      }
     }
 
     @Override
     protected void doSetNextReader(LeafReaderContext context) throws IOException {
       this.contexts[context.ord] = context;
       this.docBase = context.docBase;
+      if(ordinalMap != null) {
+        this.segmentValues = this.multiSortedDocValues.values[context.ord];
+        this.segmentOrdinalMap = ordinalMap.getGlobalOrds(context.ord);
+      } else {
+        this.segmentValues = collapseValues;
+      }
     }
 
     @Override
-    public void collect(int docId) throws IOException {
-      int globalDoc = docId+this.docBase;
-      int ord = values.getOrd(globalDoc);
+    public void collect(int contextDoc) throws IOException {
+      int globalDoc = contextDoc+this.docBase;
+      int ord = -1;
+      if(this.ordinalMap != null) {
+        //Handle ordinalMapping case
+        ord = segmentValues.getOrd(contextDoc);
+        if(ord > -1) {
+          ord = (int)segmentOrdinalMap.get(ord);
+        }
+      } else {
+        //Handle top Level FieldCache or Single Segment Case
+        ord = segmentValues.getOrd(globalDoc);
+      }
+
+      // Check to see if we have documents boosted by the QueryElevationComponent
+      if(boosts && mergeBoost.boost(globalDoc)) {
+        boostDocs.add(globalDoc);
+        boostOrds.add(ord);
+        return;
+      }
 
       if(ord > -1) {
         float score = scorer.score();
@@ -447,9 +490,6 @@
           ords[ord] = globalDoc;
           scores[ord] = score;
         }
-      } else if (this.collapsedSet.get(globalDoc)) {
-        //The doc is elevated so score does not matter
-        //We just want to be sure it doesn't fall into the null policy
       } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
         float score = scorer.score();
         if(score > nullScore) {
@@ -469,15 +509,25 @@
       }
 
       if(nullScore > 0) {
-        this.collapsedSet.set(nullDoc);
+        collapsedSet.set(nullDoc);
       }
 
+      //Handle the boosted docs.
       if(this.boostOrds != null) {
-        for(int i=0; i<this.boostOrds.length; i++) {
-          ords[boostOrds[i]] = -1;
+        int s = boostOrds.size();
+        for(int i=0; i<s; i++) {
+          int ord = this.boostOrds.get(i);
+          if(ord > -1) {
+            //Remove any group heads that are in the same groups as boosted documents.
+            ords[ord] = -1;
+          }
+          //Add the boosted docs to the collapsedSet
+          this.collapsedSet.set(boostDocs.get(i));
         }
+        mergeBoost.reset(); // Reset mergeBoost because we're going to use it again.
       }
 
+      //Build the sorted DocSet of group heads.
       for(int i=0; i<ords.length; i++) {
         int doc = ords[i];
         if(doc > -1) {
@@ -487,37 +537,59 @@
 
       int currentContext = 0;
       int currentDocBase = 0;
+
+      if(ordinalMap != null) {
+        this.segmentValues = this.multiSortedDocValues.values[currentContext];
+        this.segmentOrdinalMap = this.ordinalMap.getGlobalOrds(currentContext);
+      } else {
+        this.segmentValues = collapseValues;
+      }
+
       int nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
       leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
       DummyScorer dummy = new DummyScorer();
       leafDelegate.setScorer(dummy);
       DocIdSetIterator it = new BitSetIterator(collapsedSet, 0L); // cost is not useful here
       int docId = -1;
-      int nullScoreIndex = 0;
+      int index = -1;
       while((docId = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
 
-        int ord = values.getOrd(docId);
-
-        if(ord > -1) {
-          dummy.score = scores[ord];
-        } else if(this.boostDocs != null && boostDocs.containsKey(docId)) {
-          //Elevated docs don't need a score.
-          dummy.score = 0F;
-        } else if (nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
-          dummy.score = nullScore;
-        } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
-          dummy.score = nullScores.get(nullScoreIndex++);
-        }
-
         while(docId >= nextDocBase) {
           currentContext++;
           currentDocBase = contexts[currentContext].docBase;
           nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
           leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
           leafDelegate.setScorer(dummy);
+          if(ordinalMap != null) {
+            this.segmentValues = this.multiSortedDocValues.values[currentContext];
+            this.segmentOrdinalMap = this.ordinalMap.getGlobalOrds(currentContext);
+          }
         }
 
         int contextDoc = docId-currentDocBase;
+
+        int ord = -1;
+        if(this.ordinalMap != null) {
+          //Handle ordinalMapping case
+          ord = segmentValues.getOrd(contextDoc);
+          if(ord > -1) {
+            ord = (int)segmentOrdinalMap.get(ord);
+          }
+        } else {
+          //Handle top Level FieldCache or Single Segment Case
+          ord = segmentValues.getOrd(docId);
+        }
+
+        if(ord > -1) {
+          dummy.score = scores[ord];
+        } else if(boosts && mergeBoost.boost(docId)) {
+          //Ignore so it doesn't mess up the null scoring.
+        } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+          dummy.score = nullScore;
+        } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+          dummy.score = nullScores.get(++index);
+        }
+
         dummy.docId = contextDoc;
         leafDelegate.collect(contextDoc);
       }
@@ -528,46 +600,237 @@
     }
   }
 
-  private class CollapsingFieldValueCollector extends DelegatingCollector {
+  /*
+  * Collapses on an integer field using the score to select the group head.
+  */
+
+  private class IntScoreCollector extends DelegatingCollector {
+
     private LeafReaderContext[] contexts;
-    private SortedDocValues values;
+    private FixedBitSet collapsedSet;
+    private NumericDocValues collapseValues;
+    private IntLongOpenHashMap cmap;
+    private int maxDoc;
+    private int nullPolicy;
+    private float nullScore = -Float.MAX_VALUE;
+    private int nullDoc;
+    private FloatArrayList nullScores;
+    private IntArrayList boostKeys;
+    private IntArrayList boostDocs;
+    private MergeBoost mergeBoost;
+    private boolean boosts;
+    private String field;
+    private int nullValue;
+
+    public IntScoreCollector(int maxDoc,
+                             int segments,
+                             int nullValue,
+                             int nullPolicy,
+                             int size,
+                             String field,
+                             IntIntOpenHashMap boostDocsMap) {
+      this.maxDoc = maxDoc;
+      this.contexts = new LeafReaderContext[segments];
+      this.collapsedSet = new FixedBitSet(maxDoc);
+      this.nullValue = nullValue;
+      this.nullPolicy = nullPolicy;
+      if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        nullScores = new FloatArrayList();
+      }
+      this.cmap = new IntLongOpenHashMap(size);
+      this.field = field;
+
+      if(boostDocsMap != null) {
+        this.boosts = true;
+        this.boostDocs = new IntArrayList();
+        this.boostKeys = new IntArrayList();
+        int[] bd = new int[boostDocsMap.size()];
+        Iterator<IntIntCursor> it =  boostDocsMap.iterator();
+        int index = -1;
+        while(it.hasNext()) {
+          IntIntCursor cursor = it.next();
+          bd[++index] = cursor.key;
+        }
+
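+        //Sort the boosted global doc ids so they can be merged with the ascending stream of collected docs.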
+        Arrays.sort(bd);
+        this.mergeBoost = new MergeBoost(bd);
+      }
+
+    }
+
+    @Override
+    protected void doSetNextReader(LeafReaderContext context) throws IOException {
+      this.contexts[context.ord] = context;
+      this.docBase = context.docBase;
+      this.collapseValues = DocValues.getNumeric(context.reader(), this.field);
+    }
+
+    @Override
+    public void collect(int contextDoc) throws IOException {
+
+      int collapseValue = (int)this.collapseValues.get(contextDoc);
+      int globalDoc = docBase+contextDoc;
+
+      // Check to see if we have documents boosted by the QueryElevationComponent
+      if(boosts && mergeBoost.boost(globalDoc)) {
+        boostDocs.add(globalDoc);
+        boostKeys.add(collapseValue);
+        return;
+      }
+
+      if(collapseValue != nullValue) {
+        float score = scorer.score();
+        if(cmap.containsKey(collapseValue)) {
+          long scoreDoc = cmap.lget();
+          int testScore = (int)(scoreDoc>>32);
+          int currentScore = Float.floatToRawIntBits(score);
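+          //Raw float bits compare in the same order as the floats themselves for non-negative scores.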
+          if(currentScore > testScore) {
+            //Current score is higher so replace the old scoreDoc with the current scoreDoc
+            cmap.lset((((long)currentScore)<<32)+globalDoc);
+          }
+        } else {
+          //Combine the score and document into a long.
+          long scoreDoc = (((long)Float.floatToRawIntBits(score))<<32)+globalDoc;
+          cmap.put(collapseValue, scoreDoc);
+        }
+      } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+        float score = scorer.score();
+        if(score > this.nullScore) {
+          this.nullScore = score;
+          this.nullDoc = globalDoc;
+        }
+      } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        collapsedSet.set(globalDoc);
+        nullScores.add(scorer.score());
+      }
+    }
+
+    @Override
+    public void finish() throws IOException {
+      if(contexts.length == 0) {
+        return;
+      }
+
+      if(nullScore > -1) {
+        collapsedSet.set(nullDoc);
+      }
+
+      //Handle the boosted docs.
+      if(this.boostKeys != null) {
+        int s = boostKeys.size();
+        for(int i=0; i<s; i++) {
+          int key = this.boostKeys.get(i);
+          if(key != nullValue) {
+            cmap.remove(key);
+          }
+          //Add the boosted docs to the collapsedSet
+          this.collapsedSet.set(boostDocs.get(i));
+        }
+      }
+
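+      //Mark each remaining group head: the global doc id is packed into the low 32 bits of the map value.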
+      Iterator<IntLongCursor> it1 = cmap.iterator();
+
+      while(it1.hasNext()) {
+        IntLongCursor cursor = it1.next();
+        int doc = (int)cursor.value;
+        collapsedSet.set(doc);
+      }
+
+      int currentContext = 0;
+      int currentDocBase = 0;
+
+      collapseValues = contexts[currentContext].reader().getNumericDocValues(this.field);
+      int nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
+      leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
+      DummyScorer dummy = new DummyScorer();
+      leafDelegate.setScorer(dummy);
+      DocIdSetIterator it = new BitSetIterator(collapsedSet, 0L); // cost is not useful here
+      int globalDoc = -1;
+      int nullScoreIndex = 0;
+      while((globalDoc = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+
+        while(globalDoc >= nextDocBase) {
+          currentContext++;
+          currentDocBase = contexts[currentContext].docBase;
+          nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
+          leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
+          leafDelegate.setScorer(dummy);
+          collapseValues = contexts[currentContext].reader().getNumericDocValues(this.field);
+        }
+
+        int contextDoc = globalDoc-currentDocBase;
+
+        int collapseValue = (int)collapseValues.get(contextDoc);
+        if(collapseValue != nullValue) {
+          long scoreDoc = cmap.get(collapseValue);
+          dummy.score = Float.intBitsToFloat((int)(scoreDoc>>32));
+        } else if(boosts && mergeBoost.boost(globalDoc)) {
+          //Ignore so boosted documents don't mess up the null scoring policies.
+        } else if (nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+          dummy.score = nullScore;
+        } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+          dummy.score = nullScores.get(nullScoreIndex++);
+        }
+
+        dummy.docId = contextDoc;
+        leafDelegate.collect(contextDoc);
+      }
+
+      if(delegate instanceof DelegatingCollector) {
+        ((DelegatingCollector) delegate).finish();
+      }
+    }
+  }
+
+  /*
+  * Collapses on an ordinal using the min/max value of a field to select the group head.
+  */
+
+  private class OrdFieldValueCollector extends DelegatingCollector {
+    private LeafReaderContext[] contexts;
+    private SortedDocValues collapseValues;
+    protected MultiDocValues.OrdinalMap ordinalMap;
+    protected SortedDocValues segmentValues;
+    protected LongValues segmentOrdinalMap;
+    protected MultiDocValues.MultiSortedDocValues multiSortedDocValues;
 
     private int maxDoc;
     private int nullPolicy;
 
-    private FieldValueCollapse fieldValueCollapse;
+    private OrdFieldValueStrategy collapseStrategy;
     private boolean needsScores;
-    private IntIntOpenHashMap boostDocs;
 
-    public CollapsingFieldValueCollector(int maxDoc,
-                                         int segments,
-                                         SortedDocValues values,
-                                         int nullPolicy,
-                                         String field,
-                                         boolean max,
-                                         boolean needsScores,
-                                         FieldType fieldType,
-                                         IntIntOpenHashMap boostDocs,
-                                         FunctionQuery funcQuery, IndexSearcher searcher) throws IOException{
+    public OrdFieldValueCollector(int maxDoc,
+                                  int segments,
+                                  SortedDocValues collapseValues,
+                                  int nullPolicy,
+                                  String field,
+                                  boolean max,
+                                  boolean needsScores,
+                                  FieldType fieldType,
+                                  IntIntOpenHashMap boostDocs,
+                                  FunctionQuery funcQuery, IndexSearcher searcher) throws IOException{
 
       this.maxDoc = maxDoc;
       this.contexts = new LeafReaderContext[segments];
-      this.values = values;
-      int valueCount = values.getValueCount();
+      this.collapseValues = collapseValues;
+      if(collapseValues instanceof MultiDocValues.MultiSortedDocValues) {
+        this.multiSortedDocValues = (MultiDocValues.MultiSortedDocValues)collapseValues;
+        this.ordinalMap = multiSortedDocValues.mapping;
+      }
+
+      int valueCount = collapseValues.getValueCount();
       this.nullPolicy = nullPolicy;
       this.needsScores = needsScores;
-      this.boostDocs = boostDocs;
       if(funcQuery != null) {
-        this.fieldValueCollapse =  new ValueSourceCollapse(maxDoc, field, nullPolicy, new int[valueCount], max, this.needsScores, boostDocs, funcQuery, searcher, values);
+        this.collapseStrategy =  new OrdValueSourceStrategy(maxDoc, field, nullPolicy, new int[valueCount], max, this.needsScores, boostDocs, funcQuery, searcher, collapseValues);
       } else {
-        if(fieldType instanceof TrieIntField) {
-          this.fieldValueCollapse = new IntValueCollapse(maxDoc, field, nullPolicy, new int[valueCount], max, this.needsScores, boostDocs, values);
-        } else if(fieldType instanceof TrieLongField) {
-          this.fieldValueCollapse =  new LongValueCollapse(maxDoc, field, nullPolicy, new int[valueCount], max, this.needsScores, boostDocs, values);
-        } else if(fieldType instanceof TrieFloatField) {
-          this.fieldValueCollapse =  new FloatValueCollapse(maxDoc, field, nullPolicy, new int[valueCount], max, this.needsScores, boostDocs, values);
+        if(fieldType instanceof TrieIntField || fieldType instanceof TrieFloatField) {
+          this.collapseStrategy = new OrdIntStrategy(maxDoc, field, nullPolicy, new int[valueCount], max, this.needsScores, boostDocs, collapseValues);
+        } else if(fieldType instanceof TrieLongField || fieldType instanceof TrieDoubleField) {
+          this.collapseStrategy =  new OrdLongStrategy(maxDoc, field, nullPolicy, new int[valueCount], max, this.needsScores, boostDocs, collapseValues);
         } else {
-          throw new IOException("min/max must be either TrieInt, TrieLong or TrieFloat.");
+          throw new IOException("min/max must be either TrieInt, TrieLong, TrieFloat or TrieDouble.");
         }
       }
     }
@@ -578,19 +841,33 @@
     }
 
     public void setScorer(Scorer scorer) {
-      this.fieldValueCollapse.setScorer(scorer);
+      this.collapseStrategy.setScorer(scorer);
     }
 
     public void doSetNextReader(LeafReaderContext context) throws IOException {
       this.contexts[context.ord] = context;
       this.docBase = context.docBase;
-      this.fieldValueCollapse.setNextReader(context);
+      this.collapseStrategy.setNextReader(context);
+      if(ordinalMap != null) {
+        this.segmentValues = this.multiSortedDocValues.values[context.ord];
+        this.segmentOrdinalMap = ordinalMap.getGlobalOrds(context.ord);
+      } else {
+        this.segmentValues = collapseValues;
+      }
     }
 
-    public void collect(int docId) throws IOException {
-      int globalDoc = docId+this.docBase;
-      int ord = values.getOrd(globalDoc);
-      fieldValueCollapse.collapse(ord, docId, globalDoc);
+    public void collect(int contextDoc) throws IOException {
+      int globalDoc = contextDoc+this.docBase;
+      int ord = -1;
+      if(this.ordinalMap != null) {
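+        //Map the segment-level ordinal to the global ordinal space.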
+        ord = segmentValues.getOrd(contextDoc);
+        if(ord > -1) {
+          ord = (int)segmentOrdinalMap.get(ord);
+        }
+      } else {
+        ord = segmentValues.getOrd(globalDoc);
+      }
+      collapseStrategy.collapse(ord, contextDoc, globalDoc);
     }
 
     public void finish() throws IOException {
@@ -600,23 +877,58 @@
 
       int currentContext = 0;
       int currentDocBase = 0;
+
+      if(ordinalMap != null) {
+        this.segmentValues = this.multiSortedDocValues.values[currentContext];
+        this.segmentOrdinalMap = this.ordinalMap.getGlobalOrds(currentContext);
+      } else {
+        this.segmentValues = collapseValues;
+      }
+
       int nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
       leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
       DummyScorer dummy = new DummyScorer();
       leafDelegate.setScorer(dummy);
-      DocIdSetIterator it = new BitSetIterator(fieldValueCollapse.getCollapsedSet(), 0); // cost is not useful here
-      int docId = -1;
+      DocIdSetIterator it = new BitSetIterator(collapseStrategy.getCollapsedSet(), 0); // cost is not useful here
+      int globalDoc = -1;
       int nullScoreIndex = 0;
-      float[] scores = fieldValueCollapse.getScores();
-      FloatArrayList nullScores = fieldValueCollapse.getNullScores();
-      float nullScore = fieldValueCollapse.getNullScore();
-      while((docId = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+      float[] scores = collapseStrategy.getScores();
+      FloatArrayList nullScores = collapseStrategy.getNullScores();
+      float nullScore = collapseStrategy.getNullScore();
+
+      MergeBoost mergeBoost = collapseStrategy.getMergeBoost();
+      while((globalDoc = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+
+        while(globalDoc >= nextDocBase) {
+          currentContext++;
+          currentDocBase = contexts[currentContext].docBase;
+          nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
+          leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
+          leafDelegate.setScorer(dummy);
+          if(ordinalMap != null) {
+            this.segmentValues = this.multiSortedDocValues.values[currentContext];
+            this.segmentOrdinalMap = this.ordinalMap.getGlobalOrds(currentContext);
+          }
+        }
+
+        int contextDoc = globalDoc-currentDocBase;
 
         if(this.needsScores){
-          int ord = values.getOrd(docId);
+          int ord = -1;
+          if(this.ordinalMap != null) {
+            //Handle ordinalMapping case
+            ord = segmentValues.getOrd(contextDoc);
+            if(ord > -1) {
+              ord = (int)segmentOrdinalMap.get(ord);
+            }
+          } else {
+            //Handle top Level FieldCache or Single Segment Case
+            ord = segmentValues.getOrd(globalDoc);
+          }
+
           if(ord > -1) {
             dummy.score = scores[ord];
-          } else if (boostDocs != null && boostDocs.containsKey(docId)) {
+          } else if (mergeBoost != null && mergeBoost.boost(globalDoc)) {
             //It's an elevated doc so no score is needed
             dummy.score = 0F;
           } else if (nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
@@ -626,15 +938,6 @@
           }
         }
 
-        while(docId >= nextDocBase) {
-          currentContext++;
-          currentDocBase = contexts[currentContext].docBase;
-          nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
-          leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
-          leafDelegate.setScorer(dummy);
-        }
-
-        int contextDoc = docId-currentDocBase;
         dummy.docId = contextDoc;
         leafDelegate.collect(contextDoc);
       }
@@ -645,7 +948,320 @@
     }
   }
 
-  private abstract class FieldValueCollapse {
+
+  /*
+  * Collapses on an integer field using the min/max value of a numeric field to select the group head.
+  */
+
+  private class IntFieldValueCollector extends DelegatingCollector {
+    private LeafReaderContext[] contexts;
+    private NumericDocValues collapseValues;
+    private int maxDoc;
+    private int nullValue;
+    private int nullPolicy;
+
+    private IntFieldValueStrategy collapseStrategy;
+    private boolean needsScores;
+    private String collapseField;
+
+    public IntFieldValueCollector(int maxDoc,
+                                  int size,
+                                  int segments,
+                                  int nullValue,
+                                  int nullPolicy,
+                                  String collapseField,
+                                  String field,
+                                  boolean max,
+                                  boolean needsScores,
+                                  FieldType fieldType,
+                                  IntIntOpenHashMap boostDocsMap,
+                                  FunctionQuery funcQuery,
+                                  IndexSearcher searcher) throws IOException{
+
+      this.maxDoc = maxDoc;
+      this.contexts = new LeafReaderContext[segments];
+      this.collapseField = collapseField;
+      this.nullValue = nullValue;
+      this.nullPolicy = nullPolicy;
+      this.needsScores = needsScores;
+      if(funcQuery != null) {
+        this.collapseStrategy =  new IntValueSourceStrategy(maxDoc, field, size, collapseField, nullValue, nullPolicy, max, this.needsScores, boostDocsMap, funcQuery, searcher);
+      } else {
+        if(fieldType instanceof TrieIntField || fieldType instanceof TrieFloatField) {
+          this.collapseStrategy = new IntIntStrategy(maxDoc, size, collapseField, field, nullValue, nullPolicy, max, this.needsScores, boostDocsMap);
+        } else {
+          throw new IOException("min/max must be TrieInt or TrieFloat when collapsing on numeric fields .");
+        }
+      }
+    }
+
+    //Note: documents must be collected in docId order.
+
+    public void setScorer(Scorer scorer) {
+      this.collapseStrategy.setScorer(scorer);
+    }
+
+    public void doSetNextReader(LeafReaderContext context) throws IOException {
+      this.contexts[context.ord] = context;
+      this.docBase = context.docBase;
+      this.collapseStrategy.setNextReader(context);
+      this.collapseValues = context.reader().getNumericDocValues(this.collapseField);
+    }
+
+    public void collect(int contextDoc) throws IOException {
+      int globalDoc = contextDoc+this.docBase;
+      int collapseKey = (int)this.collapseValues.get(contextDoc);
+      collapseStrategy.collapse(collapseKey, contextDoc, globalDoc);
+    }
+
+    public void finish() throws IOException {
+      if(contexts.length == 0) {
+        return;
+      }
+
+      int currentContext = 0;
+      int currentDocBase = 0;
+      this.collapseValues = contexts[currentContext].reader().getNumericDocValues(this.collapseField);
+      int nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
+      leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
+      DummyScorer dummy = new DummyScorer();
+      leafDelegate.setScorer(dummy);
+      DocIdSetIterator it = new BitSetIterator(collapseStrategy.getCollapsedSet(), 0); // cost is not useful here
+      int globalDoc = -1;
+      int nullScoreIndex = 0;
+      IntLongOpenHashMap cmap = collapseStrategy.getCollapseMap();
+      LongArrayList docScores = collapseStrategy.getDocScores();
+      FloatArrayList nullScores = collapseStrategy.getNullScores();
+      MergeBoost mergeBoost = collapseStrategy.getMergeBoost();
+      float nullScore = collapseStrategy.getNullScore();
+
+      while((globalDoc = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
+
+        while(globalDoc >= nextDocBase) {
+          currentContext++;
+          currentDocBase = contexts[currentContext].docBase;
+          nextDocBase = currentContext+1 < contexts.length ? contexts[currentContext+1].docBase : maxDoc;
+          leafDelegate = delegate.getLeafCollector(contexts[currentContext]);
+          leafDelegate.setScorer(dummy);
+          this.collapseValues = contexts[currentContext].reader().getNumericDocValues(this.collapseField);
+        }
+
+        int contextDoc = globalDoc-currentDocBase;
+
+        if(this.needsScores){
+          int collapseValue = (int)collapseValues.get(contextDoc);
+          if(collapseValue != nullValue) {
+            long pointerValue = cmap.get(collapseValue);
+            //Unpack the pointer
+            int pointer = (int)(pointerValue>>32);
+            long docScore = docScores.get(pointer);
+            //Unpack the score
+            dummy.score = Float.intBitsToFloat(((int)docScore));
+          } else if (mergeBoost != null && mergeBoost.boost(globalDoc)) {
+            //It's an elevated doc so no score is needed
+            dummy.score = 0F;
+          } else if (nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+            dummy.score = nullScore;
+          } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+            dummy.score = nullScores.get(nullScoreIndex++);
+          }
+        }
+
+        dummy.docId = contextDoc;
+        leafDelegate.collect(contextDoc);
+      }
+
+      if(delegate instanceof DelegatingCollector) {
+        ((DelegatingCollector) delegate).finish();
+      }
+    }
+  }
+
+  private class CollectorFactory {
+
+    public DelegatingCollector getCollector(String collapseField,
+                                            String min,
+                                            String max,
+                                            int nullPolicy,
+                                            String hint,
+                                            boolean needsScores,
+                                            int size,
+                                            IntIntOpenHashMap boostDocs,
+                                            SolrIndexSearcher searcher) throws IOException {
+
+      SortedDocValues docValues = null;
+      FunctionQuery funcQuery = null;
+
+      FieldType collapseFieldType = searcher.getSchema().getField(collapseField).getType();
+      String defaultValue = searcher.getSchema().getField(collapseField).getDefaultValue();
+
+      if(collapseFieldType instanceof StrField) {
+        if(HINT_TOP_FC.equals(hint)) {
+          /*
+          * This hint forces the use of the top level FieldCache for String fields.
+          * This is VERY fast at query time but slower to warm and causes FieldCache
+          * insanity (the same field cached at both the top level and the segment level).
+          */
+          Map<String, UninvertingReader.Type> mapping = new HashMap<>();
+          mapping.put(collapseField, UninvertingReader.Type.SORTED);
+          UninvertingReader uninvertingReader = new UninvertingReader(new ReaderWrapper(searcher.getLeafReader(), collapseField), mapping);
+          docValues = uninvertingReader.getSortedDocValues(collapseField);
+        } else {
+          docValues = DocValues.getSorted(searcher.getLeafReader(), collapseField);
+        }
+      } else {
+        if(HINT_TOP_FC.equals(hint)) {
+          throw new IOException("top_fc hint is only supported when collapsing on String Fields");
+        }
+      }
+
+      FieldType minMaxFieldType = null;
+      if(max != null) {
+        if(max.indexOf("(") == -1) {
+          minMaxFieldType = searcher.getSchema().getField(max).getType();
+        } else {
+          LocalSolrQueryRequest request = null;
+          try {
+            SolrParams params = new ModifiableSolrParams();
+            request = new LocalSolrQueryRequest(searcher.getCore(), params);
+            FunctionQParser functionQParser = new FunctionQParser(max, null, null, request);
+            funcQuery = (FunctionQuery)functionQParser.parse();
+          } catch (Exception e) {
+            throw new IOException(e);
+          } finally {
+            if(request != null) {
+              request.close();
+            }
+          }
+        }
+      }
+
+      if(min != null) {
+        if(min.indexOf("(") == -1) {
+          minMaxFieldType = searcher.getSchema().getField(min).getType();
+        } else {
+          LocalSolrQueryRequest request = null;
+          try {
+            SolrParams params = new ModifiableSolrParams();
+            request = new LocalSolrQueryRequest(searcher.getCore(), params);
+            FunctionQParser functionQParser = new FunctionQParser(min, null, null, request);
+            funcQuery = (FunctionQuery)functionQParser.parse();
+          } catch (Exception e) {
+            throw new IOException(e);
+          } finally {
+            if(request != null) {
+              request.close();
+            }
+          }
+        }
+      }
+
+      int maxDoc = searcher.maxDoc();
+      int leafCount = searcher.getTopReaderContext().leaves().size();
+
+      if (min != null || max != null) {
+
+        if(collapseFieldType instanceof StrField) {
+
+          return new OrdFieldValueCollector(maxDoc,
+                                            leafCount,
+                                            docValues,
+                                            nullPolicy,
+                                            max != null ? max : min,
+                                            max != null,
+                                            needsScores,
+                                            minMaxFieldType,
+                                            boostDocs,
+                                            funcQuery,
+                                            searcher);
+
+        } else if((collapseFieldType instanceof TrieIntField ||
+                   collapseFieldType instanceof TrieFloatField)) {
+
+          int nullValue = 0;
+
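+          //Float default values are stored as int bits so they can flow through the int-based collectors.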
+          if(collapseFieldType instanceof TrieFloatField) {
+            if(defaultValue != null) {
+              nullValue = Float.floatToIntBits(Float.parseFloat(defaultValue));
+            } else {
+              nullValue = Float.floatToIntBits(0.0f);
+            }
+          } else {
+            if(defaultValue != null) {
+              nullValue = Integer.parseInt(defaultValue);
+            }
+          }
+
+          return new IntFieldValueCollector(maxDoc,
+                                            size,
+                                            leafCount,
+                                            nullValue,
+                                            nullPolicy,
+                                            collapseField,
+                                            max != null ? max : min,
+                                            max != null,
+                                            needsScores,
+                                            minMaxFieldType,
+                                            boostDocs,
+                                            funcQuery,
+                                            searcher);
+        } else {
+          throw new IOException("64 bit numeric collapse fields are not supported");
+        }
+
+      } else {
+
+        if(collapseFieldType instanceof StrField) {
+
+          return new OrdScoreCollector(maxDoc, leafCount, docValues, nullPolicy, boostDocs);
+
+        } else if(collapseFieldType instanceof TrieIntField ||
+                  collapseFieldType instanceof TrieFloatField) {
+
+          int nullValue = 0;
+
+          if(collapseFieldType instanceof TrieFloatField) {
+            if(defaultValue != null) {
+              nullValue = Float.floatToIntBits(Float.parseFloat(defaultValue));
+            } else {
+              nullValue = Float.floatToIntBits(0.0f);
+            }
+          } else {
+            if(defaultValue != null) {
+              nullValue = Integer.parseInt(defaultValue);
+            }
+          }
+
+          return new IntScoreCollector(maxDoc, leafCount, nullValue, nullPolicy, size, collapseField, boostDocs);
+
+        } else {
+          throw new IOException("64 bit numeric collapse fields are not supported");
+        }
+      }
+    }
+  }
+
+  public static final class CollapseScore {
+    public float score;
+  }
+
+
+  /*
+  * Collapse Strategies
+  */
+
+  /*
+  * The abstract base strategy for collapse strategies that collapse on an ordinal
+  * using the min/max value of a field to select the group head.
+  */
+
+  private abstract class OrdFieldValueStrategy {
     protected int nullPolicy;
     protected int[] ords;
     protected Scorer scorer;
@@ -653,55 +1269,69 @@
     protected float nullScore;
     protected float[] scores;
     protected FixedBitSet collapsedSet;
-    protected IntIntOpenHashMap boostDocs;
-    protected int[] boostOrds;
     protected int nullDoc = -1;
     protected boolean needsScores;
     protected boolean max;
     protected String field;
+    protected boolean boosts;
+    protected IntArrayList boostOrds;
+    protected IntArrayList boostDocs;
+    protected MergeBoost mergeBoost;
+    protected boolean boosted;
 
     public abstract void collapse(int ord, int contextDoc, int globalDoc) throws IOException;
     public abstract void setNextReader(LeafReaderContext context) throws IOException;
 
-    public FieldValueCollapse(int maxDoc,
-                              String field,
-                              int nullPolicy,
-                              boolean max,
-                              boolean needsScores,
-                              IntIntOpenHashMap boostDocs,
-                              SortedDocValues values) {
+    public OrdFieldValueStrategy(int maxDoc,
+                                 String field,
+                                 int nullPolicy,
+                                 boolean max,
+                                 boolean needsScores,
+                                 IntIntOpenHashMap boostDocsMap,
+                                 SortedDocValues values) {
       this.field = field;
       this.nullPolicy = nullPolicy;
       this.max = max;
       this.needsScores = needsScores;
       this.collapsedSet = new FixedBitSet(maxDoc);
-      this.boostDocs = boostDocs;
-      if(this.boostDocs != null) {
-        IntOpenHashSet boostG = new IntOpenHashSet();
-        Iterator<IntIntCursor> it = boostDocs.iterator();
+      if(boostDocsMap != null) {
+        this.boosts = true;
+        this.boostOrds = new IntArrayList();
+        this.boostDocs = new IntArrayList();
+        int[] bd = new int[boostDocsMap.size()];
+        Iterator<IntIntCursor> it =  boostDocsMap.iterator();
+        int index = -1;
         while(it.hasNext()) {
           IntIntCursor cursor = it.next();
-          int i = cursor.key;
-          this.collapsedSet.set(i);
-          int ord = values.getOrd(i);
-          if(ord > -1) {
-            boostG.add(ord);
-          }
+          bd[++index] = cursor.key;
         }
-        this.boostOrds = boostG.toArray();
-        Arrays.sort(this.boostOrds);
+
+        Arrays.sort(bd);
+        this.mergeBoost = new MergeBoost(bd);
+        this.boosted = true;
       }
     }
 
+    public MergeBoost getMergeBoost() {
+      return this.mergeBoost;
+    }
+
     public FixedBitSet getCollapsedSet() {
       if(nullDoc > -1) {
         this.collapsedSet.set(nullDoc);
       }
 
       if(this.boostOrds != null) {
-        for(int i=0; i<this.boostOrds.length; i++) {
-          ords[boostOrds[i]] = -1;
+        int s = boostOrds.size();
+        for(int i=0; i<s; i++) {
+          int ord = boostOrds.get(i);
+          if(ord > -1) {
+            ords[ord] = -1;
+          }
+          collapsedSet.set(boostDocs.get(i));
         }
+
+        mergeBoost.reset();
       }
 
       for(int i=0; i<ords.length; i++) {
@@ -731,20 +1361,25 @@
     }
   }
 
-  private class IntValueCollapse extends FieldValueCollapse {
+  /*
+  * Strategy for collapsing on an ordinal using the min/max value of an int field to select the group head.
+  */
 
-    private NumericDocValues vals;
+  private class OrdIntStrategy extends OrdFieldValueStrategy {
+
+    private NumericDocValues minMaxValues;
     private IntCompare comp;
     private int nullVal;
     private int[] ordVals;
 
-    public IntValueCollapse(int maxDoc,
-                            String field,
-                            int nullPolicy,
-                            int[] ords,
-                            boolean max,
-                            boolean needsScores,
-                            IntIntOpenHashMap boostDocs, SortedDocValues values) throws IOException {
+    public OrdIntStrategy(int maxDoc,
+                          String field,
+                          int nullPolicy,
+                          int[] ords,
+                          boolean max,
+                          boolean needsScores,
+                          IntIntOpenHashMap boostDocs,
+                          SortedDocValues values) throws IOException {
       super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
       this.ords = ords;
       this.ordVals = new int[ords.length];
@@ -768,24 +1403,30 @@
     }
 
     public void setNextReader(LeafReaderContext context) throws IOException {
-      this.vals = DocValues.getNumeric(context.reader(), this.field);
+      this.minMaxValues = DocValues.getNumeric(context.reader(), this.field);
     }
 
     public void collapse(int ord, int contextDoc, int globalDoc) throws IOException {
-      int val = (int) vals.get(contextDoc);
+
+      if(this.boosted && mergeBoost.boost(globalDoc)) {
+        this.boostDocs.add(globalDoc);
+        this.boostOrds.add(ord);
+        return;
+      }
+
+      int currentVal = (int) minMaxValues.get(contextDoc);
+
       if(ord > -1) {
-        if(comp.test(val, ordVals[ord])) {
+        if(comp.test(currentVal, ordVals[ord])) {
           ords[ord] = globalDoc;
-          ordVals[ord] = val;
+          ordVals[ord] = currentVal;
           if(needsScores) {
             scores[ord] = scorer.score();
           }
         }
-      } else if(this.collapsedSet.get(globalDoc)) {
-        // Elevated doc so do nothing.
       } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
-        if(comp.test(val, nullVal)) {
-          nullVal = val;
+        if(comp.test(currentVal, nullVal)) {
+          nullVal = currentVal;
           nullDoc = globalDoc;
           if(needsScores) {
             nullScore = scorer.score();
@@ -800,19 +1441,24 @@
     }
   }
 
-  private class LongValueCollapse extends FieldValueCollapse {
+  /*
+  * Strategy for collapsing on an ordinal using the min/max value of a long
+  * field to select the group head.
+  */
 
-    private NumericDocValues vals;
+  private class OrdLongStrategy extends OrdFieldValueStrategy {
+
+    private NumericDocValues minMaxVals;
     private LongCompare comp;
     private long nullVal;
     private long[] ordVals;
 
-    public LongValueCollapse(int maxDoc, String field,
-                             int nullPolicy,
-                             int[] ords,
-                             boolean max,
-                             boolean needsScores,
-                             IntIntOpenHashMap boostDocs, SortedDocValues values) throws IOException {
+    public OrdLongStrategy(int maxDoc, String field,
+                           int nullPolicy,
+                           int[] ords,
+                           boolean max,
+                           boolean needsScores,
+                           IntIntOpenHashMap boostDocs, SortedDocValues values) throws IOException {
       super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
       this.ords = ords;
       this.ordVals = new long[ords.length];
@@ -836,24 +1482,29 @@
     }
 
     public void setNextReader(LeafReaderContext context) throws IOException {
-      this.vals = DocValues.getNumeric(context.reader(), this.field);
+      this.minMaxVals = DocValues.getNumeric(context.reader(), this.field);
     }
 
     public void collapse(int ord, int contextDoc, int globalDoc) throws IOException {
-      long val = vals.get(contextDoc);
+
+      if(boosted && mergeBoost.boost(globalDoc)) {
+        this.boostOrds.add(ord);
+        this.boostDocs.add(globalDoc);
+        return;
+      }
+
+      long currentVal = minMaxVals.get(contextDoc);
       if(ord > -1) {
-        if(comp.test(val, ordVals[ord])) {
+        if(comp.test(currentVal, ordVals[ord])) {
           ords[ord] = globalDoc;
-          ordVals[ord] = val;
+          ordVals[ord] = currentVal;
           if(needsScores) {
             scores[ord] = scorer.score();
           }
         }
-      } else if (this.collapsedSet.get(globalDoc)) {
-        //Elevated doc so do nothing
       } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
-        if(comp.test(val, nullVal)) {
-          nullVal = val;
+        if(comp.test(currentVal, nullVal)) {
+          nullVal = currentVal;
           nullDoc = globalDoc;
           if(needsScores) {
             nullScore = scorer.score();
@@ -868,76 +1519,12 @@
     }
   }
 
-  private class FloatValueCollapse extends FieldValueCollapse {
+  /*
+  * Strategy for collapsing on an ordinal using the min/max value of a value source function
+  * to select the group head.
+  */
 
-    private NumericDocValues vals;
-    private FloatCompare comp;
-    private float nullVal;
-    private float[] ordVals;
-
-    public FloatValueCollapse(int maxDoc,
-                              String field,
-                              int nullPolicy,
-                              int[] ords,
-                              boolean max,
-                              boolean needsScores,
-                              IntIntOpenHashMap boostDocs, SortedDocValues values) throws IOException {
-      super(maxDoc, field, nullPolicy, max, needsScores, boostDocs, values);
-      this.ords = ords;
-      this.ordVals = new float[ords.length];
-      Arrays.fill(ords, -1);
-
-      if(max) {
-        comp = new MaxFloatComp();
-        Arrays.fill(ordVals, -Float.MAX_VALUE );
-      } else {
-        this.nullVal = Float.MAX_VALUE;
-        comp = new MinFloatComp();
-        Arrays.fill(ordVals, Float.MAX_VALUE);
-      }
-
-      if(needsScores) {
-        this.scores = new float[ords.length];
-        if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
-          nullScores = new FloatArrayList();
-        }
-      }
-    }
-
-    public void setNextReader(LeafReaderContext context) throws IOException {
-      this.vals = DocValues.getNumeric(context.reader(), this.field);
-    }
-
-    public void collapse(int ord, int contextDoc, int globalDoc) throws IOException {
-      float val = Float.intBitsToFloat((int)vals.get(contextDoc));
-      if(ord > -1) {
-        if(comp.test(val, ordVals[ord])) {
-          ords[ord] = globalDoc;
-          ordVals[ord] = val;
-          if(needsScores) {
-            scores[ord] = scorer.score();
-          }
-        }
-      } else if (this.collapsedSet.get(globalDoc)) {
-        //Elevated doc so do nothing
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
-        if(comp.test(val, nullVal)) {
-          nullVal = val;
-          nullDoc = globalDoc;
-          if(needsScores) {
-            nullScore = scorer.score();
-          }
-        }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
-        this.collapsedSet.set(globalDoc);
-        if(needsScores) {
-          nullScores.add(scorer.score());
-        }
-      }
-    }
-  }
-
-  private class ValueSourceCollapse extends FieldValueCollapse {
+  private class OrdValueSourceStrategy extends OrdFieldValueStrategy {
 
     private FloatCompare comp;
     private float nullVal;
@@ -949,14 +1536,16 @@
     private float score;
     private boolean cscore;
 
-    public ValueSourceCollapse(int maxDoc,
-                               String funcStr,
-                               int nullPolicy,
-                               int[] ords,
-                               boolean max,
-                               boolean needsScores,
-                               IntIntOpenHashMap boostDocs,
-                               FunctionQuery funcQuery, IndexSearcher searcher, SortedDocValues values) throws IOException {
+    public OrdValueSourceStrategy(int maxDoc,
+                                  String funcStr,
+                                  int nullPolicy,
+                                  int[] ords,
+                                  boolean max,
+                                  boolean needsScores,
+                                  IntIntOpenHashMap boostDocs,
+                                  FunctionQuery funcQuery,
+                                  IndexSearcher searcher,
+                                  SortedDocValues values) throws IOException {
       super(maxDoc, null, nullPolicy, max, needsScores, boostDocs, values);
       this.valueSource = funcQuery.getValueSource();
       this.rcontext = ValueSource.newContext(searcher);
@@ -991,26 +1580,30 @@
     }
 
     public void collapse(int ord, int contextDoc, int globalDoc) throws IOException {
+
+      if(boosted && mergeBoost.boost(globalDoc)) {
+        this.boostOrds.add(ord);
+        this.boostDocs.add(globalDoc);
+      }
+
       if(needsScores || cscore) {
         this.score = scorer.score();
         this.collapseScore.score = score;
       }
 
-      float val = functionValues.floatVal(contextDoc);
+      float currentVal = functionValues.floatVal(contextDoc);
 
       if(ord > -1) {
-        if(comp.test(val, ordVals[ord])) {
+        if(comp.test(currentVal, ordVals[ord])) {
           ords[ord] = globalDoc;
-          ordVals[ord] = val;
+          ordVals[ord] = currentVal;
           if(needsScores) {
             scores[ord] = score;
           }
         }
-      } else if (this.collapsedSet.get(globalDoc)) {
-        //Elevated doc so do nothing
       } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
-        if(comp.test(val, nullVal)) {
-          nullVal = val;
+        if(comp.test(currentVal, nullVal)) {
+          nullVal = currentVal;
           nullDoc = globalDoc;
           if(needsScores) {
             nullScore = score;
@@ -1025,8 +1618,404 @@
     }
   }
 
-  public static final class CollapseScore {
-    public float score;
+
+  /*
+  * Base strategy for collapsing on a 32 bit numeric field and selecting a group head
+  * based on the min/max value of a 32 bit numeric field.
+  */
+
+  private abstract class IntFieldValueStrategy {
+    protected int nullPolicy;
+    protected IntLongOpenHashMap cmap;
+    protected Scorer scorer;
+    protected FloatArrayList nullScores;
+    protected float nullScore;
+    protected float[] scores;
+    protected FixedBitSet collapsedSet;
+    protected int nullDoc = -1;
+    protected boolean needsScores;
+    protected boolean max;
+    protected String field;
+    protected String collapseField;
+    protected LongArrayList docScores;
+    protected IntArrayList docs;
+    protected int nullValue;
+    protected IntArrayList boostDocs;
+    protected IntArrayList boostKeys;
+    protected boolean boosts;
+    protected MergeBoost mergeBoost;
+
+    public abstract void collapse(int collapseKey, int contextDoc, int globalDoc) throws IOException;
+    public abstract void setNextReader(LeafReaderContext context) throws IOException;
+
+    public IntFieldValueStrategy(int maxDoc,
+                                 int size,
+                                 String collapseField,
+                                 String field,
+                                 int nullValue,
+                                 int nullPolicy,
+                                 boolean max,
+                                 boolean needsScores,
+                                 IntIntOpenHashMap boostDocsMap) {
+      this.field = field;
+      this.collapseField = collapseField;
+      this.nullValue = nullValue;
+      this.nullPolicy = nullPolicy;
+      this.max = max;
+      this.needsScores = needsScores;
+      this.collapsedSet = new FixedBitSet(maxDoc);
+      this.cmap = new IntLongOpenHashMap(size);
+      if(boostDocsMap != null) {
+        this.boosts = true;
+        this.boostDocs = new IntArrayList();
+        this.boostKeys = new IntArrayList();
+        int[] bd = new int[boostDocsMap.size()];
+        Iterator<IntIntCursor> it =  boostDocsMap.iterator();
+        int index = -1;
+        while(it.hasNext()) {
+          IntIntCursor cursor = it.next();
+          bd[++index] = cursor.key;
+        }
+
+        Arrays.sort(bd);
+        this.mergeBoost = new MergeBoost(bd);
+      }
+    }
+
+    public FixedBitSet getCollapsedSet() {
+
+      if(nullDoc > -1) {
+        this.collapsedSet.set(nullDoc);
+      }
+
+      //Handle the boosted docs.
+      if(this.boostKeys != null) {
+        int s = boostKeys.size();
+        for(int i=0; i<s; i++) {
+          int key = this.boostKeys.get(i);
+          if(key != nullValue) {
+            cmap.remove(key);
+          }
+          //Add the boosted docs to the collapsedSet
+          this.collapsedSet.set(boostDocs.get(i));
+        }
+
+        mergeBoost.reset();
+      }
+
+      Iterator<IntLongCursor> it1 = cmap.iterator();
+
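+      //Each map value packs a pointer (high 32 bits) into the docs/docScores lists for the group head.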
+      if(needsScores) {
+        while(it1.hasNext()) {
+          IntLongCursor cursor = it1.next();
+          int pointer = (int)(cursor.value>>32);
+          collapsedSet.set((int)(docScores.get(pointer)>>32));
+        }
+      } else {
+        while(it1.hasNext()) {
+          IntLongCursor cursor = it1.next();
+          int pointer = (int)(cursor.value>>32);
+          collapsedSet.set(docs.get(pointer));
+        }
+      }
+
+      return collapsedSet;
+    }
+
+    public void setScorer(Scorer scorer) {
+      this.scorer = scorer;
+    }
+
+    public FloatArrayList getNullScores() {
+      return nullScores;
+    }
+
+    public IntLongOpenHashMap getCollapseMap() {
+      return cmap;
+    }
+
+    public float getNullScore() {
+      return this.nullScore;
+    }
+
+    public LongArrayList getDocScores() {
+      return this.docScores;
+    }
+
+    public float[] getScores() {
+      return scores;
+    }
+
+    public MergeBoost getMergeBoost()  {
+      return this.mergeBoost;
+    }
+  }
+
+  /*
+  *  Strategy for collapsing on a 32 bit numeric field and selecting the group head based
+  *  on the min/max value of a 32 bit numeric field.
+  */
+
+  private class IntIntStrategy extends IntFieldValueStrategy {
+
+    private NumericDocValues minMaxVals;
+    private IntCompare comp;
+    private int nullCompVal;
+
+    private int index=-1;
+
+    public IntIntStrategy(int maxDoc,
+                          int size,
+                          String collapseField,
+                          String field,
+                          int nullValue,
+                          int nullPolicy,
+                          boolean max,
+                          boolean needsScores,
+                          IntIntOpenHashMap boostDocs) throws IOException {
+
+      super(maxDoc, size, collapseField, field, nullValue, nullPolicy, max, needsScores, boostDocs);
+
+      if(max) {
+        comp = new MaxIntComp();
+      } else {
+        comp = new MinIntComp();
+        this.nullCompVal = Integer.MAX_VALUE;
+      }
+
+      if(needsScores) {
+        this.docScores = new LongArrayList();
+        if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+          nullScores = new FloatArrayList();
+        }
+      } else {
+        this.docs = new IntArrayList();
+      }
+    }
+
+    public void setNextReader(LeafReaderContext context) throws IOException {
+      this.minMaxVals = DocValues.getNumeric(context.reader(), this.field);
+    }
+
+    public void collapse(int collapseKey, int contextDoc, int globalDoc) throws IOException {
+
+      // Check to see if we have documents boosted by the QueryElevationComponent
+      if(boosts && mergeBoost.boost(globalDoc)) {
+        boostDocs.add(globalDoc);
+        boostKeys.add(collapseKey);
+        return;
+      }
+
+      int currentVal = (int) minMaxVals.get(contextDoc);
+
+      if(collapseKey != nullValue) {
+        if(cmap.containsKey(collapseKey)) {
+          long pointerValue = cmap.lget();
+          int testValue = (int)pointerValue;
+          if(comp.test(currentVal, testValue)) {
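+            //Keep the pointer in the high 32 bits and replace the low 32 bits with the new min/max value.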
+            pointerValue = (pointerValue-testValue)+currentVal;
+            cmap.lset(pointerValue);
+            int pointer = (int)(pointerValue>>32);
+            if(needsScores) {
+              float score = scorer.score();
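+              //Pack the global doc id into the high 32 bits and the score bits into the low 32 bits.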
+              long docScore = (((long)globalDoc)<<32)+Float.floatToIntBits(score);
+              docScores.set(pointer, docScore);
+            } else {
+              docs.set(pointer, globalDoc);
+            }
+          }
+        } else {
+          ++index;
+          //The index is a pointer into the docs or docScores lists.
+          //Combine the pointer and the current value into a single long.
+          long pointerValue = (((long)index)<<32)+currentVal;
+          cmap.put(collapseKey, pointerValue);
+          if(needsScores) {
+            float score = scorer.score();
+            long docScore = (((long)globalDoc)<<32)+Float.floatToIntBits(score);
+            docScores.add(docScore);
+          } else {
+            docs.add(globalDoc);
+          }
+        }
+      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+        if(comp.test(currentVal, nullCompVal)) {
+          nullCompVal = currentVal;
+          nullDoc = globalDoc;
+          if(needsScores) {
+            nullScore = scorer.score();
+          }
+        }
+      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        this.collapsedSet.set(globalDoc);
+        if(needsScores) {
+          nullScores.add(scorer.score());
+        }
+      }
+    }
+  }
+
+
+  /*
+  *  Strategy for collapsing on a 32 bit numeric field and selecting the group head based
+  *  on the min/max value of a value source function.
+  */
+
+  private class IntValueSourceStrategy extends IntFieldValueStrategy {
+
+    private IntCompare comp;
+    private int nullCompVal;
+
+    private ValueSource valueSource;
+    private FunctionValues functionValues;
+    private Map rcontext;
+    private CollapseScore collapseScore = new CollapseScore();
+    private boolean cscore;
+    private float score;
+    private int index=-1;
+
+    public IntValueSourceStrategy(int maxDoc,
+                                  String funcStr,
+                                  int size,
+                                  String collapseField,
+                                  int nullValue,
+                                  int nullPolicy,
+                                  boolean max,
+                                  boolean needsScores,
+                                  IntIntOpenHashMap boostDocs,
+                                  FunctionQuery funcQuery,
+                                  IndexSearcher searcher) throws IOException {
+
+      super(maxDoc, size, collapseField, null, nullValue, nullPolicy, max, needsScores, boostDocs);
+
+      this.valueSource = funcQuery.getValueSource();
+      this.rcontext = ValueSource.newContext(searcher);
+
+      if(max) {
+        this.nullCompVal = Integer.MIN_VALUE;
+        comp = new MaxIntComp();
+      } else {
+        this.nullCompVal = Integer.MAX_VALUE;
+        comp = new MinIntComp();
+      }
+
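+      //If the function string references cscore(), expose the current score through the value source context.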
+      if(funcStr.indexOf("cscore()") != -1) {
+        this.cscore = true;
+        this.rcontext.put("CSCORE",this.collapseScore);
+      }
+
+      if(needsScores) {
+        this.docScores = new LongArrayList();
+        if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+          nullScores = new FloatArrayList();
+        }
+      } else {
+        this.docs = new IntArrayList();
+      }
+    }
+
+    public void setNextReader(LeafReaderContext context) throws IOException {
+      functionValues = this.valueSource.getValues(rcontext, context);
+    }
+
+    public void collapse(int collapseKey, int contextDoc, int globalDoc) throws IOException {
+
+      // Check to see if we have documents boosted by the QueryElevationComponent
+      if(boosts && mergeBoost.boost(globalDoc)) {
+        boostDocs.add(globalDoc);
+        boostKeys.add(collapseKey);
+        return;
+      }
+
+      if(needsScores || cscore) {
+        this.score = scorer.score();
+        this.collapseScore.score = score;
+      }
+
+      float functionValue = functionValues.floatVal(contextDoc);
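+      //Compare function values through their raw int bits; the ordering matches the floats when values are non-negative.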
+      int currentVal = Float.floatToRawIntBits(functionValue);
+
+      if(collapseKey != nullValue) {
+        if(cmap.containsKey(collapseKey)) {
+          long pointerValue = cmap.lget();
+          int testValue = (int)pointerValue;
+          if(comp.test(currentVal, testValue)) {
+            pointerValue = (pointerValue-testValue)+currentVal;
+            cmap.lset(pointerValue);
+            int pointer = (int)(pointerValue>>32);
+            if(needsScores){
+              //Combine the doc and score into a long
+              long docScore = (((long)globalDoc)<<32)+Float.floatToIntBits(score);
+              docScores.set(pointer, docScore);
+            } else {
+              docs.set(pointer, globalDoc);
+            }
+          }
+        } else {
+          ++index;
+          //Use the index as a pointer into the docScores and docs lists.
+          long pointerValue = (((long)index)<<32)+currentVal;
+          cmap.put(collapseKey, pointerValue);
+          if(needsScores) {
+            //Combine the doc and score into a long
+            long docScore = (((long)globalDoc)<<32)+Float.floatToIntBits(score);
+            docScores.add(docScore);
+          } else {
+            docs.add(globalDoc);
+          }
+        }
+      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+        if(comp.test(currentVal, nullCompVal)) {
+          nullCompVal = currentVal;
+          nullDoc = globalDoc;
+          if(needsScores) {
+            nullScore = scorer.score();
+          }
+        }
+      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        this.collapsedSet.set(globalDoc);
+        if(needsScores) {
+          nullScores.add(scorer.score());
+        }
+      }
+    }
+  }
+
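+  /*
+  * Merges a sorted array of boosted global doc ids with an ascending stream of doc ids.
+  * boost() must be called with non-decreasing ids; reset() rewinds the cursor so the
+  * same array can be merged again during finish().
+  */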
+  static class MergeBoost {
+
+    private int[] boostDocs;
+    private int index = 0;
+
+    public MergeBoost(int[] boostDocs) {
+      this.boostDocs = boostDocs;
+    }
+
+    public void reset() {
+      this.index = 0;
+    }
+
+    public boolean boost(int globalDoc) {
+      if(index == Integer.MIN_VALUE) {
+        return false;
+      } else {
+        while(true) {
+          if(index >= boostDocs.length) {
+            index = Integer.MIN_VALUE;
+            return false;
+          } else {
+            int comp = boostDocs[index];
+            if(comp == globalDoc) {
+              ++index;
+              return true;
+            } else if(comp < globalDoc) {
+              ++index;
+            } else {
+              return false;
+            }
+          }
+        }
+      }
+    }
   }
 
   private interface IntCompare {
diff --git a/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java b/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java
index 65af211..b72ee11 100644
--- a/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java
+++ b/solr/core/src/java/org/apache/solr/search/DelegatingCollector.java
@@ -75,11 +75,6 @@
     leafDelegate = delegate.getLeafCollector(context);
   }
 
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return leafDelegate.acceptsDocsOutOfOrder();
-  }
-
   public void finish() throws IOException {
     if(delegate instanceof DelegatingCollector) {
       ((DelegatingCollector) delegate).finish();
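
For context on the deletions in this and the next few files: Lucene 5.0 removed
acceptsDocsOutOfOrder() from the collector API, since hits are now always
delivered in increasing doc-id order within a segment, so these overrides are
dead code. A minimal collector under the new API, mirroring the anonymous
SimpleCollector instances later in this patch (sketch):

    // Sketch: SimpleCollector is the per-segment convenience base class;
    // there is no longer an out-of-order mode to declare.
    Collector counter = new SimpleCollector() {
      int count;
      @Override
      public void collect(int doc) { // doc is segment-local
        count++;
      }
    };
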
diff --git a/solr/core/src/java/org/apache/solr/search/DocSetCollector.java b/solr/core/src/java/org/apache/solr/search/DocSetCollector.java
index 09412d1..5408901 100644
--- a/solr/core/src/java/org/apache/solr/search/DocSetCollector.java
+++ b/solr/core/src/java/org/apache/solr/search/DocSetCollector.java
@@ -87,9 +87,4 @@
   protected void doSetNextReader(LeafReaderContext context) throws IOException {
     this.base = context.docBase;
   }
-
-  @Override
-  public boolean acceptsDocsOutOfOrder() {
-    return false;
-  }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/EarlyTerminatingCollector.java b/solr/core/src/java/org/apache/solr/search/EarlyTerminatingCollector.java
index bc4ef3e..e05eee3 100644
--- a/solr/core/src/java/org/apache/solr/search/EarlyTerminatingCollector.java
+++ b/solr/core/src/java/org/apache/solr/search/EarlyTerminatingCollector.java
@@ -63,16 +63,6 @@
 
     return new FilterLeafCollector(super.getLeafCollector(context)) {
 
-      /**
-       * This collector requires that docs be collected in order, otherwise
-       * the computed number of scanned docs in the resulting
-       * {@link EarlyTerminatingCollectorException} will be meaningless.
-       */
-      @Override
-      public boolean acceptsDocsOutOfOrder() {
-        return false;
-      }
-
       @Override
       public void collect(int doc) throws IOException {
         super.collect(doc);
diff --git a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
index 0bafbc3..bd8fbc2 100644
--- a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
@@ -26,7 +26,6 @@
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.common.params.SolrParams;
 
-
 import java.io.IOException;
 import java.util.Map;
 import java.util.Set;
@@ -129,22 +128,27 @@
   private class ExportCollector extends TopDocsCollector  {
 
     private FixedBitSet[] sets;
-    private FixedBitSet set;
 
     public ExportCollector(FixedBitSet[] sets) {
       super(null);
       this.sets = sets;
     }
-    
-    public void doSetNextReader(LeafReaderContext context) throws IOException {
-      this.set = new FixedBitSet(context.reader().maxDoc());
-      this.sets[context.ord] = set;
 
-    }
-    
-    public void collect(int docId) throws IOException{
-      ++totalHits;
-      set.set(docId);
+    @Override
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      final FixedBitSet set = new FixedBitSet(context.reader().maxDoc());
+      this.sets[context.ord] = set;
+      return new LeafCollector() {
+        
+        @Override
+        public void setScorer(Scorer scorer) throws IOException {}
+        
+        @Override
+        public void collect(int docId) throws IOException{
+          ++totalHits;
+          set.set(docId);
+        }
+      };
     }
 
     private ScoreDoc[] getScoreDocs(int howMany) {
@@ -170,12 +174,5 @@
       return new TopDocs(totalHits, scoreDocs, 0.0f);
     }
 
-    public void setScorer(Scorer scorer) throws IOException {
-
-    }
-    
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
-    }
   }
 }
\ No newline at end of file
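
The rewritten ExportCollector above stores segment-local hits in one FixedBitSet
per leaf, indexed by context.ord. A hedged sketch of walking such per-leaf sets
back to global doc ids afterwards, assuming Lucene 5's BitSetIterator and the
usual docBase offsets:

    // Sketch: convert per-leaf set bits back into global doc ids.
    for (LeafReaderContext leaf : reader.leaves()) {
      FixedBitSet set = sets[leaf.ord];
      if (set == null) continue; // this leaf produced no hits
      BitSetIterator it = new BitSetIterator(set, set.cardinality());
      for (int local = it.nextDoc(); local != DocIdSetIterator.NO_MORE_DOCS;
           local = it.nextDoc()) {
        int globalDoc = leaf.docBase + local; // segment-local -> global
      }
    }
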
diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java
index acadc59..df12669 100644
--- a/solr/core/src/java/org/apache/solr/search/Grouping.java
+++ b/solr/core/src/java/org/apache/solr/search/Grouping.java
@@ -450,12 +450,9 @@
     }
     try {
       searcher.search(query, luceneFilter, collector);
-    } catch (TimeLimitingCollector.TimeExceededException x) {
+    } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
       logger.warn( "Query: " + query + "; " + x.getMessage() );
       qr.setPartialResults(true);
-    } catch (ExitableDirectoryReader.ExitingReaderException e) {
-      logger.warn( "Query: " + query + "; " + e.getMessage() );
-      qr.setPartialResults(true);
     }
   }
 
@@ -879,9 +876,9 @@
     TopDocsCollector newCollector(Sort sort, boolean needScores) throws IOException {
       int groupDocsToCollect = getMax(groupOffset, docsPerGroup, maxDoc);
       if (sort == null || sort == Sort.RELEVANCE) {
-        return TopScoreDocCollector.create(groupDocsToCollect, true);
+        return TopScoreDocCollector.create(groupDocsToCollect);
       } else {
-        return TopFieldCollector.create(searcher.weightSort(sort), groupDocsToCollect, false, needScores, needScores, true);
+        return TopFieldCollector.create(searcher.weightSort(sort), groupDocsToCollect, false, needScores, needScores);
       }
     }
 
@@ -930,7 +927,7 @@
      */
     @Override
     protected void prepare() throws IOException {
-      Map context = ValueSource.newContext(searcher);
+      context = ValueSource.newContext(searcher);
       groupBy.createWeight(context, searcher);
       actualGroupsToFind = getMax(offset, numGroups, maxDoc);
     }
diff --git a/solr/core/src/java/org/apache/solr/search/QueryParsing.java b/solr/core/src/java/org/apache/solr/search/QueryParsing.java
index 4fd12c1..70e0d2e 100644
--- a/solr/core/src/java/org/apache/solr/search/QueryParsing.java
+++ b/solr/core/src/java/org/apache/solr/search/QueryParsing.java
@@ -33,7 +33,6 @@
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.search.WildcardQuery;
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.MapSolrParams;
@@ -45,8 +44,8 @@
 import org.apache.solr.schema.SchemaField;
 
 import java.io.IOException;
-import java.util.Collections;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
@@ -222,17 +221,6 @@
     return new MapSolrParams(localParams);
   }
 
-  /** 
-   * Returns the Sort object represented by the string, or null if default sort 
-   * by score descending should be used.
-   * @see #parseSortSpec
-   * @deprecated use {@link #parseSortSpec} 
-   */
-  @Deprecated
-  public static Sort parseSort(String sortSpec, SolrQueryRequest req) {
-    return parseSortSpec(sortSpec, req).getSort();
-  }
-
   /**
    * <p>
    * The form of the sort specification string currently parsed is:
diff --git a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
index 5e744be..934094c 100644
--- a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
@@ -18,6 +18,7 @@
 package org.apache.solr.search;
 
 import com.carrotsearch.hppc.IntIntOpenHashMap;
+
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -33,7 +34,7 @@
 import org.apache.solr.handler.component.MergeStrategy;
 import org.apache.solr.handler.component.QueryElevationComponent;
 import org.apache.solr.request.SolrQueryRequest;
-
+import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.IndexSearcher;
@@ -43,6 +44,7 @@
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.ScoreDoc;
+
 import com.carrotsearch.hppc.IntFloatOpenHashMap;
 
 import org.apache.lucene.util.Bits;
@@ -246,35 +248,24 @@
       this.boostedPriority = boostedPriority;
       Sort sort = cmd.getSort();
       if(sort == null) {
-        this.mainCollector = TopScoreDocCollector.create(Math.max(this.reRankDocs, length),true);
+        this.mainCollector = TopScoreDocCollector.create(Math.max(this.reRankDocs, length));
       } else {
         sort = sort.rewrite(searcher);
-        this.mainCollector = TopFieldCollector.create(sort, Math.max(this.reRankDocs, length), false, true, true, true);
+        this.mainCollector = TopFieldCollector.create(sort, Math.max(this.reRankDocs, length), false, true, true);
       }
       this.searcher = searcher;
       this.reRankWeight = reRankWeight;
     }
 
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
-    }
-
-    public void collect(int doc) throws IOException {
-      mainCollector.collect(doc);
-    }
-
-    public void setScorer(Scorer scorer) throws IOException{
-      mainCollector.setScorer(scorer);
-    }
-
-    public void doSetNextReader(LeafReaderContext context) throws IOException{
-      mainCollector.getLeafCollector(context);
-    }
-
     public int getTotalHits() {
       return mainCollector.getTotalHits();
     }
 
+    @Override
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      return mainCollector.getLeafCollector(context);
+    }
+
     public TopDocs topDocs(int start, int howMany) {
 
       try {
@@ -387,6 +378,7 @@
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
       }
     }
+
   }
 
   public class BoostedComp implements Comparator {
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 1efa33b..479a33e 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -234,12 +234,9 @@
         ((DelegatingCollector)collector).finish();
       }
     }
-    catch( TimeLimitingCollector.TimeExceededException x ) {
+    catch( TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x ) {
       log.warn( "Query: " + query + "; " + x.getMessage() );
       qr.setPartialResults(true);
-    } catch ( ExitableDirectoryReader.ExitingReaderException e) {
-      log.warn("Query: " + query + "; " + e.getMessage());
-      qr.setPartialResults(true);
     }
   }
   
@@ -1597,7 +1594,7 @@
 
     if (null == cmd.getSort()) {
       assert null == cmd.getCursorMark() : "have cursor but no sort";
-      return TopScoreDocCollector.create(len, true);
+      return TopScoreDocCollector.create(len);
     } else {
       // we have a sort
       final boolean needScores = (cmd.getFlags() & GET_SCORES) != 0;
@@ -1609,7 +1606,7 @@
       final boolean fillFields = (null != cursor);
       final FieldDoc searchAfter = (null != cursor ? cursor.getSearchAfterFieldDoc() : null);
       return TopFieldCollector.create(weightedSort, len, searchAfter,
-                                      fillFields, needScores, needScores, true); 
+                                      fillFields, needScores, needScores); 
     }
   }
 
@@ -1644,10 +1641,6 @@
           public void collect(int doc) {
             numHits[0]++;
           }
-          @Override
-          public boolean acceptsDocsOutOfOrder() {
-            return true;
-          }
         };
       } else {
         collector = new SimpleCollector() {
@@ -1662,10 +1655,6 @@
             float score = scorer.score();
             if (score > topscore[0]) topscore[0]=score;            
           }
-          @Override
-          public boolean acceptsDocsOutOfOrder() {
-            return true;
-          }
         };
       }
       
@@ -1750,11 +1739,6 @@
             float score = scorer.score();
             if (score > topscore[0]) topscore[0] = score;
           }
-          
-          @Override
-          public boolean acceptsDocsOutOfOrder() {
-            return true;
-          }
         };
         
         collector = MultiCollector.wrap(setCollector, topScoreCollector);
@@ -2055,16 +2039,17 @@
     int end=0;
     int readerIndex = 0;
 
+    LeafCollector leafCollector = null;
     while (iter.hasNext()) {
       int doc = iter.nextDoc();
       while (doc>=end) {
         LeafReaderContext leaf = leafContexts.get(readerIndex++);
         base = leaf.docBase;
         end = base + leaf.reader().maxDoc();
-        topCollector.getLeafCollector(leaf);
+        leafCollector = topCollector.getLeafCollector(leaf);
         // we should never need to set the scorer given the settings for the collector
       }
-      topCollector.collect(doc-base);
+      leafCollector.collect(doc-base);
     }
     
     TopDocs topDocs = topCollector.topDocs(0, nDocs);
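
Note the fix in the last hunk: getLeafCollector(leaf) returns the per-segment
collector rather than mutating the parent, so the old code discarded that
return value and collection never went through the leaf. The corrected pattern,
generalized (sketch):

    // Sketch: replay a sorted stream of global doc ids into a Collector.
    LeafCollector leafCollector = null;
    int readerIndex = 0, base = 0, end = 0;
    while (iter.hasNext()) {
      int doc = iter.nextDoc();
      while (doc >= end) { // advance to the segment owning this doc id
        LeafReaderContext leaf = leafContexts.get(readerIndex++);
        base = leaf.docBase;
        end = base + leaf.reader().maxDoc();
        leafCollector = topCollector.getLeafCollector(leaf);
      }
      leafCollector.collect(doc - base); // collect with the segment-local id
    }
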
diff --git a/solr/core/src/java/org/apache/solr/search/SortSpec.java b/solr/core/src/java/org/apache/solr/search/SortSpec.java
index 6655aa6..45db657 100644
--- a/solr/core/src/java/org/apache/solr/search/SortSpec.java
+++ b/solr/core/src/java/org/apache/solr/search/SortSpec.java
@@ -19,13 +19,11 @@
 
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
-
 import org.apache.solr.schema.SchemaField;
 
 import java.util.Arrays;
-import java.util.List;
-import java.util.ArrayList;
 import java.util.Collections;
+import java.util.List;
 /***
  * SortSpec encapsulates a Lucene Sort and a count of the number of documents
  * to return.
@@ -44,28 +42,6 @@
     setSortAndFields(sort, Arrays.asList(fields));
   }
 
-  /** @deprecated Specify both Sort and SchemaField[] when constructing */
-  @Deprecated
-  public SortSpec(Sort sort, int num) {
-    this(sort,0,num);
-  }
-
-  /** @deprecated Specify both Sort and SchemaField[] when constructing */
-  @Deprecated
-  public SortSpec(Sort sort, int offset, int num) {
-    setSort(sort);
-    this.offset=offset;
-    this.num=num;
-  }
-  
-  /** @deprecated use {@link #setSortAndFields} */
-  @Deprecated
-  public void setSort( Sort s )
-  {
-    sort = s;
-    fields = Collections.unmodifiableList(Arrays.asList(new SchemaField[s.getSort().length]));
-  }
-
   /** 
    * the specified SchemaFields must correspond one to one with the Sort's SortFields, 
    * using null where appropriate.
diff --git a/solr/core/src/java/org/apache/solr/search/SpatialFilterQParser.java b/solr/core/src/java/org/apache/solr/search/SpatialFilterQParser.java
index b29db72..5b6fa3d 100644
--- a/solr/core/src/java/org/apache/solr/search/SpatialFilterQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/SpatialFilterQParser.java
@@ -18,7 +18,6 @@
 
 
 import org.apache.lucene.search.Query;
-import com.spatial4j.core.distance.DistanceUtils;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.SpatialParams;
@@ -76,10 +75,11 @@
       FieldType type = sf.getType();
 
       if (type instanceof SpatialQueryable) {
-        double radius = localParams.getDouble(SpatialParams.SPHERE_RADIUS, DistanceUtils.EARTH_MEAN_RADIUS_KM);
+        SpatialQueryable queryable = ((SpatialQueryable)type);
+        double radius = localParams.getDouble(SpatialParams.SPHERE_RADIUS, queryable.getSphereRadius());
         SpatialOptions opts = new SpatialOptions(pointStr, dist, sf, measStr, radius);
         opts.bbox = bbox;
-        result = ((SpatialQueryable)type).createSpatialQuery(this, opts);
+        result = queryable.createSpatialQuery(this, opts);
       } else {
         throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "The field " + fields[0]
                 + " does not support spatial filtering");
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java
index c81db42..2936395 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java
@@ -17,6 +17,10 @@
  * limitations under the License.
  */
 
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
 import com.spatial4j.core.context.SpatialContext;
 import com.spatial4j.core.distance.DistanceUtils;
 import com.spatial4j.core.shape.Point;
@@ -33,12 +37,9 @@
 import org.apache.solr.search.FunctionQParser;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.search.ValueSourceParser;
+import org.apache.solr.util.DistanceUnits;
 import org.apache.solr.util.SpatialUtils;
 
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
 /**
  * Parses "geodist" creating {@link HaversineConstFunction} or {@link HaversineFunction}
  * or calling {@link SpatialStrategy#makeDistanceValueSource(com.spatial4j.core.shape.Point,double)}.
@@ -133,8 +134,11 @@
             " the point must be supplied as constants");
       // note: uses Haversine by default but can be changed via distCalc=...
       SpatialStrategy strategy = ((SpatialStrategyMultiValueSource) mv2).strategy;
+      DistanceUnits distanceUnits = ((SpatialStrategyMultiValueSource) mv2).distanceUnits;
       Point queryPoint = strategy.getSpatialContext().makePoint(constants[1], constants[0]);
-      return strategy.makeDistanceValueSource(queryPoint, DistanceUtils.DEG_TO_KM);
+      if (distanceUnits == DistanceUnits.BACKCOMPAT)
+        distanceUnits = DistanceUnits.KILOMETERS;
+      return strategy.makeDistanceValueSource(queryPoint, distanceUnits.multiplierFromDegreesToThisUnit());
     }
 
     if (constants != null && other instanceof VectorValueSource) {
@@ -180,7 +184,7 @@
     FieldType type = sf.getType();
     if (type instanceof AbstractSpatialFieldType) {
       AbstractSpatialFieldType asft = (AbstractSpatialFieldType) type;
-      return new SpatialStrategyMultiValueSource(asft.getStrategy(sfield));
+      return new SpatialStrategyMultiValueSource(asft.getStrategy(sfield), asft.getDistanceUnits());
     }
     ValueSource vs = type.getValueSource(sf, fp);
     if (vs instanceof MultiValueSource) {
@@ -194,10 +198,12 @@
   private static class SpatialStrategyMultiValueSource extends VectorValueSource {
 
     final SpatialStrategy strategy;
+    final DistanceUnits distanceUnits;
 
-    public SpatialStrategyMultiValueSource(SpatialStrategy strategy) {
+    public SpatialStrategyMultiValueSource(SpatialStrategy strategy, DistanceUnits distanceUnits) {
       super(Collections.EMPTY_LIST);
       this.strategy = strategy;
+      this.distanceUnits = distanceUnits;
     }
 
     @Override
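
geodist's multiplier now comes from the field type's configured DistanceUnits
rather than the hard-coded degrees-to-kilometers constant, with the BACKCOMPAT
sentinel preserving the old kilometers behavior. Roughly, the multiplier is
about 111.2 for kilometers and 69.1 for miles (one degree of great circle), so
the conversion reduces to (sketch):

    // Sketch: distance value source in the schema's configured units.
    if (distanceUnits == DistanceUnits.BACKCOMPAT) {
      distanceUnits = DistanceUnits.KILOMETERS; // pre-5.0 default
    }
    ValueSource dist = strategy.makeDistanceValueSource(
        queryPoint, distanceUnits.multiplierFromDegreesToThisUnit());
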
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
index e82170b..8bc96d3 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java
@@ -228,12 +228,9 @@
 
     try {
       searcher.search(query, luceneFilter, collector);
-    } catch (TimeLimitingCollector.TimeExceededException x) {
+    } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
       partialResults = true;
       logger.warn( "Query: " + query + "; " + x.getMessage() );
-    } catch (ExitableDirectoryReader.ExitingReaderException e) {
-      partialResults = true;
-      logger.warn( "Query: " + query + "; " + e.getMessage() );
     }
 
     if (includeHitCount) {
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
index 6f1d846..5a94236 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/command/QueryCommand.java
@@ -126,9 +126,9 @@
   @Override
   public List<Collector> create() throws IOException {
     if (sort == null || sort == Sort.RELEVANCE) {
-      collector = TopScoreDocCollector.create(docsToCollect, true);
+      collector = TopScoreDocCollector.create(docsToCollect);
     } else {
-      collector = TopFieldCollector.create(sort, docsToCollect, true, needScores, needScores, true);
+      collector = TopFieldCollector.create(sort, docsToCollect, true, needScores, needScores);
     }
     filterCollector = new FilterCollector(docSet, collector);
     return Arrays.asList((Collector) filterCollector);
diff --git a/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java b/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java
index 1572097..37cb2a0 100644
--- a/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java
+++ b/solr/core/src/java/org/apache/solr/servlet/LoadAdminUiServlet.java
@@ -17,20 +17,19 @@
 
 package org.apache.solr.servlet;
 
-import java.io.InputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.nio.charset.StandardCharsets;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrCore;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.StringEscapeUtils;
-import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.SolrCore;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 
 /**
  * A simple servlet to load the Solr Admin UI
@@ -63,7 +62,7 @@
         };
         String[] replace = new String[] {
             StringEscapeUtils.escapeJavaScript(request.getContextPath()),
-            StringEscapeUtils.escapeJavaScript(cores.getAdminPath()),
+            StringEscapeUtils.escapeJavaScript(CoreContainer.CORES_HANDLER_PATH),
             StringEscapeUtils.escapeJavaScript(pack.getSpecificationVersion())
         };
         
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index 2ea2a3e..42e6205 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -17,9 +17,37 @@
 
 package org.apache.solr.servlet;
 
+import java.io.ByteArrayInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.http.client.HttpClient;
+import org.apache.http.Header;
+import org.apache.http.HeaderIterator;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpEntityEnclosingRequest;
+import org.apache.http.HttpResponse;
 import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
 import org.apache.http.client.methods.HttpGet;
@@ -28,13 +56,10 @@
 import org.apache.http.client.methods.HttpPut;
 import org.apache.http.client.methods.HttpRequestBase;
 import org.apache.http.entity.InputStreamEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.util.EntityUtils;
-import org.apache.http.Header;
-import org.apache.http.HeaderIterator;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpEntityEnclosingRequest;
-import org.apache.http.HttpResponse;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.Aliases;
@@ -57,7 +82,6 @@
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
-import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.handler.ContentStreamHandlerBase;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequestBase;
@@ -72,31 +96,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import java.io.ByteArrayInputStream;
-import java.io.EOFException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
 /**
  * This filter looks at the incoming URL and maps it to handlers defined in solrconfig.xml
  *
@@ -113,7 +112,7 @@
 
   protected String pathPrefix = null; // strip this from the beginning of a path
   protected String abortErrorMessage = null;
-  protected final HttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
+  protected final CloseableHttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
   
   public SolrDispatchFilter() {
   }
@@ -179,7 +178,7 @@
   protected CoreContainer createCoreContainer() {
     SolrResourceLoader loader = new SolrResourceLoader(SolrResourceLoader.locateSolrHome());
     ConfigSolr config = loadConfigSolr(loader);
-    CoreContainer cores = new CoreContainer(loader, config);
+    CoreContainer cores = new CoreContainer(config);
     cores.load();
     return cores;
   }
@@ -190,10 +189,14 @@
   
   @Override
   public void destroy() {
-    if (cores != null) {
-      cores.shutdown();
-      cores = null;
-    }    
+    try {
+      if (cores != null) {
+        cores.shutdown();
+        cores = null;
+      }
+    } finally {
+      IOUtils.closeQuietly(httpClient);
+    }
   }
   
   @Override
@@ -247,22 +250,18 @@
           path = path.substring( 0, idx );
         }
 
-        // Check for the core admin page
-        if( path.equals( cores.getAdminPath() ) ) {
-          handler = cores.getMultiCoreHandler();
-          solrReq =  SolrRequestParsers.DEFAULT.parse(null,path, req);
+
+        boolean usingAliases = false;
+        List<String> collectionsList = null;
+
+        // Check for container handlers
+        handler = cores.getRequestHandler(path);
+        if (handler != null) {
+          solrReq = SolrRequestParsers.DEFAULT.parse(null, path, req);
           handleAdminRequest(req, response, handler, solrReq);
           return;
         }
-        boolean usingAliases = false;
-        List<String> collectionsList = null;
-        // Check for the core admin collections url
-        handler = cores.getRequestHandler(path);
-        if( handler!= null ) {
-          solrReq =  SolrRequestParsers.DEFAULT.parse(null,path, req);
-          handleAdminRequest(req, response, handler, solrReq);
-          return;
-        } else {
+        else {
           //otherwise, we should find a core from the path
           idx = path.indexOf( "/", 1 );
           if( idx > 1 ) {
@@ -527,7 +526,6 @@
       
       urlstr += queryString == null ? "" : "?" + queryString;
 
-      URL url = new URL(urlstr);
       boolean isPostOrPutRequest = "POST".equals(req.getMethod()) || "PUT".equals(req.getMethod());
 
       if ("GET".equals(req.getMethod())) {
diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
index 6a04a5f..74191aa 100644
--- a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
+++ b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
@@ -23,7 +23,6 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.List;
 
 import org.apache.lucene.search.spell.Dictionary;
@@ -123,17 +122,19 @@
     });
 
     // if a store directory is provided, create it or load the lookup from its stored content
-    if (store != null) {
+    if (store != null && !store.isEmpty()) {
       storeDir = new File(store);
       if (!storeDir.isAbsolute()) {
         storeDir = new File(core.getDataDir() + File.separator + storeDir);
       }
       if (!storeDir.exists()) {
         storeDir.mkdirs();
-      } else {
-        // attempt reload of the stored lookup
+      } else if (getStoreFile().exists()) {
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("attempt reload of the stored lookup from file " + getStoreFile());
+        }
         try {
-          lookup.load(new FileInputStream(new File(storeDir, factory.storeFileName())));
+          lookup.load(new FileInputStream(getStoreFile()));
         } catch (IOException e) {
           LOG.warn("Loading stored lookup data failed, possibly not cached yet");
         }
@@ -156,12 +157,12 @@
 
   /** Build the underlying Lucene Suggester */
   public void build(SolrCore core, SolrIndexSearcher searcher) throws IOException {
-    LOG.info("build()");
+    LOG.info("SolrSuggester.build(" + name + ")");
 
     dictionary = dictionaryFactory.create(core, searcher);
     lookup.build(dictionary);
     if (storeDir != null) {
-      File target = new File(storeDir, factory.storeFileName());
+      File target = getStoreFile();
       if(!lookup.store(new FileOutputStream(target))) {
         LOG.error("Store Lookup build failed");
       } else {
@@ -172,21 +173,35 @@
 
   /** Reloads the underlying Lucene Suggester */
   public void reload(SolrCore core, SolrIndexSearcher searcher) throws IOException {
-    LOG.info("reload()");
+    LOG.info("SolrSuggester.reload(" + name + ")");
     if (dictionary == null && storeDir != null) {
-      // this may be a firstSearcher event, try loading it
-      FileInputStream is = new FileInputStream(new File(storeDir, factory.storeFileName()));
-      try {
-        if (lookup.load(is)) {
-          return;  // loaded ok
+      File lookupFile = getStoreFile();
+      if (lookupFile.exists()) {
+        // this may be a firstSearcher event, try loading it
+        FileInputStream is = new FileInputStream(lookupFile);
+        try {
+          if (lookup.load(is)) {
+            return;  // loaded ok
+          }
+        } finally {
+          IOUtils.closeWhileHandlingException(is);
         }
-      } finally {
-        IOUtils.closeWhileHandlingException(is);
+      } else {
+        LOG.info("lookup file doesn't exist");
       }
-      LOG.debug("load failed, need to build Lookup again");
     }
-    // loading was unsuccessful - build it again
-    build(core, searcher);
+  }
+
+  /**
+   * Returns the file where this suggester's lookup is persisted.
+   * @return the store file, or null if no storeDir was configured
+   */
+  public File getStoreFile() {
+    if (storeDir == null) {
+      return null;
+    }
+    return new File(storeDir, factory.storeFileName());
   }
 
   /** Returns suggestions based on the {@link SuggesterOptions} passed */
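
The new getStoreFile() helper centralizes the storeDir + factory.storeFileName()
join that build() and reload() previously duplicated; it returns null when no
store directory is configured, so callers must check. Typical use (sketch):

    // Sketch: only attempt a load when persistence is configured and present.
    File lookupFile = suggester.getStoreFile();
    if (lookupFile != null && lookupFile.exists()) {
      // reload() will try lookup.load(new FileInputStream(lookupFile))
    }
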
diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
index 3086a91..c9ab33c 100644
--- a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
+++ b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
@@ -22,17 +22,16 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.jvm.JvmMetrics;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.search.SolrCacheBase;
 
 /**
+ * A {@link SolrInfoMBean} that provides metrics on block cache operations.
+ *
  * @lucene.experimental
  */
-public class Metrics implements Updater {
+public class Metrics extends SolrCacheBase {
   
   public static class MethodCall {
     public AtomicLong invokes = new AtomicLong();
@@ -60,14 +59,12 @@
   public AtomicLong indexMemoryUsage = new AtomicLong(0);
   public AtomicLong segmentCount = new AtomicLong(0);
 
-  private MetricsRecord metricsRecord;
   private long previous = System.nanoTime();
 
   public static void main(String[] args) throws InterruptedException {
-    Configuration conf = new Configuration();
-    Metrics metrics = new Metrics(conf);
+    Metrics metrics = new Metrics();
     MethodCall methodCall = new MethodCall();
-    metrics.methodCalls.put("test",methodCall);
+    metrics.methodCalls.put("test", methodCall);
     for (int i = 0; i < 100; i++) {
       metrics.blockCacheHit.incrementAndGet();
       metrics.blockCacheMiss.incrementAndGet();
@@ -77,53 +74,50 @@
     }
   }
 
-  public Metrics(Configuration conf) {
-    JvmMetrics.init("blockcache", Long.toString(System.currentTimeMillis()));
-    MetricsContext metricsContext = MetricsUtil.getContext("blockcache");
-    metricsRecord = MetricsUtil.createRecord(metricsContext, "metrics");
-    metricsContext.registerUpdater(this);
-  }
-
-  @Override
-  public void doUpdates(MetricsContext context) {
-    synchronized (this) {
-      long now = System.nanoTime();
-      float seconds = (now - previous) / 1000000000.0f;
-      metricsRecord.setMetric("blockcache.hit", getPerSecond(blockCacheHit.getAndSet(0), seconds));
-      metricsRecord.setMetric("blockcache.miss", getPerSecond(blockCacheMiss.getAndSet(0), seconds));
-      metricsRecord.setMetric("blockcache.eviction", getPerSecond(blockCacheEviction.getAndSet(0), seconds));
-      metricsRecord.setMetric("blockcache.size", blockCacheSize.get());
-      metricsRecord.setMetric("row.reads", getPerSecond(rowReads.getAndSet(0), seconds));
-      metricsRecord.setMetric("row.writes", getPerSecond(rowWrites.getAndSet(0), seconds));
-      metricsRecord.setMetric("record.reads", getPerSecond(recordReads.getAndSet(0), seconds));
-      metricsRecord.setMetric("record.writes", getPerSecond(recordWrites.getAndSet(0), seconds));
-      metricsRecord.setMetric("query.external", getPerSecond(queriesExternal.getAndSet(0), seconds));
-      metricsRecord.setMetric("query.internal", getPerSecond(queriesInternal.getAndSet(0), seconds));
-      metricsRecord.setMetric("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
-      metricsRecord.setMetric("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
-      for (Entry<String,MethodCall> entry : methodCalls.entrySet()) {
-        String key = entry.getKey();
-        MethodCall value = entry.getValue();
-        long invokes = value.invokes.getAndSet(0);
-        long times = value.times.getAndSet(0);
-        
-        float avgTimes = (times / (float) invokes) / 1000000000.0f;
-        metricsRecord.setMetric("methodcalls." + key + ".count", getPerSecond(invokes, seconds));
-        metricsRecord.setMetric("methodcalls." + key + ".time", avgTimes);
-      }
-      metricsRecord.setMetric("tables", tableCount.get());
-      metricsRecord.setMetric("rows", rowCount.get());
-      metricsRecord.setMetric("records", recordCount.get());
-      metricsRecord.setMetric("index.count", indexCount.get());
-      metricsRecord.setMetric("index.memoryusage", indexMemoryUsage.get());
-      metricsRecord.setMetric("index.segments", segmentCount.get());
-      previous = now;
+  public NamedList<Number> getStatistics() {
+    NamedList<Number> stats = new NamedList<Number>();
+    
+    long now = System.nanoTime();
+    float seconds = (now - previous) / 1000000000.0f;
+    
+    long hits = blockCacheHit.getAndSet(0);
+    long lookups = hits + blockCacheMiss.getAndSet(0);
+    
+    stats.add("lookups", getPerSecond(lookups, seconds));
+    stats.add("hits", getPerSecond(hits, seconds));
+    stats.add("hitratio", calcHitRatio(lookups, hits));
+    stats.add("evictions", getPerSecond(blockCacheEviction.getAndSet(0), seconds));
+    stats.add("size", blockCacheSize.get());
+    stats.add("row.reads", getPerSecond(rowReads.getAndSet(0), seconds));
+    stats.add("row.writes", getPerSecond(rowWrites.getAndSet(0), seconds));
+    stats.add("record.reads", getPerSecond(recordReads.getAndSet(0), seconds));
+    stats.add("record.writes", getPerSecond(recordWrites.getAndSet(0), seconds));
+    stats.add("query.external", getPerSecond(queriesExternal.getAndSet(0), seconds));
+    stats.add("query.internal", getPerSecond(queriesInternal.getAndSet(0), seconds));
+    stats.add("buffercache.allocations", getPerSecond(shardBuffercacheAllocate.getAndSet(0), seconds));
+    stats.add("buffercache.lost", getPerSecond(shardBuffercacheLost.getAndSet(0), seconds));
+    for (Entry<String,MethodCall> entry : methodCalls.entrySet()) {
+      String key = entry.getKey();
+      MethodCall value = entry.getValue();
+      long invokes = value.invokes.getAndSet(0);
+      long times = value.times.getAndSet(0);
+      
+      float avgTimes = (times / (float) invokes) / 1000000000.0f;
+      stats.add("methodcalls." + key + ".count", getPerSecond(invokes, seconds));
+      stats.add("methodcalls." + key + ".time", avgTimes);
     }
-    metricsRecord.update();
+    stats.add("tables", tableCount.get());
+    stats.add("rows", rowCount.get());
+    stats.add("records", recordCount.get());
+    stats.add("index.count", indexCount.get());
+    stats.add("index.memoryusage", indexMemoryUsage.get());
+    stats.add("index.segments", segmentCount.get());
+    previous = now;
+    
+    return stats;
   }
 
   private float getPerSecond(long value, float seconds) {
     return (float) (value / seconds);
   }
-
 }
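
Because getStatistics() drains every counter with getAndSet(0) and advances the
previous timestamp, each reported value is a rate over the interval since the
previous call, so back-to-back calls read near zero; calcHitRatio() is
presumably what extending SolrCacheBase supplies. Note also that avgTimes
divides by invokes, which yields NaN for a method not called during the
interval. The rate arithmetic in isolation (sketch):

    // Sketch: per-second rate over the sampling interval.
    long now = System.nanoTime();
    float seconds = (now - previous) / 1000000000.0f; // interval length
    long hits = blockCacheHit.getAndSet(0);           // drain since last call
    float hitsPerSecond = hits / seconds;
    previous = now;                                   // start the next interval
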
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
index 75f9d28..92a6f30 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
@@ -85,11 +85,11 @@
           }
           continue;
         }
-        org.apache.solr.util.IOUtils.closeQuietly(fileSystem);
+        org.apache.solr.common.util.IOUtils.closeQuietly(fileSystem);
         throw new RuntimeException(
             "Problem creating directory: " + hdfsDirPath, e);
       } catch (Exception e) {
-        org.apache.solr.util.IOUtils.closeQuietly(fileSystem);
+        org.apache.solr.common.util.IOUtils.closeQuietly(fileSystem);
         throw new RuntimeException(
             "Problem creating directory: " + hdfsDirPath, e);
       }
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java
index 3c2dece..a3525ae 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLockFactory.java
@@ -29,7 +29,7 @@
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockFactory;
 import org.apache.lucene.store.LockReleaseFailedException;
-import org.apache.solr.util.IOUtils;
+import org.apache.solr.common.util.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
index 1ff0e19..8aa2178 100644
--- a/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
+++ b/solr/core/src/java/org/apache/solr/update/DefaultSolrCoreState.java
@@ -264,7 +264,7 @@
   }
   
   protected SolrIndexWriter createMainIndexWriter(SolrCore core, String name) throws IOException {
-    return SolrIndexWriter.create(name, core.getNewIndexDir(),
+    return SolrIndexWriter.create(core, name, core.getNewIndexDir(),
         core.getDirectoryFactory(), false, core.getLatestSchema(),
         core.getSolrConfig().indexConfig, core.getDeletionPolicy(), core.getCodec());
   }
diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
index 3943c7f..814d7a4 100644
--- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
+++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java
@@ -21,9 +21,11 @@
 package org.apache.solr.update;
 
 import org.apache.lucene.document.Document;
+import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SlowCodecReaderWrapper;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.BooleanClause;
@@ -233,11 +235,11 @@
             }
 
             if (cmd.isBlock()) {
-              writer.updateDocuments(updateTerm, cmd, schema.getIndexAnalyzer());
+              writer.updateDocuments(updateTerm, cmd);
             } else {
               Document luceneDocument = cmd.getLuceneDocument();
               // SolrCore.verbose("updateDocument",updateTerm,luceneDocument,writer);
-              writer.updateDocument(updateTerm, luceneDocument, schema.getIndexAnalyzer());
+              writer.updateDocument(updateTerm, luceneDocument);
             }
             // SolrCore.verbose("updateDocument",updateTerm,"DONE");
             
@@ -262,9 +264,9 @@
         } else {
           // allow duplicates
           if (cmd.isBlock()) {
-            writer.addDocuments(cmd, schema.getIndexAnalyzer());
+            writer.addDocuments(cmd);
           } else {
-            writer.addDocument(cmd.getLuceneDocument(), schema.getIndexAnalyzer());
+            writer.addDocument(cmd.getLuceneDocument());
           }
 
           if (ulog != null) ulog.add(cmd);
@@ -440,8 +442,7 @@
       RefCounted<IndexWriter> iw = solrCoreState.getIndexWriter(core);
       try {
         IndexWriter writer = iw.get();
-        writer.updateDocument(idTerm, luceneDocument, cmd.getReq().getSchema()
-            .getIndexAnalyzer());
+        writer.updateDocument(idTerm, luceneDocument);
         
         for (Query q : dbqList) {
           writer.deleteDocuments(new DeleteByQueryWrapper(q, core.getLatestSchema()));
@@ -467,9 +468,15 @@
     
     List<DirectoryReader> readers = cmd.readers;
     if (readers != null && readers.size() > 0) {
+      List<CodecReader> mergeReaders = new ArrayList<>();
+      for (DirectoryReader reader : readers) {
+        for (LeafReaderContext leaf : reader.leaves()) {
+          mergeReaders.add(SlowCodecReaderWrapper.wrap(leaf.reader()));
+        }
+      }
       RefCounted<IndexWriter> iw = solrCoreState.getIndexWriter(core);
       try {
-        iw.get().addIndexes(readers.toArray(new IndexReader[readers.size()]));
+        iw.get().addIndexes(mergeReaders.toArray(new CodecReader[mergeReaders.size()]));
       } finally {
         iw.decref();
       }
@@ -650,9 +657,7 @@
     if (waitSearcher!=null && waitSearcher[0] != null) {
        try {
         waitSearcher[0].get();
-      } catch (InterruptedException e) {
-        SolrException.log(log,e);
-      } catch (ExecutionException e) {
+      } catch (InterruptedException | ExecutionException e) {
         SolrException.log(log,e);
       }
     }
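
IndexWriter.addIndexes(IndexReader...) no longer exists on this branch; the
merge path now flattens each incoming DirectoryReader into its leaves and wraps
each as a CodecReader. The adaptation in isolation (sketch, mirroring the hunk
above):

    // Sketch: adapt arbitrary readers for addIndexes(CodecReader...).
    List<CodecReader> mergeReaders = new ArrayList<>();
    for (DirectoryReader reader : readers) {
      for (LeafReaderContext leaf : reader.leaves()) {
        // a codec-style view over any LeafReader; "slow" because data is
        // re-read and re-encoded rather than bulk-copied
        mergeReaders.add(SlowCodecReaderWrapper.wrap(leaf.reader()));
      }
    }
    writer.addIndexes(mergeReaders.toArray(new CodecReader[mergeReaders.size()]));
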
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
index 73ab0e7..8b61b88 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
@@ -33,6 +33,8 @@
 import org.apache.solr.common.util.FastInputStream;
 import org.apache.solr.common.util.FastOutputStream;
 import org.apache.solr.common.util.JavaBinCodec;
+import org.apache.solr.common.util.ObjectReleaseTracker;
+import org.apache.solr.util.FSHDFSUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -78,8 +80,9 @@
       }
       this.tlogFile = tlogFile;
       
-      // TODO: look into forcefully taking over any lease
       if (fs.exists(tlogFile) && openExisting) {
+        FSHDFSUtils.recoverFileLease(fs, tlogFile, fs.getConf());
+        
         tlogOutStream = fs.append(tlogFile);
       } else {
         fs.delete(tlogFile, false);
@@ -114,6 +117,8 @@
 
       success = true;
 
+      assert ObjectReleaseTracker.track(this);
+      
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     } finally {
@@ -305,6 +310,7 @@
       log.error("Exception closing tlog.", e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     } finally {
+      assert ObjectReleaseTracker.release(this);
       if (deleteOnClose) {
         try {
           fs.delete(tlogFile, true);
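
Two notes on the HdfsTransactionLog changes: FSHDFSUtils.recoverFileLease()
replaces the old TODO by recovering the HDFS lease before append(), since a
tlog left open by a dead writer would otherwise block reopening; and the
ObjectReleaseTracker calls are assert-guarded, so they run only with assertions
enabled (as in tests) and flag tlogs that are opened but never closed. The
tracker idiom (sketch):

    // Sketch: track() on successful open, release() on close; both return
    // true, so the asserts cost nothing when assertions are disabled.
    assert ObjectReleaseTracker.track(this);
    // ... lifetime of the resource ...
    assert ObjectReleaseTracker.release(this);
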
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
index f4ff989..111ef2e 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsUpdateLog.java
@@ -25,6 +25,7 @@
 import java.util.Locale;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -35,10 +36,10 @@
 import org.apache.lucene.util.BytesRef;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.util.HdfsUtil;
-import org.apache.solr.util.IOUtils;
 
 /** @lucene.experimental */
 public class HdfsUpdateLog extends UpdateLog {
@@ -46,6 +47,9 @@
   private volatile FileSystem fs;
   private volatile Path tlogDir;
   private final String confDir;
+  
+  // used internally by tests to track the total count of failed transaction log loads during init
+  public static AtomicLong INIT_FAILED_LOGS_COUNT = new AtomicLong();
 
   public HdfsUpdateLog() {
     this.confDir = null;
@@ -66,9 +70,7 @@
     if (future != null) {
       try {
         future.get();
-      } catch (InterruptedException e) {
-        throw new RuntimeException(e);
-      } catch (ExecutionException e) {
+      } catch (InterruptedException | ExecutionException e) {
         throw new RuntimeException(e);
       }
     }
@@ -191,6 +193,7 @@
         addOldLog(oldLog, false); // don't remove old logs on startup since more
                                   // than one may be uncapped.
       } catch (Exception e) {
+        INIT_FAILED_LOGS_COUNT.incrementAndGet();
         SolrException.log(log, "Failure to open existing log file (non fatal) "
             + f, e);
         try {
@@ -264,8 +267,6 @@
           return path.getName().startsWith(prefix);
         }
       });
-    } catch (FileNotFoundException e) {
-      throw new RuntimeException(e);
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index 8e045d6..77b26a9 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -296,20 +296,20 @@
         boolean connectTimeoutExceptionInChain = connectTimeoutExceptionInChain(srsp.getException());
         if (connectTimeoutExceptionInChain || solrException instanceof ConnectException || solrException instanceof ConnectTimeoutException
             || solrException instanceof NoHttpResponseException || solrException instanceof SocketException) {
-          log.warn(msg() + " couldn't connect to " + srsp.getShardAddress() + ", counting as success");
+          log.warn(msg() + " couldn't connect to " + srsp.getShardAddress() + ", counting as success", srsp.getException());
 
           return true;
         }
       }
       
       if (cantReachIsSuccess && sreq.purpose == 1 && srsp.getException() instanceof SolrException && ((SolrException) srsp.getException()).code() == 503) {
-        log.warn(msg() + " got a 503 from " + srsp.getShardAddress() + ", counting as success");
+        log.warn(msg() + " got a 503 from " + srsp.getShardAddress() + ", counting as success", srsp.getException());
         return true;
       }
       
       if (cantReachIsSuccess && sreq.purpose == 1 && srsp.getException() instanceof SolrException && ((SolrException) srsp.getException()).code() == 404) {
         log.warn(msg() + " got a 404 from " + srsp.getShardAddress() + ", counting as success. " +
-            "Perhaps /get is not registered?");
+            "Perhaps /get is not registered?", srsp.getException());
         return true;
       }
       
diff --git a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
index a60d271..8f556e1 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrCmdDistributor.java
@@ -252,14 +252,11 @@
   private void submit(final Req req, boolean isCommit) {
     if (req.synchronous) {
       blockAndDoRetries();
-      
-      HttpSolrClient client = new HttpSolrClient(req.node.getUrl(), clients.getHttpClient());
-      try {
+
+      try (HttpSolrClient client = new HttpSolrClient(req.node.getUrl(), clients.getHttpClient())) {
         client.request(req.uReq);
       } catch (Exception e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, "Failed synchronous update on shard " + req.node + " update: " + req.uReq , e);
-      } finally {
-        client.shutdown();
       }
       
       return;
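
HttpSolrClient implements Closeable on this branch, so the explicit shutdown()
in a finally block becomes try-with-resources. Equivalent shape (sketch):

    // Sketch: the client is closed automatically, on success or exception.
    try (HttpSolrClient client = new HttpSolrClient(url, httpClient)) {
      client.request(updateRequest);
    }
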
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
index 9dedfba..793490e 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
@@ -17,29 +17,31 @@
 
 package org.apache.solr.update;
 
-import org.apache.lucene.index.*;
-import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
-import org.apache.lucene.util.InfoStream;
-import org.apache.lucene.util.PrintStreamInfoStream;
-import org.apache.lucene.util.Version;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.ZkNodeProps;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.MapSerializable;
-import org.apache.solr.core.SolrConfig;
-import org.apache.solr.core.PluginInfo;
-import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.util.SolrPluginUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.PrintStream;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
+import org.apache.lucene.index.*;
+import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
+import org.apache.lucene.util.InfoStream;
+import org.apache.lucene.util.Version;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.MapSerializable;
+import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrConfig;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.util.SolrPluginUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.core.Config.assertWarnOrFail;
+
 /**
  * This config object encapsulates IndexWriter config params,
  * defined in the &lt;indexConfig&gt; section of solrconfig.xml
@@ -50,14 +52,8 @@
   final String defaultMergePolicyClassName;
   public static final String DEFAULT_MERGE_SCHEDULER_CLASSNAME = ConcurrentMergeScheduler.class.getName();
   public final Version luceneVersion;
-  
-  /**
-   * The explicit value of &lt;useCompoundFile&gt; specified on this index config
-   * @deprecated use {@link #getUseCompoundFile}
-   */
-  @Deprecated
-  public final boolean useCompoundFile;
-  private boolean effectiveUseCompountFileSetting;
+
+  private boolean effectiveUseCompoundFileSetting;
 
   public final int maxBufferedDocs;
   public final int maxMergeDocs;
@@ -87,7 +83,7 @@
   @SuppressWarnings("deprecation")
   private SolrIndexConfig(SolrConfig solrConfig) {
     luceneVersion = solrConfig.luceneMatchVersion;
-    useCompoundFile = effectiveUseCompountFileSetting = false;
+    effectiveUseCompoundFileSetting = false;
     maxBufferedDocs = -1;
     maxMergeDocs = -1;
     maxIndexingThreads = IndexWriterConfig.DEFAULT_MAX_THREAD_STATES;
@@ -127,18 +123,17 @@
     // Assert that end-of-life parameters or syntax is not in our config.
     // Warn for luceneMatchVersion's before LUCENE_3_6, fail fast above
     assertWarnOrFail("The <mergeScheduler>myclass</mergeScheduler> syntax is no longer supported in solrconfig.xml. Please use syntax <mergeScheduler class=\"myclass\"/> instead.",
-        !((solrConfig.getNode(prefix+"/mergeScheduler",false) != null) && (solrConfig.get(prefix+"/mergeScheduler/@class",null) == null)),
+        !((solrConfig.getNode(prefix + "/mergeScheduler", false) != null) && (solrConfig.get(prefix + "/mergeScheduler/@class", null) == null)),
         true);
     assertWarnOrFail("The <mergePolicy>myclass</mergePolicy> syntax is no longer supported in solrconfig.xml. Please use syntax <mergePolicy class=\"myclass\"/> instead.",
-        !((solrConfig.getNode(prefix+"/mergePolicy",false) != null) && (solrConfig.get(prefix+"/mergePolicy/@class",null) == null)),
+        !((solrConfig.getNode(prefix + "/mergePolicy", false) != null) && (solrConfig.get(prefix + "/mergePolicy/@class", null) == null)),
         true);
     assertWarnOrFail("The <luceneAutoCommit>true|false</luceneAutoCommit> parameter is no longer valid in solrconfig.xml.",
-        solrConfig.get(prefix+"/luceneAutoCommit", null) == null,
+        solrConfig.get(prefix + "/luceneAutoCommit", null) == null,
         true);
 
     defaultMergePolicyClassName = def.defaultMergePolicyClassName;
-    useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
-    effectiveUseCompountFileSetting = useCompoundFile;
+    effectiveUseCompoundFileSetting = solrConfig.getBool(prefix+"/useCompoundFile", def.getUseCompoundFile());
     maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
     maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
     maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
@@ -167,13 +162,10 @@
       }
     }
     mergedSegmentWarmerInfo = getPluginInfo(prefix + "/mergedSegmentWarmer", solrConfig, def.mergedSegmentWarmerInfo);
-    if (mergedSegmentWarmerInfo != null && solrConfig.nrtMode == false) {
-      throw new IllegalArgumentException("Supplying a mergedSegmentWarmer will do nothing since nrtMode is false");
-    }
 
     assertWarnOrFail("Begining with Solr 5.0, <checkIntegrityAtMerge> option is no longer supported and should be removed from solrconfig.xml (these integrity checks are now automatic)",
-                     (null == solrConfig.getNode(prefix+"/checkIntegrityAtMerge",false)),
-                     true);
+        (null == solrConfig.getNode(prefix + "/checkIntegrityAtMerge", false)),
+        true);
   }
   @Override
   public Map<String, Object> toMap() {
@@ -189,32 +181,28 @@
     return m;
   }
 
-  /*
-   * Assert that assertCondition is true.
-   * If not, prints reason as log warning.
-   * If failCondition is true, then throw exception instead of warning 
-   */
-  private void assertWarnOrFail(String reason, boolean assertCondition, boolean failCondition) {
-    if(assertCondition) {
-      return;
-    } else if(failCondition) {
-      throw new SolrException(ErrorCode.FORBIDDEN, reason);
-    } else {
-      log.warn(reason);
-    }
-  }
-
   private PluginInfo getPluginInfo(String path, SolrConfig solrConfig, PluginInfo def)  {
     List<PluginInfo> l = solrConfig.readPluginInfos(path, false, true);
     return l.isEmpty() ? def : l.get(0);
   }
 
-  public IndexWriterConfig toIndexWriterConfig(IndexSchema schema) {
-    // so that we can update the analyzer on core reload, we pass null
-    // for the default analyzer, and explicitly pass an analyzer on 
-    // appropriate calls to IndexWriter
-    
-    IndexWriterConfig iwc = new IndexWriterConfig(null);
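+  /**
+   * Resolves the index analyzer from the core's latest schema on every call,
+   * so that analyzer changes are picked up on core reload. This replaces the
+   * old approach of passing a null default analyzer and explicitly passing an
+   * analyzer on the appropriate IndexWriter calls.
+   */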
+  private static class DelayedSchemaAnalyzer extends DelegatingAnalyzerWrapper {
+    private final SolrCore core;
+
+    public DelayedSchemaAnalyzer(SolrCore core) {
+      super(PER_FIELD_REUSE_STRATEGY);
+      this.core = core;
+    }
+
+    @Override
+    protected Analyzer getWrappedAnalyzer(String fieldName) {
+      return core.getLatestSchema().getIndexAnalyzer();
+    }
+  }
+
+  public IndexWriterConfig toIndexWriterConfig(SolrCore core) {
+    IndexSchema schema = core.getLatestSchema();
+    IndexWriterConfig iwc = new IndexWriterConfig(new DelayedSchemaAnalyzer(core));
     if (maxBufferedDocs != -1)
       iwc.setMaxBufferedDocs(maxBufferedDocs);
 
@@ -327,7 +315,7 @@
   }
 
   public boolean getUseCompoundFile() {
-    return effectiveUseCompountFileSetting;
+    return effectiveUseCompoundFileSetting;
   }
 
   /**
@@ -351,7 +339,7 @@
       if (useCFSArg instanceof Boolean) {
         boolean cfs = ((Boolean)useCFSArg).booleanValue();
         log.warn("Please update your config to specify <useCompoundFile>"+cfs+"</useCompoundFile> directly in your <indexConfig> settings.");
-        effectiveUseCompountFileSetting = cfs;
+        effectiveUseCompoundFileSetting = cfs;
       } else {
         log.error("MergePolicy's 'useCompoundFile' init arg is not a boolean, can not apply back compat logic to apply to the IndexWriterConfig: " + useCFSArg.toString());
       }
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
index 650c1a3..8b55281 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
@@ -21,13 +21,14 @@
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.FilterCodecReader;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.Fields;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.SlowCodecReaderWrapper;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -120,7 +121,7 @@
       } else {
         SolrCore core = searcher.getCore();
         String path = paths.get(partitionNumber);
-        iw = SolrIndexWriter.create("SplittingIndexWriter"+partitionNumber + (ranges != null ? " " + ranges.get(partitionNumber) : ""), path,
+        iw = SolrIndexWriter.create(core, "SplittingIndexWriter"+partitionNumber + (ranges != null ? " " + ranges.get(partitionNumber) : ""), path,
                                     core.getDirectoryFactory(), true, core.getLatestSchema(),
                                     core.getSolrConfig().indexConfig, core.getDeletionPolicy(), core.getCodec());
       }
@@ -129,8 +130,8 @@
         // This removes deletions but optimize might still be needed because sub-shards will have the same number of segments as the parent shard.
         for (int segmentNumber = 0; segmentNumber<leaves.size(); segmentNumber++) {
           log.info("SolrIndexSplitter: partition #" + partitionNumber + " partitionCount=" + numPieces + (ranges != null ? " range=" + ranges.get(partitionNumber) : "") + " segment #"+segmentNumber + " segmentCount=" + leaves.size());
-          IndexReader subReader = new LiveDocsReader( leaves.get(segmentNumber), segmentDocSets.get(segmentNumber)[partitionNumber] );
-          iw.addIndexes(subReader);
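+          // addIndexes now expects CodecReader instances here, so the leaf reader is
+          // first wrapped via SlowCodecReaderWrapper before its liveDocs are filtered.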
+          CodecReader subReader = SlowCodecReaderWrapper.wrap(leaves.get(segmentNumber).reader());
+          iw.addIndexes(new LiveDocsReader(subReader, segmentDocSets.get(segmentNumber)[partitionNumber]));
         }
         success = true;
       } finally {
@@ -232,12 +233,12 @@
 
 
   // change livedocs on the reader to delete those docs we don't want
-  static class LiveDocsReader extends FilterLeafReader {
+  static class LiveDocsReader extends FilterCodecReader {
     final FixedBitSet liveDocs;
     final int numDocs;
 
-    public LiveDocsReader(LeafReaderContext context, FixedBitSet liveDocs) throws IOException {
-      super(context.reader());
+    public LiveDocsReader(CodecReader in, FixedBitSet liveDocs) throws IOException {
+      super(in);
       this.liveDocs = liveDocs;
       this.numDocs = liveDocs.cardinality();
     }
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
index be5a16c..036d0ad 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexWriter.java
@@ -26,10 +26,11 @@
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.InfoStream;
-import org.apache.solr.core.DirectoryFactory;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.DirectoryFactory.DirContext;
+import org.apache.solr.core.DirectoryFactory;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.util.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -56,12 +57,12 @@
   private InfoStream infoStream;
   private Directory directory;
 
-  public static SolrIndexWriter create(String name, String path, DirectoryFactory directoryFactory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec) throws IOException {
+  public static SolrIndexWriter create(SolrCore core, String name, String path, DirectoryFactory directoryFactory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec) throws IOException {
 
     SolrIndexWriter w = null;
     final Directory d = directoryFactory.get(path, DirContext.DEFAULT, config.lockType);
     try {
-      w = new SolrIndexWriter(name, path, d, create, schema, 
+      w = new SolrIndexWriter(core, name, path, d, create, schema, 
                               config, delPolicy, codec);
       w.setDirectoryFactory(directoryFactory);
       return w;
@@ -73,9 +74,9 @@
     }
   }
 
-  private SolrIndexWriter(String name, String path, Directory directory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec) throws IOException {
+  private SolrIndexWriter(SolrCore core, String name, String path, Directory directory, boolean create, IndexSchema schema, SolrIndexConfig config, IndexDeletionPolicy delPolicy, Codec codec) throws IOException {
     super(directory,
-          config.toIndexWriterConfig(schema).
+          config.toIndexWriterConfig(core).
           setOpenMode(create ? IndexWriterConfig.OpenMode.CREATE : IndexWriterConfig.OpenMode.APPEND).
           setIndexDeletionPolicy(delPolicy).setCodec(codec)
           );
diff --git a/solr/core/src/java/org/apache/solr/update/StreamingSolrClients.java b/solr/core/src/java/org/apache/solr/update/StreamingSolrClients.java
index 381d7cb..935de07 100644
--- a/solr/core/src/java/org/apache/solr/update/StreamingSolrClients.java
+++ b/solr/core/src/java/org/apache/solr/update/StreamingSolrClients.java
@@ -106,7 +106,7 @@
   
   public synchronized void shutdown() {
     for (ConcurrentUpdateSolrClient client : solrClients.values()) {
-      client.shutdown();
+      client.close();
     }
   }
   
diff --git a/solr/core/src/java/org/apache/solr/update/TransactionLog.java b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
index 88b8133..88f879e 100644
--- a/solr/core/src/java/org/apache/solr/update/TransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
@@ -40,6 +40,7 @@
 import org.apache.solr.common.util.FastInputStream;
 import org.apache.solr.common.util.FastOutputStream;
 import org.apache.solr.common.util.JavaBinCodec;
+import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -177,6 +178,8 @@
       }
 
       success = true;
+      
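+      // Track this log (assertions only) so leaked instances can be detected;
+      // the matching ObjectReleaseTracker.release(this) appears in the close path below.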
+      assert ObjectReleaseTracker.track(this);
 
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
@@ -553,6 +556,8 @@
       }
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+    } finally {
+      assert ObjectReleaseTracker.release(this);
     }
   }
   
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
index 8e066c5..16bbf32 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
@@ -1302,10 +1302,6 @@
                 // versionInfo.unblockUpdates();
               }
             }
-          } catch (InterruptedException e) {
-            SolrException.log(log,e);
-          } catch (IOException e) {
-            SolrException.log(log,e);
           } catch (Exception e) {
             SolrException.log(log,e);
           }
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
index b423bb4..891ccc8 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateShardHandler.java
@@ -22,12 +22,14 @@
 
 import org.apache.http.client.HttpClient;
 import org.apache.http.conn.ClientConnectionManager;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.conn.PoolingClientConnectionManager;
 import org.apache.http.impl.conn.SchemeRegistryFactory;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.core.ConfigSolr;
 import org.slf4j.Logger;
@@ -42,7 +44,7 @@
   
   private PoolingClientConnectionManager clientConnectionManager;
   
-  private final HttpClient client;
+  private final CloseableHttpClient client;
 
   public UpdateShardHandler(ConfigSolr cfg) {
     
@@ -52,7 +54,6 @@
       clientConnectionManager.setDefaultMaxPerRoute(cfg.getMaxUpdateConnectionsPerHost());
     }
     
-    
     ModifiableSolrParams params = new ModifiableSolrParams();
     if (cfg != null) {
       params.set(HttpClientUtil.PROP_SO_TIMEOUT,
@@ -60,7 +61,10 @@
       params.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT,
           cfg.getDistributedConnectionTimeout());
     }
-    params.set(HttpClientUtil.PROP_USE_RETRY, false);
+    // in the update case, we want to do retries, and to use
+    // the default Solr retry handler that createClient will 
+    // give us
+    params.set(HttpClientUtil.PROP_USE_RETRY, true);
     log.info("Creating UpdateShardHandler HTTP client with params: {}", params);
     client = HttpClientUtil.createClient(params, clientConnectionManager);
   }
@@ -84,6 +88,7 @@
     } catch (Exception e) {
       SolrException.log(log, e);
     } finally {
+      IOUtils.closeQuietly(client);
       clientConnectionManager.shutdown();
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
new file mode 100644
index 0000000..2bc0e73
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
@@ -0,0 +1,216 @@
+package org.apache.solr.update.processor;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.SolrInputField;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.update.AddUpdateCommand;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @lucene.experimental
+ */
+public class AtomicUpdateDocumentMerger {
+  
+  private final static Logger log = LoggerFactory.getLogger(AtomicUpdateDocumentMerger.class);
+  
+  protected final IndexSchema schema;
+  protected final SchemaField idField;
+  
+  public AtomicUpdateDocumentMerger(SolrQueryRequest queryReq) {
+    schema = queryReq.getSchema();
+    idField = schema.getUniqueKeyField();
+  }
+  
+  /**
+   * Utility method that examines the SolrInputDocument in an AddUpdateCommand
+   * and returns true if the document contains atomic update instructions.
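+   * <p>
+   * Illustrative example (hypothetical values): a field whose value is a
+   * {@code Map}, e.g. {@code {"inc": 1}}, marks the command as an atomic
+   * update; plain scalar or collection values do not.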
+   */
+  public static boolean isAtomicUpdate(final AddUpdateCommand cmd) {
+    SolrInputDocument sdoc = cmd.getSolrInputDocument();
+    for (SolrInputField sif : sdoc.values()) {
+      if (sif.getValue() instanceof Map) {
+        return true;
+      }
+    }
+    
+    return false;
+  }
+  
+  /**
+   * Merges the fromDoc into the toDoc using the atomic update syntax.
+   * 
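+   * <p>
+   * Sketch of the semantics (hypothetical values): merging a fromDoc of
+   * {@code {count={inc=5}}} into a toDoc holding {@code count=10} leaves
+   * {@code count=15}; a plain (non-Map) field value simply replaces the
+   * existing field, as with "set".
+   * 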
+   * @param fromDoc SolrInputDocument which will be merged into the toDoc
+   * @param toDoc the final SolrInputDocument that will be mutated with the values from the fromDoc atomic commands
+   * @return toDoc with mutated values
+   */
+  public SolrInputDocument merge(final SolrInputDocument fromDoc, SolrInputDocument toDoc) {
+    for (SolrInputField sif : fromDoc.values()) {
+      Object val = sif.getValue();
+      if (val instanceof Map) {
+        for (Entry<String,Object> entry : ((Map<String,Object>) val).entrySet()) {
+          String key = entry.getKey();
+          Object fieldVal = entry.getValue();
+          boolean updateField = false;
+          switch (key) {
+            case "add":
+              updateField = true;
+              doAdd(toDoc, sif, fieldVal);
+              break;
+            case "set":
+              updateField = true;
+              doSet(toDoc, sif, fieldVal);
+              break;
+            case "remove":
+              updateField = true;
+              doRemove(toDoc, sif, fieldVal);
+              break;
+            case "removeregex":
+              updateField = true;
+              doRemoveRegex(toDoc, sif, fieldVal);
+              break;
+            case "inc":
+              updateField = true;
+              doInc(toDoc, sif, fieldVal);
+              break;
+            default:
+              //Perhaps throw an error here instead?
+              log.warn("Unknown operation for the an atomic update, operation ignored: " + key);
+              break;
+          }
+          // validate that the field being modified is not the id field.
+          if (updateField && idField.getName().equals(sif.getName())) {
+            throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid update of id field: " + sif);
+          }
+
+        }
+      } else {
+        // normal fields are treated as a "set"
+        toDoc.put(sif.getName(), sif);
+      }
+    }
+    
+    return toDoc;
+  }
+  
+  protected void doSet(SolrInputDocument toDoc, SolrInputField sif, Object fieldVal) {
+    toDoc.setField(sif.getName(), fieldVal, sif.getBoost());
+  }
+
+  private void doAdd(SolrInputDocument toDoc, SolrInputField sif, Object fieldVal) {
+    toDoc.addField(sif.getName(), fieldVal, sif.getBoost());
+  }
+
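+  /**
+   * Increments the existing numeric value by the supplied amount, using the
+   * field's schema type to pick the numeric width; if the field is absent,
+   * the increment value is simply set. (Hypothetical example: an existing
+   * {@code count=3} with an "inc" of 2 yields {@code count=5}.)
+   */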
+  protected void doInc(SolrInputDocument toDoc, SolrInputField sif, Object fieldVal) {
+    SolrInputField numericField = toDoc.get(sif.getName());
+    if (numericField == null) {
+      toDoc.setField(sif.getName(), fieldVal, sif.getBoost());
+    } else {
+      // TODO: fieldtype needs externalToObject?
+      String oldValS = numericField.getFirstValue().toString();
+      SchemaField sf = schema.getField(sif.getName());
+      BytesRefBuilder term = new BytesRefBuilder();
+      sf.getType().readableToIndexed(oldValS, term);
+      Object oldVal = sf.getType().toObject(sf, term.get());
+
+      String fieldValS = fieldVal.toString();
+      Number result;
+      if (oldVal instanceof Long) {
+        result = ((Long) oldVal).longValue() + Long.parseLong(fieldValS);
+      } else if (oldVal instanceof Float) {
+        result = ((Float) oldVal).floatValue() + Float.parseFloat(fieldValS);
+      } else if (oldVal instanceof Double) {
+        result = ((Double) oldVal).doubleValue() + Double.parseDouble(fieldValS);
+      } else {
+        // int, short, byte
+        result = ((Integer) oldVal).intValue() + Integer.parseInt(fieldValS);
+      }
+
+      toDoc.setField(sif.getName(), result, sif.getBoost());
+    }
+  }
+  
+  protected void doRemove(SolrInputDocument toDoc, SolrInputField sif, Object fieldVal) {
+    final String name = sif.getName();
+    SolrInputField existingField = toDoc.get(name);
+    if (existingField == null) return;
+    SchemaField sf = schema.getField(name);
+
+    if (sf != null) {
+      final Collection<Object> original = existingField.getValues();
+      if (fieldVal instanceof Collection) {
+        for (Object object : (Collection)fieldVal){
+          original.remove(sf.getType().toNativeType(object));
+        }
+      } else {
+        original.remove(sf.getType().toNativeType(fieldVal));
+      }
+
+      toDoc.setField(name, original);
+    }
+  }
+
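+  /**
+   * Removes every existing value that fully matches at least one of the
+   * supplied regex patterns. (Hypothetical example: existing values
+   * {@code ["abc", "xyz"]} with a "removeregex" of {@code "a.*"} leave only
+   * {@code ["xyz"]}, since Matcher.matches() requires a full match.)
+   */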
+  protected void doRemoveRegex(SolrInputDocument toDoc, SolrInputField sif, Object valuePatterns) {
+    final String name = sif.getName();
+    final SolrInputField existingField = toDoc.get(name);
+    if (existingField != null) {
+      final Collection<Object> valueToRemove = new HashSet<>();
+      final Collection<Object> original = existingField.getValues();
+      final Collection<Pattern> patterns = preparePatterns(valuePatterns);
+      for (Object value : original) {
+        for (Pattern pattern : patterns) {
+          final Matcher m = pattern.matcher(value.toString());
+          if (m.matches()) {
+            valueToRemove.add(value);
+          }
+        }
+      }
+      original.removeAll(valueToRemove);
+      toDoc.setField(name, original);
+    }
+  }
+
+  private Collection<Pattern> preparePatterns(Object fieldVal) {
+    final Collection<Pattern> patterns = new LinkedHashSet<>(1);
+    if (fieldVal instanceof Collection) {
+      Collection<String> patternVals = (Collection<String>) fieldVal;
+      for (String patternVal : patternVals) {
+        patterns.add(Pattern.compile(patternVal));
+      }
+    } else {
+      patterns.add(Pattern.compile(fieldVal.toString()));
+    }
+    return patterns;
+  }
+  
+}
diff --git a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
index 29ad7d2..a683053 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
@@ -16,31 +16,25 @@
  */
 package org.apache.solr.update.processor;
 
-import java.io.IOException;
-import java.util.Collection;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.HashSet;
-
-import org.apache.solr.core.SolrCore;
-import org.apache.solr.util.plugin.SolrCoreAware;
-
-import org.apache.solr.common.util.NamedList;
-
-import org.apache.solr.common.SolrInputField;
-import org.apache.solr.common.SolrInputDocument;
-
-import org.apache.solr.common.SolrException;
 import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.SolrInputField;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
-
 import org.apache.solr.update.AddUpdateCommand;
-
-import org.apache.solr.update.processor.FieldMutatingUpdateProcessorFactory.SelectorParams;
 import org.apache.solr.update.processor.FieldMutatingUpdateProcessor.FieldNameSelector;
-
+import org.apache.solr.update.processor.FieldMutatingUpdateProcessorFactory.SelectorParams;
+import org.apache.solr.util.plugin.SolrCoreAware;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -221,8 +215,11 @@
         boolean modified = false;
         for (final String fname : doc.getFieldNames()) {
           if (! srcSelector.shouldMutate(fname)) continue;
+          
+          Collection<Object> srcFieldValues = doc.getFieldValues(fname);
+          if (srcFieldValues == null || srcFieldValues.isEmpty()) continue;
 
-          for (Object val : doc.getFieldValues(fname)) {
+          for (Object val : srcFieldValues) {
             // preserve existing dest boost (multiplicitive), ignore src boost
             destField.addValue(val, 1.0f);
           }
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
index 17c5e26..4e6d4a5 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java
@@ -17,29 +17,7 @@
  * limitations under the License.
  */
 
-import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReentrantLock;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
 import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.cloud.CloudDescriptor;
@@ -76,7 +54,6 @@
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.update.AddUpdateCommand;
 import org.apache.solr.update.CommitUpdateCommand;
@@ -96,6 +73,24 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.locks.ReentrantLock;
+
+import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
+
 // NOT mt-safe... create a new processor for each add thread
 // TODO: we really should not wait for distrib after local? unless a certain replication factor is asked for
 public class DistributedUpdateProcessor extends UpdateRequestProcessor {
@@ -231,6 +226,7 @@
   private final SolrQueryRequest req;
   private final SolrQueryResponse rsp;
   private final UpdateRequestProcessor next;
+  private final AtomicUpdateDocumentMerger docMerger;
 
   public static final String VERSION_FIELD = "_version_";
 
@@ -266,12 +262,20 @@
     
   //used for keeping track of replicas that have processed an add/update from the leader
   private RequestReplicationTracker replicationTracker = null;
-  
+
+  public DistributedUpdateProcessor(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
+    this(req, rsp, new AtomicUpdateDocumentMerger(req), next);
+  }
+
+  /** Specifying a custom AtomicUpdateDocumentMerger is currently experimental.
+   * @lucene.experimental
+   */
   public DistributedUpdateProcessor(SolrQueryRequest req,
-      SolrQueryResponse rsp, UpdateRequestProcessor next) {
+      SolrQueryResponse rsp, AtomicUpdateDocumentMerger docMerger, UpdateRequestProcessor next) {
     super(next);
     this.rsp = rsp;
     this.next = next;
+    this.docMerger = docMerger;
     this.idField = req.getSchema().getUniqueKeyField();
     // version init
 
@@ -784,7 +788,7 @@
       } else {
         if (log.isWarnEnabled()) {
           for (Error error : errors) {
-            log.warn("Error sending update", error.e);
+            log.warn("Error sending update to " + error.req.node.getBaseUrl(), error.e);
           }
         }
       }
@@ -949,7 +953,7 @@
     }
 
     if (vinfo == null) {
-      if (isAtomicUpdate(cmd)) {
+      if (AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) {
         throw new SolrException
           (SolrException.ErrorCode.BAD_REQUEST,
            "Atomic document updates are not supported unless <updateLog/> is configured");
@@ -1095,24 +1099,10 @@
     return false;
   }
 
-  /**
-   * Utility method that examines the SolrInputDocument in an AddUpdateCommand
-   * and returns true if the documents contains atomic update instructions.
-   */
-  public static boolean isAtomicUpdate(final AddUpdateCommand cmd) {
-    SolrInputDocument sdoc = cmd.getSolrInputDocument();
-    for (SolrInputField sif : sdoc.values()) {
-      if (sif.getValue() instanceof Map) {
-        return true;
-      }
-    }
-    return false;
-  }
-
   // TODO: may want to switch to using optimistic locking in the future for better concurrency
   // that's why this code is here... need to retry in a loop closely around/in versionAdd
   boolean getUpdatedDocument(AddUpdateCommand cmd, long versionOnUpdate) throws IOException {
-    if (!isAtomicUpdate(cmd)) return false;
+    if (!AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) return false;
 
     SolrInputDocument sdoc = cmd.getSolrInputDocument();
     BytesRef id = cmd.getIndexedId();
@@ -1129,142 +1119,10 @@
     } else {
       oldDoc.remove(VERSION_FIELD);
     }
-
-    IndexSchema schema = cmd.getReq().getSchema();
-    for (SolrInputField sif : sdoc.values()) {
-     Object val = sif.getValue();
-      if (val instanceof Map) {
-        for (Entry<String,Object> entry : ((Map<String,Object>) val).entrySet()) {
-          String key = entry.getKey();
-          Object fieldVal = entry.getValue();
-          boolean updateField = false;
-          switch (key) {
-            case "add":
-              updateField = true;
-              oldDoc.addField(sif.getName(), fieldVal, sif.getBoost());
-              break;
-            case "set":
-              updateField = true;
-              oldDoc.setField(sif.getName(), fieldVal, sif.getBoost());
-              break;
-            case "remove":
-              updateField = true;
-              doRemove(oldDoc, sif, fieldVal, schema);
-              break;
-            case "removeregex":
-              updateField = true;
-              doRemoveRegex(oldDoc, sif, fieldVal);
-              break;
-            case "inc":
-              updateField = true;
-              doInc(oldDoc, schema, sif, fieldVal);
-              break;
-            default:
-              //Perhaps throw an error here instead?
-              log.warn("Unknown operation for the an atomic update, operation ignored: " + key);
-              break;
-          }
-          // validate that the field being modified is not the id field.
-          if (updateField && idField.getName().equals(sif.getName())) {
-            throw new SolrException(ErrorCode.BAD_REQUEST, "Invalid update of id field: " + sif);
-          }
-
-        }
-      } else {
-        // normal fields are treated as a "set"
-        oldDoc.put(sif.getName(), sif);
-      }
-
-    }
-
-    cmd.solrDoc = oldDoc;
-    return true;
-  }
-
-  private void doInc(SolrInputDocument oldDoc, IndexSchema schema, SolrInputField sif, Object fieldVal) {
-    SolrInputField numericField = oldDoc.get(sif.getName());
-    if (numericField == null) {
-      oldDoc.setField(sif.getName(),  fieldVal, sif.getBoost());
-    } else {
-      // TODO: fieldtype needs externalToObject?
-      String oldValS = numericField.getFirstValue().toString();
-      SchemaField sf = schema.getField(sif.getName());
-      BytesRefBuilder term = new BytesRefBuilder();
-      sf.getType().readableToIndexed(oldValS, term);
-      Object oldVal = sf.getType().toObject(sf, term.get());
-
-      String fieldValS = fieldVal.toString();
-      Number result;
-      if (oldVal instanceof Long) {
-        result = ((Long) oldVal).longValue() + Long.parseLong(fieldValS);
-      } else if (oldVal instanceof Float) {
-        result = ((Float) oldVal).floatValue() + Float.parseFloat(fieldValS);
-      } else if (oldVal instanceof Double) {
-        result = ((Double) oldVal).doubleValue() + Double.parseDouble(fieldValS);
-      } else {
-        // int, short, byte
-        result = ((Integer) oldVal).intValue() + Integer.parseInt(fieldValS);
-      }
-
-      oldDoc.setField(sif.getName(),  result, sif.getBoost());
-    }
-  }
-  
-  private boolean doRemove(SolrInputDocument oldDoc, SolrInputField sif, Object fieldVal, IndexSchema schema) {
-    final String name = sif.getName();
-    SolrInputField existingField = oldDoc.get(name);
-    if(existingField == null) return false;
-    SchemaField sf = schema.getField(name);
-    int oldSize = existingField.getValueCount();
-
-    if (sf != null) {
-      final Collection<Object> original = existingField.getValues();
-      if (fieldVal instanceof Collection) {
-        for (Object object : (Collection)fieldVal){
-          original.remove(sf.getType().toNativeType(object));
-        }
-      } else {
-        original.remove(sf.getType().toNativeType(fieldVal));
-      }
-
-      oldDoc.setField(name, original);
-
-    }
     
-    return oldSize > existingField.getValueCount();
-  }
 
-  private void doRemoveRegex(SolrInputDocument oldDoc, SolrInputField sif, Object valuePatterns) {
-    final String name = sif.getName();
-    final SolrInputField existingField = oldDoc.get(name);
-    if (existingField != null) {
-      final Collection<Object> valueToRemove = new HashSet<>();
-      final Collection<Object> original = existingField.getValues();
-      final Collection<Pattern> patterns = preparePatterns(valuePatterns);
-      for (Object value : original) {
-        for(Pattern pattern : patterns) {
-          final Matcher m = pattern.matcher(value.toString());
-          if (m.matches()) {
-            valueToRemove.add(value);
-          }
-        }
-      }
-      original.removeAll(valueToRemove);
-      oldDoc.setField(name, original);
-    }
-  }
-
-  private Collection<Pattern> preparePatterns(Object fieldVal) {
-    final Collection<Pattern> patterns = new LinkedHashSet<>(1);
-    if (fieldVal instanceof Collection) {
-      Collection<String> patternVals = (Collection<String>) fieldVal;
-      for (String patternVal : patternVals) {
-        patterns.add(Pattern.compile(patternVal));
-      }
-    } else {
-      patterns.add(Pattern.compile(fieldVal.toString()));
-    }
-    return patterns;
+    cmd.solrDoc = docMerger.merge(sdoc, oldDoc);
+    return true;
   }
 
   @Override
@@ -1396,7 +1254,6 @@
 
       SolrParams params = req.getParams();
       String route = params.get(ShardParams._ROUTE_);
-      if(route == null) route = params.get(ShardParams.SHARD_KEYS);// deprecated . kept for backcompat
       Collection<Slice> slices = coll.getRouter().getSearchSlices(route, params, coll);
 
       List<Node> leaders =  new ArrayList<>(slices.size());
diff --git a/solr/core/src/java/org/apache/solr/update/processor/FieldNameMutatingUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/FieldNameMutatingUpdateProcessorFactory.java
new file mode 100644
index 0000000..373558d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/update/processor/FieldNameMutatingUpdateProcessorFactory.java
@@ -0,0 +1,103 @@
+package org.apache.solr.update.processor;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.SolrInputField;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.update.AddUpdateCommand;
+import org.apache.solr.update.DeleteUpdateCommand;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * <p>
+ * In the FieldNameMutatingUpdateProcessorFactory configured below,
+ * fields names will be mutated if the name contains space.
+ * Use multiple instances of this processor for multiple replacements
+ * </p>
+ * <pre class="prettyprint">
+ * &lt;processor class="solr.FieldNameMutatingUpdateProcessorFactory"&gt;
+ *   &lt;str name="pattern "&gt;\s&lt;/str&gt;
+ *   &lt;str name="replacement"&gt;_&lt;/str&gt;
+ * &lt;/processor&gt;</pre>
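+ * <p>
+ * With the configuration above (hypothetical input), an incoming field named
+ * "foo bar" would be renamed to "foo_bar" before the document is indexed.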
+ */
+
+public class FieldNameMutatingUpdateProcessorFactory  extends UpdateRequestProcessorFactory{
+  public static final Logger log = LoggerFactory.getLogger(FieldNameMutatingUpdateProcessorFactory.class);
+
+  private String sourcePattern, replacement;
+  private Pattern pattern;
+
+
+  @Override
+  public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
+    return new UpdateRequestProcessor(next) {
+      @Override
+      public void processAdd(AddUpdateCommand cmd) throws IOException {
+        final SolrInputDocument doc = cmd.getSolrInputDocument();
+        final Collection<String> fieldNames
+            = new ArrayList<>(doc.getFieldNames());
+
+        for (final String fname : fieldNames) {
+          Matcher matcher = pattern.matcher(fname);
+          if (matcher.find()) {
+            String newFieldName = matcher.replaceAll(replacement);
+            if (!newFieldName.equals(fname)) {
+              SolrInputField old = doc.remove(fname);
+              old.setName(newFieldName);
+              doc.put(newFieldName, old);
+            }
+          }
+        }
+
+        super.processAdd(cmd);
+      }
+
+      @Override
+      public void processDelete(DeleteUpdateCommand cmd) throws IOException {
+        super.processDelete(cmd);
+      }
+    };
+  }
+
+  @Override
+  public void init(NamedList args) {
+    sourcePattern = (String) args.get("pattern");
+    replacement = (String) args.get("replacement");
+    if (sourcePattern == null || replacement == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "'pattern' and 'replacement' are required values");
+    }
+    try {
+      pattern = Pattern.compile(sourcePattern);
+    } catch (Exception e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "invalid pattern " + sourcePattern);
+    }
+    super.init(args);
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java
index 625f5c7..c969586 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java
@@ -60,7 +60,7 @@
   @Override
   public void processAdd(AddUpdateCommand cmd) throws IOException {
     
-    if (DistributedUpdateProcessor.isAtomicUpdate(cmd)) {
+    if (AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) {
       throw new SolrException
         (SolrException.ErrorCode.BAD_REQUEST,
          "RunUpdateProcessor has received an AddUpdateCommand containing a document that appears to still contain Atomic document update operations, most likely because DistributedUpdateProcessorFactory was explicitly disabled from this updateRequestProcessorChain");
diff --git a/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java
index 4618aad..f6ed262 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/SignatureUpdateProcessorFactory.java
@@ -134,7 +134,7 @@
       if (enabled) {
         SolrInputDocument doc = cmd.getSolrInputDocument();
         List<String> currDocSigFields = null;
-        boolean isPartialUpdate = DistributedUpdateProcessor.isAtomicUpdate(cmd);
+        boolean isPartialUpdate = AtomicUpdateDocumentMerger.isAtomicUpdate(cmd);
         if (sigFields == null || sigFields.size() == 0) {
           if (isPartialUpdate)  {
             throw new SolrException
diff --git a/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java
index c634ec4..d1c3835 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/StatelessScriptUpdateProcessorFactory.java
@@ -429,13 +429,7 @@
             }
           }
 
-        } catch (ScriptException e) {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, 
-                                  "Unable to invoke function " + name + 
-                                  " in script: " + 
-                                  engine.getScriptFile().getFileName() + 
-                                  ": " + e.getMessage(), e);
-        } catch (NoSuchMethodException e) {
+        } catch (ScriptException | NoSuchMethodException e) {
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, 
                                   "Unable to invoke function " + name + 
                                   " in script: " + 
diff --git a/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java
index 62b8a6e..f216487 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/URLClassifyProcessor.java
@@ -125,9 +125,7 @@
             document.setField(canonicalUrlFieldname, getCanonicalUrl(normalizedURL));
           }
           log.debug(document.toString());
-        } catch (MalformedURLException e) {
-          log.warn("cannot get the normalized url for \"" + url + "\" due to " + e.getMessage());
-        } catch (URISyntaxException e) {
+        } catch (MalformedURLException | URISyntaxException e) {
           log.warn("cannot get the normalized url for \"" + url + "\" due to " + e.getMessage());
         }
       }
diff --git a/solr/core/src/java/org/apache/solr/util/DistanceUnits.java b/solr/core/src/java/org/apache/solr/util/DistanceUnits.java
new file mode 100644
index 0000000..e2abcb9
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/util/DistanceUnits.java
@@ -0,0 +1,133 @@
+package org.apache.solr.util;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import com.google.common.collect.ImmutableMap;
+import com.spatial4j.core.distance.DistanceUtils;
+import org.apache.solr.schema.AbstractSpatialFieldType;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Used with a spatial field type for all distance measurements.
+ * 
+ * @see AbstractSpatialFieldType
+ */
+public class DistanceUnits {
+  public final static String KILOMETERS_PARAM = "kilometers";
+  public final static String MILES_PARAM = "miles";
+  public final static String DEGREES_PARAM = "degrees";
+
+  // Singleton distance units instances
+  public final static DistanceUnits KILOMETERS = new DistanceUnits(KILOMETERS_PARAM, DistanceUtils.EARTH_MEAN_RADIUS_KM, 
+      DistanceUtils.KM_TO_DEG);
+  public final static DistanceUnits MILES = new DistanceUnits(MILES_PARAM, DistanceUtils.EARTH_MEAN_RADIUS_MI, 
+      DistanceUtils.MILES_TO_KM * DistanceUtils.KM_TO_DEG);
+  public final static DistanceUnits DEGREES = new DistanceUnits(DEGREES_PARAM, 180.0/Math.PI, 1.0);
+
+  // Previously, distance based filtering was done with km, but scores were based on degrees
+  @Deprecated
+  public final static DistanceUnits BACKCOMPAT = new DistanceUnits("backcompat", DistanceUtils.EARTH_MEAN_RADIUS_KM, 1.0);
+
+  // volatile so other threads see the replacement map when we copy-on-write
+  private static volatile Map<String, DistanceUnits> instances = ImmutableMap.of(
+      KILOMETERS_PARAM, KILOMETERS,
+      MILES_PARAM, MILES,
+      DEGREES_PARAM, DEGREES);
+
+  private final String stringIdentifier;
+  private final double earthRadius;
+  private final double multiplierThisToDegrees;
+  private final double multiplierDegreesToThis;
+
+  private DistanceUnits(String str, double earthRadius, double multiplierThisToDegrees) {
+    this.stringIdentifier = str;
+    this.earthRadius = earthRadius;
+    this.multiplierThisToDegrees = multiplierThisToDegrees;
+    this.multiplierDegreesToThis = 1.0 / multiplierThisToDegrees;
+  }
+
+  /**
+   * Parses a string representation of distance units and returns its implementing class instance.
+   * Preferred way to parse a DistanceUnits would be to use {@link AbstractSpatialFieldType#parseDistanceUnits(String)},
+   * since it will default to one defined on the field type if the string is null.
+   * 
+   * @param str String representation of distance units, e.g. "kilometers", "miles" etc. (null ok)
+   * @return an instance of the concrete DistanceUnits, null if not found.
+   */
+  public static DistanceUnits valueOf(String str) {
+    return instances.get(str);
+  }
+
+  public static Set<String> getSupportedUnits() {
+    return instances.keySet();
+  }
+
+  /**
+   * 
+   * @return Radius of the earth in these distance units
+   */
+  public double getEarthRadius() {
+    return earthRadius;
+  }
+
+  /**
+   * 
+   * @return multiplier needed to convert a distance in current units to degrees
+   */
+  public double multiplierFromThisUnitToDegrees() {
+    return multiplierThisToDegrees;
+  }
+
+  /**
+   * 
+   * @return multiplier needed to convert a distance in degrees to current units
+   */
+  public double multiplierFromDegreesToThisUnit() {
+    return multiplierDegreesToThis;
+  }
+
+  /**
+   * 
+   * @return the string identifier associated with this units instance
+   */
+  public String getStringIdentifier() {
+    return stringIdentifier;
+  }
+
+  /**
+   * Custom distance units can be supplied using this method. It's thread-safe.
+   * 
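+   * <p>
+   * Hypothetical example: {@code addUnits("nautical_miles", 3440.07, 1.0 / 60.0)}
+   * registers nautical miles, since one degree of arc is 60 nautical miles.
+   * 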
+   * @param strId string identifier for the units
+   * @param earthRadius radius of earth in supplied units
+   * @param multiplierThisToDegrees multiplier to convert to degrees
+   */
+  public static synchronized void addUnits(String strId, double earthRadius, double multiplierThisToDegrees) {
+    //copy-on-write.
+    Map<String, DistanceUnits> map = new HashMap<String, DistanceUnits>(instances);
+    map.put(strId, new DistanceUnits(strId, earthRadius, multiplierThisToDegrees));
+    instances = ImmutableMap.copyOf(map);
+  }
+
+  @Override
+  public String toString() {
+    return getStringIdentifier();
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/util/FSHDFSUtils.java b/solr/core/src/java/org/apache/solr/util/FSHDFSUtils.java
new file mode 100644
index 0000000..8b5fc0c
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/util/FSHDFSUtils.java
@@ -0,0 +1,197 @@
+package org.apache.solr.util;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.lang.reflect.Method;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Borrowed from Apache HBase to recover an HDFS lease.
+ */
+
+public class FSHDFSUtils {
+  public static Logger log = LoggerFactory.getLogger(FSHDFSUtils.class);
+
+
+  /**
+   * Recover the lease from HDFS, retrying multiple times.
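+   * <p>
+   * Intended use (as suggested by the error handling below): call this on a
+   * transaction log path before reopening it, so a previous writer's lease is
+   * released first. This is a no-op for non-HDFS file systems.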
+   */
+  public static void recoverFileLease(final FileSystem fs, final Path p, Configuration conf) throws IOException {
+    // lease recovery not needed for local file system case.
+    if (!(fs instanceof DistributedFileSystem)) return;
+    recoverDFSFileLease((DistributedFileSystem)fs, p, conf);
+  }
+
+  /*
+   * Run the dfs recover lease. recoverLease is asynchronous. It returns:
+   *    - false when it starts the lease recovery (i.e. lease recovery not *yet* done)
+   *    - true when the lease recovery has succeeded or the file is closed.
+   * But, we have to be careful.  Each time we call recoverLease, it starts the recover lease
+   * process over from the beginning.  We could put ourselves in a situation where we are
+   * doing nothing but starting a recovery, interrupting it to start again, and so on.
+   * The findings over in HBASE-8354 have it that the namenode will try to recover the lease
+   * on the file's primary node.  If all is well, it should return near immediately.  But,
+   * as is common, it is the very primary node that has crashed and so the namenode will be
+   * stuck waiting on a socket timeout before it will ask another datanode to start the
+   * recovery. It does not help if we call recoverLease in the meantime and in particular,
+   * subsequent to the socket timeout, a recoverLease invocation will cause us to start
+   * over from square one (possibly waiting on socket timeout against primary node).  So,
+   * in the below, we do the following:
+   * 1. Call recoverLease.
+   * 2. If it returns true, break.
+   * 3. If it returns false, wait a few seconds and then call it again.
+   * 4. If it returns true, break.
+   * 5. If it returns false, wait for what we think the datanode socket timeout is
+   * (configurable) and then try again.
+   * 6. If it returns true, break.
+   * 7. If it returns false, repeat starting at step 5. above.
+   *
+   * If HDFS-4525 is available, call it every second and we might be able to exit early.
+   */
+  static boolean recoverDFSFileLease(final DistributedFileSystem dfs, final Path p, final Configuration conf)
+  throws IOException {
+    log.info("Recovering lease on dfs file " + p);
+    long startWaiting = System.nanoTime();
+    // Default is 15 minutes. It's huge, but the idea is that if we have a major issue, HDFS
+    // usually needs 10 minutes before marking the nodes as dead. So we're putting ourselves
+    // beyond that limit 'to be safe'.
+    long recoveryTimeout = TimeUnit.NANOSECONDS.convert(conf.getInt("solr.hdfs.lease.recovery.timeout", 900000), TimeUnit.MILLISECONDS) + startWaiting;
+    // This setting should be a little bit above what the cluster dfs heartbeat is set to.
+    long firstPause = conf.getInt("solr.hdfs.lease.recovery.first.pause", 4000);
+    // This should be set to how long it'll take for us to timeout against primary datanode if it
+    // is dead.  We set it to 61 seconds, 1 second more than the default READ_TIMEOUT in HDFS
+    // (the default value for DFS_CLIENT_SOCKET_TIMEOUT_KEY).
+    long subsequentPause = conf.getInt("solr.hdfs.lease.recovery.dfs.timeout", 61 * 1000);
+    
+    Method isFileClosedMeth = null;
+    // whether we need to look for isFileClosed method
+    boolean findIsFileClosedMeth = true;
+    boolean recovered = false;
+    // We break the loop when lease recovery succeeds, we time out, or an exception is thrown.
+    for (int nbAttempt = 0; !recovered; nbAttempt++) {
+      recovered = recoverLease(dfs, nbAttempt, p, startWaiting);
+      if (recovered) break;
+      if (checkIfTimedout(conf, recoveryTimeout, nbAttempt, p, startWaiting)) break;
+      try {
+        // On the first time through wait the short 'firstPause'.
+        if (nbAttempt == 0) {
+          Thread.sleep(firstPause);
+        } else {
+          // Cycle here until subsequentPause elapses.  While spinning, check isFileClosed if
+          // available (should be in hadoop 2.0.5... not in hadoop 1 though).
+          long localStartWaiting = System.nanoTime();
+          while ((System.nanoTime() - localStartWaiting) <
+              subsequentPause) {
+            Thread.sleep(conf.getInt("solr.hdfs.lease.recovery.pause", 1000));
+            if (findIsFileClosedMeth) {
+              try {
+                isFileClosedMeth = dfs.getClass().getMethod("isFileClosed",
+                  new Class[]{ Path.class });
+              } catch (NoSuchMethodException nsme) {
+                log.debug("isFileClosed not available");
+              } finally {
+                findIsFileClosedMeth = false;
+              }
+            }
+            if (isFileClosedMeth != null && isFileClosed(dfs, isFileClosedMeth, p)) {
+              recovered = true;
+              break;
+            }
+          }
+        }
+      } catch (InterruptedException ie) {
+        InterruptedIOException iioe = new InterruptedIOException();
+        iioe.initCause(ie);
+        throw iioe;
+      }
+    }
+    return recovered;
+  }
+
+  static boolean checkIfTimedout(final Configuration conf, final long recoveryTimeout,
+      final int nbAttempt, final Path p, final long startWaiting) {
+    if (recoveryTimeout < System.nanoTime()) {
+      log.warn("Cannot recoverLease after trying for " +
+        conf.getInt("solr.hdfs.lease.recovery.timeout", 900000) +
+        "ms (solr.hdfs.lease.recovery.timeout); continuing, but may be DATALOSS!!!; " +
+        getLogMessageDetail(nbAttempt, p, startWaiting));
+      return true;
+    }
+    return false;
+  }
+
+  /**
+   * Try to recover the lease.
+   * @return True if dfs#recoverLease returned true.
+   */
+  static boolean recoverLease(final DistributedFileSystem dfs, final int nbAttempt, final Path p, final long startWaiting)
+    throws FileNotFoundException {
+    boolean recovered = false;
+    try {
+      recovered = dfs.recoverLease(p);
+      log.info("recoverLease=" + recovered + ", " +
+        getLogMessageDetail(nbAttempt, p, startWaiting));
+    } catch (IOException e) {
+      if (e instanceof LeaseExpiredException && e.getMessage().contains("File does not exist")) {
+        // This exception comes out instead of FNFE; translate it into one.
+        throw new FileNotFoundException("The given transactionlog file wasn't found at " + p);
+      } else if (e instanceof FileNotFoundException) {
+        throw (FileNotFoundException)e;
+      }
+      log.warn(getLogMessageDetail(nbAttempt, p, startWaiting), e);
+    }
+    return recovered;
+  }
+
+  /**
+   * @return Detail to append to any log message around lease recovering.
+   */
+  private static String getLogMessageDetail(final int nbAttempt, final Path p, final long startWaiting) {
+    return "attempt=" + nbAttempt + " on file=" + p + " after " +
+      TimeUnit.MILLISECONDS.convert(System.nanoTime() - startWaiting, TimeUnit.NANOSECONDS) + "ms";
+  }
+
+  /**
+   * Call HDFS-4525 isFileClosed if it is available.
+   * 
+   * @return True if file is closed.
+   */
+  private static boolean isFileClosed(final DistributedFileSystem dfs, final Method m, final Path p) {
+    try {
+      return (Boolean) m.invoke(dfs, p);
+    } catch (SecurityException e) {
+      log.warn("No access", e);
+    } catch (Exception e) {
+      log.warn("Failed invocation for " + p.toString(), e);
+    }
+    return false;
+  }
+
+}
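
The lease-recovery loop above must run against both Hadoop 1 and Hadoop 2, so it probes for the HDFS-4525 isFileClosed method reflectively and remembers the outcome, paying the lookup cost at most once. A minimal, self-contained sketch of that probe-once-and-cache pattern follows; the OptionalMethodProbe class and the use of String#isBlank as the probed method are illustrative stand-ins, not part of this patch.

    import java.lang.reflect.Method;

    public class OptionalMethodProbe {
      private static Method cached;   // resolved method, or null if unavailable
      private static boolean probed;  // ensures the lookup happens only once

      static boolean invokeIfPresent(String target) {
        if (!probed) {
          try {
            cached = String.class.getMethod("isBlank");
          } catch (NoSuchMethodException nsme) {
            cached = null;            // this runtime does not offer the method
          } finally {
            probed = true;            // never probe again, hit or miss
          }
        }
        if (cached == null) return false;
        try {
          return (Boolean) cached.invoke(target);
        } catch (Exception e) {
          return false;               // treat reflective failure as unsupported
        }
      }

      public static void main(String[] args) {
        System.out.println(invokeIfPresent("   ")); // true on Java 11+, false before
      }
    }
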
diff --git a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
index fa3be11..d9879ea 100644
--- a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
+++ b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java
@@ -76,7 +76,7 @@
 public class SimplePostTool {
   private static final String DEFAULT_POST_HOST = "localhost";
   private static final String DEFAULT_POST_PORT = "8983";
-  private static final String VERSION_OF_THIS_TOOL = "1.5";
+  private static final String VERSION_OF_THIS_TOOL = "5.0.0";  // TODO: hardcoded for now, but eventually to sync with actual Solr version
 
   private static final String DEFAULT_COMMIT = "yes";
   private static final String DEFAULT_OPTIMIZE = "no";
@@ -130,7 +130,7 @@
     DATA_MODES.add(DATA_MODE_WEB);
     
     mimeMap = new HashMap<>();
-    mimeMap.put("xml", "text/xml");
+    mimeMap.put("xml", "application/xml");
     mimeMap.put("csv", "text/csv");
     mimeMap.put("json", "application/json");
     mimeMap.put("pdf", "application/pdf");
@@ -303,7 +303,7 @@
     currentDepth = 0;
     // Skip posting files if special param "-" given  
     if (!args[0].equals("-")) {
-      info("Posting files to base url " + solrUrl + (!auto?" using content-type "+(type==null?DEFAULT_CONTENT_TYPE:type):"")+"..");
+      info("Posting files to [base] url " + solrUrl + (!auto?" using content-type "+(type==null?DEFAULT_CONTENT_TYPE:type):"")+"...");
       if(auto)
         info("Entering auto mode. File endings considered are "+fileTypes);
       if(recursive > 0)
@@ -314,7 +314,7 @@
   }
 
   private void doArgsMode() {
-    info("POSTing args to " + solrUrl + "..");
+    info("POSTing args to " + solrUrl + "...");
     for (String a : args) {
       postData(stringToStream(a), null, out, type, solrUrl);
     }
@@ -355,7 +355,7 @@
   }
 
   private void doStdinMode() {
-    info("POSTing stdin to " + solrUrl + "..");
+    info("POSTing stdin to " + solrUrl + "...");
     postData(System.in, null, out, type, solrUrl);    
   }
 
@@ -380,7 +380,7 @@
     (USAGE_STRING_SHORT+"\n\n" +
      "Supported System Properties and their defaults:\n"+
      "  -Dc=<core/collection>\n"+
-     "  -Durl=<solr-update-url> \n"+
+     "  -Durl=<base Solr update URL> (overrides -Dc option if specified)\n"+
      "  -Ddata=files|web|args|stdin (default=" + DEFAULT_DATA_MODE + ")\n"+
      "  -Dtype=<content-type> (default=" + DEFAULT_CONTENT_TYPE + ")\n"+
      "  -Dhost=<host> (default: " + DEFAULT_POST_HOST+ ")\n"+
@@ -398,13 +398,13 @@
      "Data can be read from files specified as commandline args,\n"+
      "URLs specified as args, as raw commandline arg strings or via STDIN.\n"+
      "Examples:\n"+
-     "  java -jar post.jar *.xml\n"+
+     "  java -Dc=gettingstarted -jar post.jar *.xml\n"+
      "  java -Ddata=args -Dc=gettingstarted -jar post.jar '<delete><id>42</id></delete>'\n"+
      "  java -Ddata=stdin -Dc=gettingstarted -jar post.jar < hd.xml\n"+
      "  java -Ddata=web -Dc=gettingstarted -jar post.jar http://example.com/\n"+
      "  java -Dtype=text/csv -Dc=gettingstarted -jar post.jar *.csv\n"+
      "  java -Dtype=application/json -Dc=gettingstarted -jar post.jar *.json\n"+
-     "  java -Durl=http://localhost:8983/solr/update/extract -Dparams=literal.id=a -Dtype=application/pdf -jar post.jar a.pdf\n"+
+     "  java -Durl=http://localhost:8983/solr/techproducts/update/extract -Dparams=literal.id=pdf1 -jar post.jar solr-word.pdf\n"+
      "  java -Dauto -Dc=gettingstarted -jar post.jar *\n"+
      "  java -Dauto -Dc=gettingstarted -Drecursive -jar post.jar afolder\n"+
      "  java -Dauto -Dc=gettingstarted -Dfiletypes=ppt,html -jar post.jar afolder\n"+
@@ -729,7 +729,7 @@
    * Does a simple commit operation 
    */
   public void commit() {
-    info("COMMITting Solr index changes to " + solrUrl + "..");
+    info("COMMITting Solr index changes to " + solrUrl + "...");
     doGet(appendParam(solrUrl.toString(), "commit=true"));
   }
 
@@ -737,7 +737,7 @@
    * Does a simple optimize operation 
    */
   public void optimize() {
-    info("Performing an OPTIMIZE to " + solrUrl + "..");
+    info("Performing an OPTIMIZE to " + solrUrl + "...");
     doGet(appendParam(solrUrl.toString(), "optimize=true"));
   }
 
@@ -769,16 +769,18 @@
     InputStream is = null;
     try {
       URL url = solrUrl;
+      String suffix = "";
       if(auto) {
         if(type == null) {
           type = guessType(file);
         }
         if(type != null) {
-          if(type.equals("text/xml") || type.equals("text/csv") || type.equals("application/json")) {
+          if(type.equals("application/xml") || type.equals("text/csv") || type.equals("application/json")) {
             // Default handler
           } else {
             // SolrCell
-            String urlStr = appendUrlPath(solrUrl, "/extract").toString();
+            suffix = "/extract";
+            String urlStr = appendUrlPath(solrUrl, suffix).toString();
             if(urlStr.indexOf("resource.name")==-1)
               urlStr = appendParam(urlStr, "resource.name=" + URLEncoder.encode(file.getAbsolutePath(), "UTF-8"));
             if(urlStr.indexOf("literal.id")==-1)
@@ -792,7 +794,7 @@
       } else {
         if(type == null) type = DEFAULT_CONTENT_TYPE;
       }
-      info("POSTing file " + file.getName() + (auto?" ("+type+")":""));
+      info("POSTing file " + file.getName() + (auto?" ("+type+")":"") + " to [base]" + suffix);
       is = new FileInputStream(file);
       postData(is, (int)file.length(), output, type, url);
     } catch (IOException e) {
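
The postFile changes above make the routing rule explicit: XML, CSV, and JSON go to the collection's default update handler, while every other auto-detected type is redirected to the Solr Cell /extract endpoint, and the log line now reports the [base]-relative suffix. A compact sketch of just that routing decision, factored into a hypothetical helper (endpointSuffixFor is not a method of SimplePostTool):

    // Path suffix appended to the base update URL for an auto-detected
    // content type, mirroring the branch in postFile above.
    static String endpointSuffixFor(String contentType) {
      switch (contentType) {
        case "application/xml":
        case "text/csv":
        case "application/json":
          return "";          // handled by the default /update handler
        default:
          return "/extract";  // rich documents (PDF, Word, ...) go to Solr Cell
      }
    }
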
diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
index 809b366..a1be0d0 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java
@@ -36,6 +36,7 @@
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.utils.URIBuilder;
 import org.apache.http.conn.ConnectTimeoutException;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.util.EntityUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
@@ -112,10 +113,8 @@
       
       log.debug("Connecting to Solr cluster: " + zkHost);
       int exitStatus = 0;
-      CloudSolrClient cloudSolrClient = null;
-      try {
-        cloudSolrClient = new CloudSolrClient(zkHost);
-        
+      try (CloudSolrClient cloudSolrClient = new CloudSolrClient(zkHost)) {
+
         String collection = cli.getOptionValue("collection");
         if (collection != null)
           cloudSolrClient.setDefaultCollection(collection);
@@ -126,17 +125,11 @@
         // since this is a CLI, spare the user the stacktrace
         String excMsg = exc.getMessage();
         if (excMsg != null) {
-          System.err.println("\nERROR:"+excMsg+"\n");
+          System.err.println("\nERROR: "+excMsg+"\n");
           exitStatus = 1;
         } else {
           throw exc;
         }
-      } finally {
-        if (cloudSolrClient != null) {
-          try {
-            cloudSolrClient.shutdown();
-          } catch (Exception ignore) {}
-        }
       }
       
       return exitStatus;
@@ -191,9 +184,36 @@
     CommandLine cli = 
         processCommandLineArgs(joinCommonAndToolOptions(tool.getOptions()), toolArgs);
 
+    // for SSL support, try to accommodate relative paths set for SSL store props
+    String solrInstallDir = System.getProperty("solr.install.dir");
+    if (solrInstallDir != null) {
+      checkSslStoreSysProp(solrInstallDir, "keyStore");
+      checkSslStoreSysProp(solrInstallDir, "trustStore");
+    }
+
     // run the tool
     System.exit(tool.runTool(cli));
   }
+
+  protected static void checkSslStoreSysProp(String solrInstallDir, String key) {
+    String sysProp = "javax.net.ssl."+key;
+    String keyStore = System.getProperty(sysProp);
+    if (keyStore == null)
+      return;
+
+    File keyStoreFile = new File(keyStore);
+    if (keyStoreFile.isFile())
+      return; // configured setting is OK
+
+    keyStoreFile = new File(solrInstallDir, "server/"+keyStore);
+    if (keyStoreFile.isFile()) {
+      System.setProperty(sysProp, keyStoreFile.getAbsolutePath());
+    } else {
+      System.err.println("WARNING: "+sysProp+" file "+keyStore+
+          " not found! https requests to Solr will likely fail; please update your "+
+          sysProp+" setting to use an absolute path.");
+    }
+  }
   
   /**
    * Support options common to all tools.
@@ -214,6 +234,10 @@
       return new CreateCollectionTool();
     else if ("create_core".equals(toolType))
       return new CreateCoreTool();
+    else if ("create".equals(toolType))
+      return new CreateTool();
+    else if ("delete".equals(toolType))
+      return new DeleteTool();
 
     // If you add a built-in tool to this class, add it here to avoid
     // classpath scanning
@@ -234,6 +258,8 @@
     formatter.printHelp("api", getToolOptions(new ApiTool()));
     formatter.printHelp("create_collection", getToolOptions(new CreateCollectionTool()));
     formatter.printHelp("create_core", getToolOptions(new CreateCoreTool()));
+    formatter.printHelp("create", getToolOptions(new CreateTool()));
+    formatter.printHelp("delete", getToolOptions(new DeleteTool()));
 
     List<Class<Tool>> toolClasses = findToolClassesInPackage("org.apache.solr.util");
     for (Class<Tool> next : toolClasses) {
@@ -379,7 +405,7 @@
     return wasCommError;
   }
   
-  public static HttpClient getHttpClient() {
+  public static CloseableHttpClient getHttpClient() {
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(HttpClientUtil.PROP_MAX_CONNECTIONS, 128);
     params.set(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST, 32);
@@ -388,10 +414,10 @@
   }
   
   @SuppressWarnings("deprecation")
-  public static void closeHttpClient(HttpClient httpClient) {
+  public static void closeHttpClient(CloseableHttpClient httpClient) {
     if (httpClient != null) {
       try {
-        httpClient.getConnectionManager().shutdown();
+        HttpClientUtil.close(httpClient);
       } catch (Exception exc) {
         // safe to ignore, we're just shutting things down
       }
@@ -403,7 +429,7 @@
    */
   public static Map<String,Object> getJson(String getUrl) throws Exception {
     Map<String,Object> json = null;
-    HttpClient httpClient = getHttpClient();
+    CloseableHttpClient httpClient = getHttpClient();
     try {
       json = getJson(httpClient, getUrl, 2);
     } finally {
@@ -444,7 +470,16 @@
     public Map<String,Object> handleResponse(HttpResponse response) throws ClientProtocolException, IOException {
       HttpEntity entity = response.getEntity();
       if (entity != null) {
-        Object resp = ObjectBuilder.getVal(new JSONParser(EntityUtils.toString(entity)));
+
+        String respBody = EntityUtils.toString(entity);
+        Object resp = null;
+        try {
+          resp = ObjectBuilder.getVal(new JSONParser(respBody));
+        } catch (JSONParser.ParseException pe) {
+          throw new ClientProtocolException("Expected JSON response from server but received: "+respBody+
+              "\nTypically, this indicates a problem with the Solr server; check the Solr server logs for more information.");
+        }
+
         if (resp != null && resp instanceof Map) {
           return (Map<String,Object>)resp;
         } else {
@@ -595,7 +630,7 @@
 
       int exitCode = 0;
       String systemInfoUrl = solrUrl+"admin/info/system";
-      HttpClient httpClient = getHttpClient();
+      CloseableHttpClient httpClient = getHttpClient();
       try {
         // hit Solr to get system info
         Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
@@ -878,7 +913,7 @@
       if (collection == null)
         throw new IllegalArgumentException("Must provide a collection to run a healthcheck against!");
       
-      log.info("Running healthcheck for "+collection);
+      log.debug("Running healthcheck for "+collection);
       
       ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
 
@@ -930,12 +965,13 @@
             replicaStatus = ZkStateReader.DOWN;
           } else {
             // query this replica directly to get doc count and assess health
-            HttpSolrClient solr = new HttpSolrClient(coreUrl);
-            String solrUrl = solr.getBaseURL();
             q = new SolrQuery("*:*");
             q.setRows(0);
             q.set("distrib", "false");
-            try {
+            try (HttpSolrClient solr = new HttpSolrClient(coreUrl)) {
+
+              String solrUrl = solr.getBaseURL();
+
               qr = solr.query(q);
               numDocs = qr.getResults().getNumFound();
 
@@ -950,15 +986,13 @@
               // if we get here, we can trust the state
               replicaStatus = replicaCoreProps.getState();
             } catch (Exception exc) {
-              log.error("ERROR: " + exc + " when trying to reach: " + solrUrl);
+              log.error("ERROR: " + exc + " when trying to reach: " + coreUrl);
 
               if (checkCommunicationError(exc)) {
                 replicaStatus = "down";
               } else {
                 replicaStatus = "error: "+exc;
               }
-            } finally {
-              solr.shutdown();
             }
           }
 
@@ -992,6 +1026,101 @@
     }
   } // end HealthcheckTool
 
+  private static final Option[] CREATE_COLLECTION_OPTIONS = new Option[] {
+    OptionBuilder
+        .withArgName("HOST")
+        .hasArg()
+        .isRequired(false)
+        .withDescription("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST)
+        .create("zkHost"),
+        OptionBuilder
+            .withArgName("HOST")
+            .hasArg()
+            .isRequired(false)
+            .withDescription("Base Solr URL, which can be used to determine the zkHost if that's not known")
+            .create("solrUrl"),
+        OptionBuilder
+            .withArgName("NAME")
+            .hasArg()
+            .isRequired(true)
+            .withDescription("Name of collection to create.")
+            .create("name"),
+        OptionBuilder
+            .withArgName("#")
+            .hasArg()
+            .isRequired(false)
+            .withDescription("Number of shards; default is 1")
+            .create("shards"),
+        OptionBuilder
+            .withArgName("#")
+            .hasArg()
+            .isRequired(false)
+            .withDescription("Number of copies of each document across the collection (replicas per shard); default is 1")
+            .create("replicationFactor"),
+        OptionBuilder
+            .withArgName("#")
+            .hasArg()
+            .isRequired(false)
+            .withDescription("Maximum number of shards per Solr node; default is determined based on the number of shards, replication factor, and live nodes.")
+            .create("maxShardsPerNode"),
+        OptionBuilder
+            .withArgName("NAME")
+            .hasArg()
+            .isRequired(false)
+            .withDescription("Configuration directory to copy when creating the new collection; default is "+DEFAULT_CONFIG_SET)
+            .create("confdir"),
+        OptionBuilder
+            .withArgName("NAME")
+            .hasArg()
+            .isRequired(false)
+            .withDescription("Configuration name; default is the collection name")
+            .create("confname"),
+        OptionBuilder
+            .withArgName("DIR")
+            .hasArg()
+            .isRequired(true)
+            .withDescription("Path to configsets directory on the local system.")
+            .create("configsetsDir")
+  };
+
+  public static String getZkHost(CommandLine cli) throws Exception {
+    String zkHost = cli.getOptionValue("zkHost");
+    if (zkHost != null)
+      return zkHost;
+
+    // find it using the localPort
+    String solrUrl = cli.getOptionValue("solrUrl");
+    if (solrUrl == null)
+      throw new IllegalStateException(
+          "Must provide either the -zkHost or -solrUrl parameters to use the create_collection command!");
+
+    if (!solrUrl.endsWith("/"))
+      solrUrl += "/";
+
+    String systemInfoUrl = solrUrl+"admin/info/system";
+    CloseableHttpClient httpClient = getHttpClient();
+    try {
+      // hit Solr to get system info
+      Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
+
+      // convert raw JSON into user-friendly output
+      StatusTool statusTool = new StatusTool();
+      Map<String,Object> status = statusTool.reportStatus(solrUrl, systemInfo, httpClient);
+      Map<String,Object> cloud = (Map<String, Object>)status.get("cloud");
+      if (cloud != null) {
+        String zookeeper = (String) cloud.get("ZooKeeper");
+        if (zookeeper.endsWith("(embedded)")) {
+          zookeeper = zookeeper.substring(0, zookeeper.length() - "(embedded)".length());
+        }
+        zkHost = zookeeper;
+      }
+    } finally {
+      HttpClientUtil.close(httpClient);
+    }
+
+    return zkHost;
+  }
+
   /**
    * Supports create_collection command in the bin/solr script.
    */
@@ -1005,56 +1134,7 @@
     @SuppressWarnings("static-access")
     @Override
     public Option[] getOptions() {
-      return new Option[] {
-          OptionBuilder
-              .withArgName("HOST")
-              .hasArg()
-              .isRequired(false)
-              .withDescription("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST)
-              .create("zkHost"),
-          OptionBuilder
-              .withArgName("HOST")
-              .hasArg()
-              .isRequired(false)
-              .withDescription("Base Solr URL, which can be used to determine the zkHost if that's not known")
-              .create("solrUrl"),
-          OptionBuilder
-              .withArgName("NAME")
-              .hasArg()
-              .isRequired(true)
-              .withDescription("Name of collection to create.")
-              .create("name"),
-          OptionBuilder
-              .withArgName("#")
-              .hasArg()
-              .isRequired(false)
-              .withDescription("Number of shards; default is 1")
-              .create("shards"),
-          OptionBuilder
-              .withArgName("#")
-              .hasArg()
-              .isRequired(false)
-              .withDescription("Number of copies of each document across the collection (replicas per shard); default is 1")
-              .create("replicationFactor"),
-          OptionBuilder
-              .withArgName("#")
-              .hasArg()
-              .isRequired(false)
-              .withDescription("Maximum number of shards per Solr node; default is determined based on the number of shards, replication factor, and live nodes.")
-              .create("maxShardsPerNode"),
-          OptionBuilder
-              .withArgName("NAME")
-              .hasArg()
-              .isRequired(false)
-              .withDescription("Name of the configuration for this collection; default is "+DEFAULT_CONFIG_SET)
-              .create("config"),
-          OptionBuilder
-              .withArgName("DIR")
-              .hasArg()
-              .isRequired(true)
-              .withDescription("Path to configsets directory on the local system.")
-              .create("configsetsDir")
-      };
+      return CREATE_COLLECTION_OPTIONS;
     }
 
     public int runTool(CommandLine cli) throws Exception {
@@ -1063,49 +1143,17 @@
       LogManager.getLogger("org.apache.zookeeper").setLevel(Level.ERROR);
       LogManager.getLogger("org.apache.solr.common.cloud").setLevel(Level.WARN);
 
-      String zkHost = cli.getOptionValue("zkHost");
+      String zkHost = getZkHost(cli);
       if (zkHost == null) {
-        // find it using the localPort
-        String solrUrl = cli.getOptionValue("solrUrl");
-        if (solrUrl == null)
-          throw new IllegalStateException(
-              "Must provide either the -zkHost or -solrUrl parameters to use the create_collection command!");
-
-        if (!solrUrl.endsWith("/"))
-          solrUrl += "/";
-
-        String systemInfoUrl = solrUrl+"admin/info/system";
-        HttpClient httpClient = getHttpClient();
-        try {
-          // hit Solr to get system info
-          Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
-
-          // convert raw JSON into user-friendly output
-          StatusTool statusTool = new StatusTool();
-          Map<String,Object> status = statusTool.reportStatus(solrUrl, systemInfo, httpClient);
-
-          Map<String,Object> cloud = (Map<String, Object>)status.get("cloud");
-          if (cloud == null) {
-            System.err.println("\nERROR: Solr at "+solrUrl+
-                " is running in standalone server mode, please use the create_core command instead;\n" +
-                "create_collection can only be used when running in SolrCloud mode.\n");
-            return 1;
-          }
-
-          String zookeeper = (String) cloud.get("ZooKeeper");
-          if (zookeeper.endsWith("(embedded)")) {
-            zookeeper = zookeeper.substring(0,zookeeper.length()-"(embedded)".length());
-          }
-          zkHost = zookeeper;
-        } finally {
-          closeHttpClient(httpClient);
-        }
+        System.err.println("\nERROR: Solr at "+cli.getOptionValue("solrUrl")+
+            " is running in standalone server mode, please use the create_core command instead;\n" +
+            "create_collection can only be used when running in SolrCloud mode.\n");
+        return 1;
       }
 
       int toolExitStatus = 0;
-      CloudSolrClient cloudSolrServer = null;
-      try {
-        cloudSolrServer = new CloudSolrClient(zkHost);
+
+      try (CloudSolrClient cloudSolrServer = new CloudSolrClient(zkHost)) {
         System.out.println("Connecting to ZooKeeper at " + zkHost);
         cloudSolrServer.connect();
         toolExitStatus = runCloudTool(cloudSolrServer, cli);
@@ -1118,12 +1166,6 @@
         } else {
           throw exc;
         }
-      } finally {
-        if (cloudSolrServer != null) {
-          try {
-            cloudSolrServer.shutdown();
-          } catch (Exception ignore) {}
-        }
       }
 
       return toolExitStatus;
@@ -1136,6 +1178,8 @@
             "there is at least 1 live node in the cluster.");
       String firstLiveNode = liveNodes.iterator().next();
 
+      String collectionName = cli.getOptionValue("name");
+
       // build a URL to create the collection
       int numShards = optionAsInt(cli, "shards", 1);
       int replicationFactor = optionAsInt(cli, "replicationFactor", 1);
@@ -1149,48 +1193,51 @@
         maxShardsPerNode = ((numShards*replicationFactor)+numNodes-1)/numNodes;
       }
 
-      String configSet = cli.getOptionValue("config", DEFAULT_CONFIG_SET);
-      String configSetNameInZk = configSet;
-      File configSetDir = null;
-      // we try to be flexible and allow the user to specify a configuration directory instead of a configset name
-      File possibleConfigDir = new File(configSet);
-      if (possibleConfigDir.isDirectory()) {
-        configSetDir = possibleConfigDir;
-        configSetNameInZk = possibleConfigDir.getName();
+      String confname = cli.getOptionValue("confname", collectionName);
+      boolean configExistsInZk =
+          cloudSolrClient.getZkStateReader().getZkClient().exists("/configs/"+confname, true);
+
+      if (configExistsInZk) {
+        System.out.println("Re-using existing configuration directory "+confname);
       } else {
-        File configsetsDir = new File(cli.getOptionValue("configsetsDir"));
-        if (!configsetsDir.isDirectory())
-          throw new FileNotFoundException(configsetsDir.getAbsolutePath()+" not found!");
-
-        // upload the configset if it exists
-        configSetDir = new File(configsetsDir, configSet);
-        if (!configSetDir.isDirectory()) {
-          throw new FileNotFoundException("Specified config " + configSet +
-              " not found in " + configsetsDir.getAbsolutePath());
-        }
-      }
-
-      File confDir = new File(configSetDir, "conf");
-      if (!confDir.isDirectory()) {
-        // config dir should contain a conf sub-directory but if not and there's a solrconfig.xml, then use it
-        if ((new File(configSetDir, "solrconfig.xml")).isFile()) {
-          confDir = configSetDir;
+        String configSet = cli.getOptionValue("confdir", DEFAULT_CONFIG_SET);
+        File configSetDir = null;
+        // we try to be flexible and allow the user to specify a configuration directory instead of a configset name
+        File possibleConfigDir = new File(configSet);
+        if (possibleConfigDir.isDirectory()) {
+          configSetDir = possibleConfigDir;
         } else {
-          System.err.println("Specified configuration directory "+configSetDir.getAbsolutePath()+
-              " is invalid;\nit should contain either conf sub-directory or solrconfig.xml");
-          return 1;
-        }
-      }
+          File configsetsDir = new File(cli.getOptionValue("configsetsDir"));
+          if (!configsetsDir.isDirectory())
+            throw new FileNotFoundException(configsetsDir.getAbsolutePath()+" not found!");
 
-      // test to see if that config exists in ZK
-      if (!cloudSolrClient.getZkStateReader().getZkClient().exists("/configs/"+configSetNameInZk, true)) {
+          // upload the configset if it exists
+          configSetDir = new File(configsetsDir, configSet);
+          if (!configSetDir.isDirectory()) {
+            throw new FileNotFoundException("Specified config " + configSet +
+                " not found in " + configsetsDir.getAbsolutePath());
+          }
+        }
+
+        File confDir = new File(configSetDir, "conf");
+        if (!confDir.isDirectory()) {
+          // config dir should contain a conf sub-directory but if not and there's a solrconfig.xml, then use it
+          if ((new File(configSetDir, "solrconfig.xml")).isFile()) {
+            confDir = configSetDir;
+          } else {
+            System.err.println("Specified configuration directory "+configSetDir.getAbsolutePath()+
+                " is invalid;\nit should contain either conf sub-directory or solrconfig.xml");
+            return 1;
+          }
+        }
+
+        // the config does not exist in ZK yet, so upload it
         System.out.println("Uploading "+confDir.getAbsolutePath()+
-            " for config "+configSetNameInZk+" to ZooKeeper at "+cloudSolrClient.getZkHost());
-        ZkController.uploadConfigDir(cloudSolrClient.getZkStateReader().getZkClient(), confDir, configSetNameInZk);
+            " for config "+confname+" to ZooKeeper at "+cloudSolrClient.getZkHost());
+        ZkController.uploadConfigDir(cloudSolrClient.getZkStateReader().getZkClient(), confDir, confname);
       }
 
       String baseUrl = cloudSolrClient.getZkStateReader().getBaseUrlForNodeName(firstLiveNode);
-      String collectionName = cli.getOptionValue("name");
 
       // since creating a collection is a heavy-weight operation, check for existence first
       String collectionListUrl = baseUrl+"/admin/collections?action=list";
@@ -1210,7 +1257,7 @@
               numShards,
               replicationFactor,
               maxShardsPerNode,
-              configSetNameInZk);
+              confname);
 
       System.out.println("\nCreating new collection '"+collectionName+"' using command:\n"+createCollectionUrl+"\n");
 
@@ -1281,8 +1328,8 @@
               .withArgName("CONFIG")
               .hasArg()
               .isRequired(false)
-              .withDescription("Name of the configuration for this core; default is "+DEFAULT_CONFIG_SET)
-              .create("config"),
+              .withDescription("Configuration directory to copy when creating the new core; default is "+DEFAULT_CONFIG_SET)
+              .create("confdir"),
           OptionBuilder
               .withArgName("DIR")
               .hasArg()
@@ -1303,7 +1350,7 @@
       if (!configsetsDir.isDirectory())
         throw new FileNotFoundException(configsetsDir.getAbsolutePath() + " not found!");
 
-      String configSet = cli.getOptionValue("config", DEFAULT_CONFIG_SET);
+      String configSet = cli.getOptionValue("confdir", DEFAULT_CONFIG_SET);
       File configSetDir = new File(configsetsDir, configSet);
       if (!configSetDir.isDirectory()) {
         // we allow them to pass a directory instead of a configset name
@@ -1311,7 +1358,7 @@
         if (possibleConfigDir.isDirectory()) {
           configSetDir = possibleConfigDir;
         } else {
-          throw new FileNotFoundException("Specified config " + configSet +
+          throw new FileNotFoundException("Specified config directory " + configSet +
               " not found in " + configsetsDir.getAbsolutePath());
         }
       }
@@ -1319,7 +1366,7 @@
       String coreName = cli.getOptionValue("name");
 
       String systemInfoUrl = solrUrl+"admin/info/system";
-      HttpClient httpClient = getHttpClient();
+      CloseableHttpClient httpClient = getHttpClient();
       String solrHome = null;
       try {
         Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
@@ -1414,4 +1461,259 @@
       return exists;
     }
   } // end CreateCoreTool class
+
+  public static class CreateTool implements Tool {
+
+    @Override
+    public String getName() {
+      return "create";
+    }
+
+    @SuppressWarnings("static-access")
+    @Override
+    public Option[] getOptions() {
+      return CREATE_COLLECTION_OPTIONS;
+    }
+
+    @Override
+    public int runTool(CommandLine cli) throws Exception {
+
+      String solrUrl = cli.getOptionValue("solrUrl", "http://localhost:8983/solr");
+      if (!solrUrl.endsWith("/"))
+        solrUrl += "/";
+
+      String systemInfoUrl = solrUrl+"admin/info/system";
+      CloseableHttpClient httpClient = getHttpClient();
+
+      int result = -1;
+      Tool tool = null;
+      try {
+        Map<String, Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
+        if ("solrcloud".equals(systemInfo.get("mode"))) {
+          tool = new CreateCollectionTool();
+        } else {
+          tool = new CreateCoreTool();
+        }
+        result = tool.runTool(cli);
+      } catch (Exception exc) {
+        System.err.println("ERROR: create failed due to: "+exc.getMessage());
+        System.err.println();
+        result = 1;
+      } finally {
+        closeHttpClient(httpClient);
+      }
+
+      return result;
+    }
+
+  } // end CreateTool class
+
+  public static class DeleteTool implements Tool {
+
+    @Override
+    public String getName() {
+      return "delete";
+    }
+
+    @SuppressWarnings("static-access")
+    @Override
+    public Option[] getOptions() {
+      return new Option[]{
+          OptionBuilder
+              .withArgName("URL")
+              .hasArg()
+              .isRequired(false)
+              .withDescription("Base Solr URL, default is http://localhost:8983/solr")
+              .create("solrUrl"),
+          OptionBuilder
+              .withArgName("NAME")
+              .hasArg()
+              .isRequired(true)
+              .withDescription("Name of the core / collection to delete.")
+              .create("name"),
+          OptionBuilder
+              .withArgName("true|false")
+              .hasArg()
+              .isRequired(false)
+              .withDescription("Flag to indicate if the underlying configuration directory for a collection should also be deleted; default is true")
+              .create("deleteConfig"),
+          OptionBuilder
+              .isRequired(false)
+              .withDescription("Skip safety checks when deleting the configuration directory used by a collection")
+              .create("forceDeleteConfig"),
+          OptionBuilder
+              .withArgName("HOST")
+              .hasArg()
+              .isRequired(false)
+              .withDescription("Address of the Zookeeper ensemble; defaults to: "+ZK_HOST)
+              .create("zkHost")
+      };
+    }
+
+    @Override
+    public int runTool(CommandLine cli) throws Exception {
+
+      // quiet down the ZK logging for cli tools
+      LogManager.getLogger("org.apache.zookeeper").setLevel(Level.ERROR);
+      LogManager.getLogger("org.apache.solr.common.cloud").setLevel(Level.WARN);
+
+      String solrUrl = cli.getOptionValue("solrUrl", "http://localhost:8983/solr");
+      if (!solrUrl.endsWith("/"))
+        solrUrl += "/";
+
+      String systemInfoUrl = solrUrl+"admin/info/system";
+      CloseableHttpClient httpClient = getHttpClient();
+
+      int result = 0;
+      try {
+        Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
+        if ("solrcloud".equals(systemInfo.get("mode"))) {
+          result = deleteCollection(cli);
+        } else {
+          result = deleteCore(cli, httpClient, solrUrl);
+        }
+      } finally {
+        closeHttpClient(httpClient);
+      }
+
+      return result;
+    }
+
+    protected int deleteCollection(CommandLine cli) throws Exception {
+
+      String zkHost = getZkHost(cli);
+
+      int toolExitStatus = 0;
+      try (CloudSolrClient cloudSolrClient = new CloudSolrClient(zkHost)) {
+        System.out.println("Connecting to ZooKeeper at " + zkHost);
+        cloudSolrClient.connect();
+        toolExitStatus = deleteCollection(cloudSolrClient, cli);
+      } catch (Exception exc) {
+        // since this is a CLI, spare the user the stacktrace
+        String excMsg = exc.getMessage();
+        if (excMsg != null) {
+          System.err.println("\nERROR: "+excMsg+"\n");
+          toolExitStatus = 1;
+        } else {
+          throw exc;
+        }
+      }
+
+      return toolExitStatus;
+    }
+
+    protected int deleteCollection(CloudSolrClient cloudSolrClient, CommandLine cli) throws Exception {
+      Set<String> liveNodes = cloudSolrClient.getZkStateReader().getClusterState().getLiveNodes();
+      if (liveNodes.isEmpty())
+        throw new IllegalStateException("No live nodes found! Cannot delete a collection until " +
+            "there is at least 1 live node in the cluster.");
+      String firstLiveNode = liveNodes.iterator().next();
+      ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
+      String baseUrl = zkStateReader.getBaseUrlForNodeName(firstLiveNode);
+
+      String collectionName = cli.getOptionValue("name");
+
+      if (!zkStateReader.getClusterState().hasCollection(collectionName)) {
+        System.err.println("\nERROR: Collection "+collectionName+" not found!");
+        System.err.println();
+        return 1;
+      }
+
+      String configName = zkStateReader.readConfigName(collectionName);
+      boolean deleteConfig = "true".equals(cli.getOptionValue("deleteConfig", "true"));
+      if (deleteConfig && configName != null) {
+        if (cli.hasOption("forceDeleteConfig")) {
+          log.warn("Skipping safety checks, configuration directory "+configName+" will be deleted with impunity.");
+        } else {
+          // need to scan all collections to see if any are using the config
+          Set<String> collections = zkStateReader.getClusterState().getCollections();
+
+          // give a little note to the user if there are many collections in case it takes a while
+          if (collections.size() > 50)
+            log.info("Scanning " + collections.size() +
+                " to ensure no other collections are using config " + configName);
+
+          for (String next : collections) {
+            if (collectionName.equals(next))
+              continue; // don't check the collection we're deleting
+
+            if (configName.equals(zkStateReader.readConfigName(next))) {
+              deleteConfig = false;
+              log.warn("Configuration directory "+configName+" is also being used by "+next+
+                  "; configuration will not be deleted from ZooKeeper. You can pass the -forceDeleteConfig flag to force delete.");
+              break;
+            }
+          }
+        }
+      }
+
+      String deleteCollectionUrl =
+          String.format(Locale.ROOT,
+              "%s/admin/collections?action=DELETE&name=%s",
+              baseUrl,
+              collectionName);
+
+      System.out.println("\nDeleting collection '"+collectionName+"' using command:\n"+deleteCollectionUrl+"\n");
+
+      Map<String,Object> json = null;
+      try {
+        json = getJson(deleteCollectionUrl);
+      } catch (SolrServerException sse) {
+        System.err.println("Failed to delete collection '"+collectionName+"' due to: "+sse.getMessage());
+        System.err.println();
+        return 1;
+      }
+
+      if (deleteConfig) {
+        String configZnode = "/configs/" + configName;
+        try {
+          zkStateReader.getZkClient().clean(configZnode);
+        } catch (Exception exc) {
+          System.err.println("\nERROR: Failed to delete configuration directory "+configZnode+" in ZooKeeper due to: "+
+            exc.getMessage()+"\nYou'll need to manually delete this znode using the zkcli script.");
+        }
+      }
+
+      if (json != null) {
+        CharArr arr = new CharArr();
+        new JSONWriter(arr, 2).write(json);
+        System.out.println(arr.toString());
+        System.out.println();
+      }
+
+      return 0;
+    }
+
+    protected int deleteCore(CommandLine cli, CloseableHttpClient httpClient, String solrUrl) throws Exception {
+
+      int status = 0;
+      String coreName = cli.getOptionValue("name");
+      String deleteCoreUrl =
+          String.format(Locale.ROOT,
+              "%sadmin/cores?action=UNLOAD&core=%s&deleteIndex=true&deleteDataDir=true&deleteInstanceDir=true",
+              solrUrl,
+              coreName);
+
+      System.out.println("\nDeleting core '"+coreName+"' using command:\n"+deleteCoreUrl+"\n");
+
+      Map<String,Object> json = null;
+      try {
+        json = getJson(deleteCoreUrl);
+      } catch (SolrServerException sse) {
+        System.err.println("Failed to delete core '"+coreName+"' due to: "+sse.getMessage());
+        System.err.println();
+        status = 1;
+      }
+
+      if (json != null) {
+        CharArr arr = new CharArr();
+        new JSONWriter(arr, 2).write(json);
+        System.out.println(arr.toString());
+        System.out.println();
+      }
+
+      return status;
+    }
+
+  } // end DeleteTool class
 }
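
Taken together, these SolrCLI changes add mode-aware create and delete commands: each probes admin/info/system and picks the SolrCloud or standalone code path based on the reported mode. Assuming the usual SolrCLI dispatch, where the first argument selects the tool and the remaining flags are the Option definitions shown above (the bin/solr wrapper itself is not part of this hunk, so the exact invocation below is illustrative):

    # create: becomes create_collection in SolrCloud mode, create_core otherwise
    bin/solr create -name gettingstarted -shards 2 -replicationFactor 2 \
        -configsetsDir server/solr/configsets

    # delete: removes the collection and, by default, its configuration
    # directory in ZooKeeper, unless another collection still uses it
    bin/solr delete -name gettingstarted
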
diff --git a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
index acad81e..d1812b2 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java
@@ -31,6 +31,7 @@
 import java.util.TreeMap;
 import java.util.regex.Pattern;
 
+import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.index.StorableField;
 import org.apache.lucene.index.StoredDocument;
 import org.apache.lucene.search.BooleanClause;
@@ -49,7 +50,6 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.core.InitParams;
 import org.apache.solr.core.RequestParams;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.handler.component.HighlightComponent;
@@ -115,6 +115,10 @@
       purposes = Collections.unmodifiableMap(map);
   }
 
+  private static final MapSolrParams maskUseParams = new MapSolrParams(ImmutableMap.<String, String>builder()
+      .put(RequestParams.USEPARAM, "")
+      .build());
+
   /**
    * Set default-ish params on a SolrQueryRequest.
    *
@@ -131,8 +135,18 @@
 
     List<String> paramNames =null;
     String useParams = req.getParams().get(RequestParams.USEPARAM);
+    if(useParams!=null && !useParams.isEmpty()){
+      // Now that we have expanded the useParams macro into actual values,
+      // there is no point in keeping it visible from here on. A distributed
+      // request forwards all params to the nodes down the line, and if it
+      // forwarded useParams too, those nodes would expand it again, which is
+      // not desirable. At the same time, because we send the useParams value
+      // as an empty string to the other nodes, we get the desired benefit of
+      // overriding any useParams configured directly on the requestHandler.
+      req.setParams(SolrParams.wrapDefaults(maskUseParams,req.getParams()));
+    }
     if(useParams == null) useParams = (String) req.getContext().get(RequestParams.USEPARAM);
-    if(useParams !=null) paramNames = StrUtils.splitSmart(useParams, ',');
+    if(useParams !=null && !useParams.isEmpty()) paramNames = StrUtils.splitSmart(useParams, ',');
     if(paramNames != null){
         for (String name : paramNames) {
           SolrParams requestParams = req.getCore().getSolrConfig().getRequestParams().getParams(name);
@@ -978,9 +992,7 @@
         Class pClazz = method.getParameterTypes()[0];
         Object val = entry.getValue();
         method.invoke(bean, val);
-      } catch (InvocationTargetException e1) {
-        throw new RuntimeException("Error invoking setter " + setterName + " on class : " + clazz.getName(), e1);
-      } catch (IllegalAccessException e1) {
+      } catch (InvocationTargetException | IllegalAccessException e1) {
         throw new RuntimeException("Error invoking setter " + setterName + " on class : " + clazz.getName(), e1);
       }
     }
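
The useParams masking above leans on SolrParams.wrapDefaults, whose first argument wins on conflicts. A self-contained sketch of the layering, using plain SolrJ types (the literal "useParams" key stands in for RequestParams.USEPARAM):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.solr.common.params.MapSolrParams;
    import org.apache.solr.common.params.SolrParams;

    public class UseParamsMaskSketch {
      public static void main(String[] args) {
        Map<String,String> request = new HashMap<>();
        request.put("useParams", "myParamSet"); // what the client originally sent
        request.put("q", "*:*");

        Map<String,String> mask = new HashMap<>();
        mask.put("useParams", "");              // the empty-string mask from the hunk

        // wrapDefaults(params, defaults): 'params' takes precedence, so downstream
        // nodes see an empty useParams and skip re-expansion, while every other
        // request parameter falls through untouched.
        SolrParams masked = SolrParams.wrapDefaults(
            new MapSolrParams(mask), new MapSolrParams(request));

        System.out.println(masked.get("useParams")); // prints an empty line
        System.out.println(masked.get("q"));         // prints *:*
      }
    }
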
diff --git a/solr/core/src/java/org/apache/solr/util/plugin/ResourceLoaderAware.java b/solr/core/src/java/org/apache/solr/util/plugin/ResourceLoaderAware.java
deleted file mode 100644
index 89c3674..0000000
--- a/solr/core/src/java/org/apache/solr/util/plugin/ResourceLoaderAware.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package org.apache.solr.util.plugin;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * @deprecated This interface has been kept for backwards compatibility and will
- * be removed in (5.0).  Use {@link org.apache.lucene.analysis.util.ResourceLoaderAware}.
- */
-@Deprecated
-public interface ResourceLoaderAware extends org.apache.lucene.analysis.util.ResourceLoaderAware {
-}
diff --git a/solr/core/src/test-files/log4j.properties b/solr/core/src/test-files/log4j.properties
index f084601..4a3a20a 100644
--- a/solr/core/src/test-files/log4j.properties
+++ b/solr/core/src/test-files/log4j.properties
@@ -8,6 +8,7 @@
 
 log4j.logger.org.apache.zookeeper=WARN
 log4j.logger.org.apache.hadoop=WARN
+log4j.logger.org.apache.directory=WARN
 log4j.logger.org.apache.solr.hadoop=INFO
 
 #log4j.logger.org.apache.solr.update.processor.LogUpdateProcessor=DEBUG
diff --git a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml b/solr/core/src/test-files/solr/collection1/conf/bad-error-solrconfig.xml
similarity index 80%
rename from solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
rename to solr/core/src/test-files/solr/collection1/conf/bad-error-solrconfig.xml
index 1354052..7874f1f 100644
--- a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/bad-error-solrconfig.xml
@@ -19,9 +19,10 @@
 
 <config>
 
+  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
+
   <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
-  <indexConfig>
-    <mergedSegmentWarmer class="org.apache.lucene.index.SimpleMergedSegmentWarmer"/>
-    <nrtMode>false</nrtMode>    <!-- BAD -->
-  </indexConfig>
+
+  <requestHandler name="my_error_handler" class="solr.ThrowErrorOnInitRequestHandler"></requestHandler>
+
 </config>
diff --git a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml b/solr/core/src/test-files/solr/collection1/conf/bad-schema-init-error.xml
similarity index 75%
copy from solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
copy to solr/core/src/test-files/solr/collection1/conf/bad-schema-init-error.xml
index 1354052..e18b048 100644
--- a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/bad-schema-init-error.xml
@@ -1,5 +1,4 @@
 <?xml version="1.0" ?>
-
 <!--
  Licensed to the Apache Software Foundation (ASF) under one or more
  contributor license agreements.  See the NOTICE file distributed with
@@ -17,11 +16,14 @@
  limitations under the License.
 -->
 
-<config>
+<schema name="bad-schema-throws-java-error" version="1.5">
+  <types>
+    <fieldType name="error_ft" class="solr.ThrowErrorOnInitFieldType" />
+ </types>
 
-  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
-  <indexConfig>
-    <mergedSegmentWarmer class="org.apache.lucene.index.SimpleMergedSegmentWarmer"/>
-    <nrtMode>false</nrtMode>    <!-- BAD -->
-  </indexConfig>
-</config>
+
+ <fields>
+   <field name="id" type="error_ft" />
+ </fields>
+
+</schema>
diff --git a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml b/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-nrtmode.xml
similarity index 77%
copy from solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
copy to solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-nrtmode.xml
index 1354052..879c1ad 100644
--- a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-nrtmode.xml
@@ -20,8 +20,16 @@
 <config>
 
   <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+
+  <xi:include href="solrconfig.snippet.randomindexconfig.xml" xmlns:xi="http://www.w3.org/2001/XInclude"/>
+
+  <directoryFactory name="DirectoryFactory" class="NRTCachingDirectoryFactory"/>
+
+  <!-- BEGIN: BAD -->
   <indexConfig>
-    <mergedSegmentWarmer class="org.apache.lucene.index.SimpleMergedSegmentWarmer"/>
-    <nrtMode>false</nrtMode>    <!-- BAD -->
+    <nrtMode>false</nrtMode>
   </indexConfig>
+  <!-- END: BAD -->
+
+
 </config>
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-spatial.xml b/solr/core/src/test-files/solr/collection1/conf/schema-spatial.xml
index dc17bd1..673f3b3 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-spatial.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-spatial.xml
@@ -30,28 +30,25 @@
     <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
 
     <fieldType name="srpt_geohash"   class="solr.SpatialRecursivePrefixTreeFieldType"
-               prefixTree="geohash" units="degrees"
+               prefixTree="geohash" distanceUnits="degrees"
         />
     <fieldType name="srpt_quad"   class="solr.SpatialRecursivePrefixTreeFieldType"
-              prefixTree="quad" units="degrees"
+              prefixTree="quad" distanceUnits="degrees"
         />
     <fieldType name="srpt_100km"   class="solr.SpatialRecursivePrefixTreeFieldType"
-              maxDistErr="0.9" units="degrees"
+              maxDistErr="100" distanceUnits="kilometers"
         />
     <fieldType name="stqpt_geohash"   class="solr.SpatialTermQueryPrefixTreeFieldType"
-              prefixTree="geohash" units="degrees" />
+              prefixTree="geohash" distanceUnits="degrees" />
 
     <fieldType name="stqpt_u"  class="solr.SpatialTermQueryPrefixTreeFieldType"
-        geo="false" distCalculator="cartesian^2" worldBounds="ENVELOPE(0, 1000, 1000, 0)" units="degrees"/>
+        geo="false" distCalculator="cartesian^2" worldBounds="ENVELOPE(0, 1000, 1000, 0)" distanceUnits="degrees"/>
 
     <fieldType name="pointvector" class="solr.SpatialPointVectorFieldType"
-               numberType="tdouble" units="degrees"/>
-
-    <fieldType name="stqpt_u_oldworldbounds"  class="solr.SpatialTermQueryPrefixTreeFieldType"
-               geo="false" distCalculator="cartesian^2" worldBounds="0 0 1000 1000" units="degrees"/>
+               numberType="tdouble" distanceUnits="degrees"/>
 
     <fieldType name="bbox" class="solr.BBoxField"
-               numberType="tdoubleDV" units="degrees"/>
+               numberType="tdoubleDV" distanceUnits="degrees"/>
   </types>
 
 
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema.xml b/solr/core/src/test-files/solr/collection1/conf/schema.xml
index 75b346b..48ffc1e 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema.xml
@@ -401,6 +401,16 @@
     </analyzer>
   </fieldType>
 
+  <fieldType name="payloadDelimited" class="solr.TextField">
+    <analyzer type="index">
+      <tokenizer class="solr.WhitespaceTokenizerFactory" />
+      <filter class="solr.DelimitedPayloadTokenFilterFactory" encoder="integer" />
+    </analyzer>
+    <analyzer type="query">
+      <tokenizer class="solr.WhitespaceTokenizerFactory" />
+    </analyzer>
+  </fieldType>
+
 </types>
 
 
@@ -536,6 +546,8 @@
    <field name="lower" type="lowertok" indexed="false" stored="true" multiValued="true" />
    <field name="_route_" type="string" indexed="true" stored="true" multiValued="false" />
 
+   <field name="payloadDelimited" type="payloadDelimited" />
+
    <!-- Dynamic field definitions.  If a field name is not found, dynamicFields
         will be used if the name matches any of the patterns.
         RESTRICTION: the glob-like pattern in the name attribute must have
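
For context on the payloadDelimited additions above: DelimitedPayloadTokenFilterFactory splits each indexed token on a delimiter (| by default), keeps the text before it as the term, and stores the part after it as the payload, encoded as an integer here because of encoder="integer". A field value such as

    important|10 boring|1

indexes the terms important and boring with integer payloads 10 and 1. The query analyzer deliberately omits the filter, so query terms match without any payload syntax.
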
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema12.xml b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
index 4b73e8d..5481bbc 100755
--- a/solr/core/src/test-files/solr/collection1/conf/schema12.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
@@ -392,7 +392,7 @@
     <fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
     <!-- sub-centimeter accuracy for RPT; distance calcs -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-      geo="true" distErrPct="0.025" maxDistErr="0.00000009" units="degrees" />
+               geo="true" distErrPct="0.025" maxDistErr="0.00001" distanceUnits="kilometers" />
 
   <fieldType name="currency" class="solr.CurrencyField" currencyConfig="currency.xml" multiValued="false" />
  </types>
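
The schema12.xml change above preserves the documented sub-centimeter accuracy while switching units: with distanceUnits="kilometers", maxDistErr="0.00001" means 0.00001 km = 1 cm, and the old degree-based value was in the same ballpark, since 0.00000009 degrees × ~111,195 m per degree of latitude ≈ 0.01 m ≈ 1 cm.
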
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-slave.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-slave.xml
index edf195c..1782c54 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-slave.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-slave.xml
@@ -42,7 +42,7 @@
 
   <requestHandler name="/replication" class="solr.ReplicationHandler">
 	<lst name="slave">
-		<str name="masterUrl">http://127.0.0.1:TEST_PORT/solr</str>
+		<str name="masterUrl">http://127.0.0.1:TEST_PORT/solr/collection1</str>
 		<str name="pollInterval">00:00:01</str>
         <str name="compression">COMPRESSION</str>
      </lst>
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-suggestercomponent.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-suggestercomponent.xml
index 2b0b301..bf4ed86 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-suggestercomponent.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-suggestercomponent.xml
@@ -23,7 +23,7 @@
         solr.StandardDirectoryFactory, the default, is filesystem based.
         solr.RAMDirectoryFactory is memory based and not persistent. -->
   <dataDir>${solr.data.dir:}</dataDir>
-  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
+  <directoryFactory name="DirectoryFactory" class="solr.NRTCachingDirectoryFactory"/>
 
   <updateHandler class="solr.DirectUpdateHandler2"/>
 
@@ -40,7 +40,6 @@
       <str name="suggestAnalyzerFieldType">text</str>
       <str name="buildOnCommit">true</str>
 
-      <!-- Suggester properties -->
       <float name="threshold">0.0</float>
     </lst>
 	
@@ -61,9 +60,9 @@
       <str name="dictionaryImpl">DocumentDictionaryFactory</str>
       <str name="field">cat</str>
       <str name="weightField">price</str>
-      <str name="storeDir">suggest_fuzzy_doc_dict_payload</str>
       <str name="suggestAnalyzerFieldType">text</str>
       <str name="buildOnCommit">true</str>
+      <str name="buildOnStartup">false</str>
     </lst>
 
 	<!-- Suggest component (Document Expression Dictionary) -->
@@ -79,6 +78,57 @@
       <str name="suggestAnalyzerFieldType">text</str>
       <str name="buildOnCommit">true</str>
     </lst>
+     
+    <!-- Suggest component (Document Dictionary) that is built on startup -->
+    <lst name="suggester">
+      <str name="name">suggest_fuzzy_doc_dict_build_startup</str>
+      <str name="lookupImpl">FuzzyLookupFactory</str>
+      <str name="dictionaryImpl">DocumentDictionaryFactory</str>
+      <str name="field">cat</str>
+      <str name="weightField">price</str>
+      <str name="suggestAnalyzerFieldType">text</str>
+      <str name="buildOnCommit">false</str>
+      <str name="buildOnStartup">true</str>
+    </lst>
+    
+    <!-- Suggest component (Document Dictionary) that is only built manually -->
+    <lst name="suggester">
+      <str name="name">suggest_fuzzy_doc_manal_build</str>
+      <str name="lookupImpl">FuzzyLookupFactory</str>
+      <str name="dictionaryImpl">DocumentDictionaryFactory</str>
+      <str name="field">cat</str>
+      <str name="weightField">price</str>
+      <str name="suggestAnalyzerFieldType">text</str>
+      <str name="buildOnCommit">false</str>
+      <str name="buildOnStartup">false</str>
+      <str name="storeDir">suggest_fuzzy_doc_manal_build</str>
+    </lst>
+    
+    <!-- Suggest component (Document Dictionary) that is only built manually and
+         has the default buildOnStartup behavior -->
+    <lst name="suggester">
+      <str name="name">suggest_doc_default_startup</str>
+      <str name="lookupImpl">AnalyzingLookupFactory</str>
+      <str name="dictionaryImpl">DocumentDictionaryFactory</str>
+      <str name="field">cat</str>
+      <str name="weightField">price</str>
+      <str name="suggestAnalyzerFieldType">text</str>
+      <str name="buildOnCommit">false</str>
+      <str name="storeDir">suggest_doc_default_startup</str>
+    </lst>
+    
+    <!-- Suggest component (Document Dictionary) that is only built manually and
+         has the default buildOnStartup behavior with no storeDir -->
+    <lst name="suggester">
+      <str name="name">suggest_doc_default_startup_no_store</str>
+      <str name="lookupImpl">AnalyzingLookupFactory</str>
+      <str name="dictionaryImpl">DocumentDictionaryFactory</str>
+      <str name="field">cat</str>
+      <str name="weightField">price</str>
+      <str name="suggestAnalyzerFieldType">text</str>
+      <str name="buildOnCommit">false</str>
+    </lst>
+    
   </searchComponent>
 
   <requestHandler name="/suggest" class="org.apache.solr.handler.component.SearchHandler">
@@ -91,5 +141,7 @@
   </requestHandler>
   
   <requestHandler name="/update" class="solr.UpdateRequestHandler" />
+  
+  <query><useColdSearcher>false</useColdSearcher></query>
 
 </config>
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog-with-delayingcomponent.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog-with-delayingcomponent.xml
index 5db588f..adcb6c0 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog-with-delayingcomponent.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog-with-delayingcomponent.xml
@@ -81,12 +81,6 @@
     </lst>
   </requestHandler>
 
-  <requestHandler name="/admin/fileedit" class="solr.admin.EditFileRequestHandler" >
-    <lst name="invariants">
-      <str name="hidden">bogus.txt</str>
-    </lst>
-  </requestHandler>
-
   <updateRequestProcessorChain name="distrib-dup-test-chain-explicit">
     <!-- explicit test using processors before and after distrib -->
     <processor class="solr.RegexReplaceProcessorFactory">
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
index 75ea5ca..412f443 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
@@ -82,12 +82,6 @@
     </lst>
   </requestHandler>
 
-  <requestHandler name="/admin/fileedit" class="solr.admin.EditFileRequestHandler" >
-    <lst name="invariants">
-      <str name="hidden">bogus.txt</str>
-    </lst>
-  </requestHandler>
-
   <updateRequestProcessorChain name="distrib-dup-test-chain-explicit">
     <!-- explicit test using processors before and after distrib -->
     <processor class="solr.RegexReplaceProcessorFactory">
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
index 055f3d7..7514aa4 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
@@ -35,7 +35,6 @@
   <ramBufferSizeMB>${solr.tests.ramBufferSizeMB}</ramBufferSizeMB>
 
   <mergeScheduler class="${solr.tests.mergeScheduler}" />
-  <nrtMode>${solr.tests.nrtMode:true}</nrtMode>
 
   <writeLockTimeout>1000</writeLockTimeout>
   <commitLockTimeout>10000</commitLockTimeout>
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig.xml
index 95bacab..8307452 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig.xml
@@ -235,12 +235,6 @@
     </lst>
   </requestHandler>
 
-  <requestHandler name="/admin/fileedit" class="solr.admin.EditFileRequestHandler" >
-    <lst name="invariants">
-      <str name="hidden">bogus.txt</str>
-    </lst>
-  </requestHandler>
-
   <!-- test query parameter defaults -->
   <requestHandler name="defaults" class="solr.StandardRequestHandler">
     <lst name="defaults">
diff --git a/solr/core/src/java/org/apache/solr/common/package.html b/solr/core/src/test-files/solr/configsets/bad-mergepolicy/conf/schema.xml
similarity index 72%
copy from solr/core/src/java/org/apache/solr/common/package.html
copy to solr/core/src/test-files/solr/configsets/bad-mergepolicy/conf/schema.xml
index 6c25154..9e2f947 100644
--- a/solr/core/src/java/org/apache/solr/common/package.html
+++ b/solr/core/src/test-files/solr/configsets/bad-mergepolicy/conf/schema.xml
@@ -1,4 +1,4 @@
-<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
+<?xml version="1.0" encoding="UTF-8" ?>
 <!--
  Licensed to the Apache Software Foundation (ASF) under one or more
  contributor license agreements.  See the NOTICE file distributed with
@@ -15,15 +15,11 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 -->
-<html>
-<head>
-   <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
-</head>
-<body>
-<p>
-Commonly reused classes and interfaces (deprecated package, do not add new classes)
-
-
-</p>
-</body>
-</html>
+<schema name="minimal" version="1.1">
+ <types>
+  <fieldType name="string" class="solr.StrField"/>
+ </types>
+ <fields>
+   <dynamicField name="*" type="string" indexed="true" stored="true" />
+ </fields>
+</schema>
diff --git a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml b/solr/core/src/test-files/solr/configsets/bad-mergepolicy/conf/solrconfig.xml
similarity index 72%
copy from solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
copy to solr/core/src/test-files/solr/configsets/bad-mergepolicy/conf/solrconfig.xml
index 1354052..0cc5faa 100644
--- a/solr/core/src/test-files/solr/collection1/conf/bad-solrconfig-warmer-no-reopen.xml
+++ b/solr/core/src/test-files/solr/configsets/bad-mergepolicy/conf/solrconfig.xml
@@ -19,9 +19,16 @@
 
 <config>
 
+  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
+
   <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+
   <indexConfig>
-    <mergedSegmentWarmer class="org.apache.lucene.index.SimpleMergedSegmentWarmer"/>
-    <nrtMode>false</nrtMode>    <!-- BAD -->
+    <mergePolicy class="org.apache.solr.update.DummyMergePolicy"/>
+    <mergeFactor>8</mergeFactor>
   </indexConfig>
+
+  <updateHandler class="solr.DirectUpdateHandler2"/>
+  <requestHandler name="standard" class="solr.StandardRequestHandler"></requestHandler>
+
 </config>
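The new bad-mergepolicy configset pairs a custom mergePolicy class with a <mergeFactor> element, apparently to provoke a configuration failure (the failure mode itself is not shown in this hunk). A hypothetical stand-in for the referenced org.apache.solr.update.DummyMergePolicy, assuming it is little more than a loadable named subclass:

    import org.apache.lucene.index.LogByteSizeMergePolicy;

    // Hypothetical sketch: a do-nothing merge policy whose only job is to be
    // instantiable by class name from solrconfig.xml. The real DummyMergePolicy
    // lives in the Solr test tree and its exact shape may differ.
    public class DummyMergePolicy extends LogByteSizeMergePolicy {
    }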
diff --git a/solr/core/src/test-files/solr/solr-no-core-old-style.xml b/solr/core/src/test-files/solr/solr-no-core-old-style.xml
deleted file mode 100644
index c7e2696..0000000
--- a/solr/core/src/test-files/solr/solr-no-core-old-style.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
- All (relative) paths are relative to the installation path
-  
-  persistent: Save changes made via the API to this file
-  sharedLib: path to a lib directory that will be shared across all cores
--->
-<solr persistent="${solr.xml.persist:true}">
-
-  <!--
-  adminPath: RequestHandler path to manage cores.  
-    If 'null' (or absent), cores will not be manageable via request handler
-  -->
-  <cores adminPath="/admin/cores" defaultCoreName="collection1" host="127.0.0.1" hostPort="${hostPort:8983}" 
-         hostContext="${hostContext:solr}" zkClientTimeout="${solr.zkclienttimeout:30000}" numShards="${numShards:3}" shareSchema="${shareSchema:false}" 
-         genericCoreNodeNames="${genericCoreNodeNames:true}" leaderVoteWait="0"
-         distribUpdateConnTimeout="${distribUpdateConnTimeout:45000}" distribUpdateSoTimeout="${distribUpdateSoTimeout:3400000}">
-    <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
-      <int name="socketTimeout">${socketTimeout:120000}</int>
-      <int name="connTimeout">${connTimeout:15000}</int>
-    </shardHandlerFactory>
-  </cores>
-  
-</solr>
diff --git a/solr/core/src/test-files/solr/solr-no-core.xml b/solr/core/src/test-files/solr/solr-no-core.xml
index 60d2ff1..52d3e76 100644
--- a/solr/core/src/test-files/solr/solr-no-core.xml
+++ b/solr/core/src/test-files/solr/solr-no-core.xml
@@ -19,6 +19,8 @@
 <solr>
 
   <str name="shareSchema">${shareSchema:false}</str>
+  <str name="configSetBaseDir">${configSetBaseDir:configsets}</str>
+  <str name="coreRootDirectory">${coreRootDirectory:.}</str>
 
   <solrcloud>
     <str name="host">127.0.0.1</str>
diff --git a/solr/core/src/test-files/solr/solr-shardhandler-old.xml b/solr/core/src/test-files/solr/solr-shardhandler-old.xml
deleted file mode 100644
index 70aaa56..0000000
--- a/solr/core/src/test-files/solr/solr-shardhandler-old.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
- old-style solr.xml specifying a custom shardHandlerFactory
--->
-<solr>
-  <cores>
-    <shardHandlerFactory name="shardHandlerFactory"
-                         class="org.apache.solr.core.MockShardHandlerFactory">
-      <str name="myMagicRequiredParameter">myMagicRequiredValue</str>
-    </shardHandlerFactory>
-  </cores>
-</solr>
diff --git a/solr/core/src/test-files/solr/solr-stress-old.xml b/solr/core/src/test-files/solr/solr-stress-old.xml
deleted file mode 100644
index 9a83cf2..0000000
--- a/solr/core/src/test-files/solr/solr-stress-old.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
- All (relative) paths are relative to the installation path
-
-  persistent: Save changes made via the API to this file
-  sharedLib: path to a lib directory that will be shared across all cores
--->
-<solr persistent="${solr.xml.persist:true}">
-
-  <!--
-  adminPath: RequestHandler path to manage cores.
-    If 'null' (or absent), cores will not be manageable via request handler
-  -->
-  <cores adminPath="/admin/cores" defaultCoreName="collection1" host="127.0.0.1" hostPort="${hostPort:8983}"
-         hostContext="${hostContext:solr}">
-    <core name="00000_core" instanceDir="00000_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="false" loadOnStartup="true" />
-    <core name="00001_core" instanceDir="00001_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00002_core" instanceDir="00002_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00003_core" instanceDir="00003_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00004_core" instanceDir="00004_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00005_core" instanceDir="00005_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00006_core" instanceDir="00006_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00007_core" instanceDir="00007_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00008_core" instanceDir="00008_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00009_core" instanceDir="00009_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00010_core" instanceDir="00010_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00011_core" instanceDir="00011_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00012_core" instanceDir="00012_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00013_core" instanceDir="00013_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00014_core" instanceDir="00014_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00015_core" instanceDir="00015_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00016_core" instanceDir="00016_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00017_core" instanceDir="00017_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00018_core" instanceDir="00018_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <core name="00019_core" instanceDir="00019_core" schema="schema-tiny.xml" config="solrconfig-minimal.xml" transient="true" loadOnStartup="false" />
-    <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
-      <int name="socketTimeout">${socketTimeout:90000}</int>
-      <int name="connTimeout">${connTimeout:15000}</int>
-    </shardHandlerFactory>
-  </cores>
-
-</solr>
diff --git a/solr/core/src/test-files/solr/solr.xml b/solr/core/src/test-files/solr/solr.xml
index 4fa106c..09f0121 100644
--- a/solr/core/src/test-files/solr/solr.xml
+++ b/solr/core/src/test-files/solr/solr.xml
@@ -18,27 +18,28 @@
 
 <!--
  All (relative) paths are relative to the installation path
-  
-  persistent: Save changes made via the API to this file
-  sharedLib: path to a lib directory that will be shared across all cores
 -->
-<solr persistent="${solr.xml.persist:false}">
+<solr>
 
-  <!--
-  adminPath: RequestHandler path to manage cores.  
-    If 'null' (or absent), cores will not be manageable via request handler
-  -->
-  <cores adminPath="/admin/cores" defaultCoreName="collection1" host="127.0.0.1" hostPort="${hostPort:8983}" 
-         hostContext="${hostContext:solr}" zkClientTimeout="${solr.zkclienttimeout:30000}" shareSchema="${shareSchema:false}" 
-         genericCoreNodeNames="${genericCoreNodeNames:true}" leaderVoteWait="0"
-         distribUpdateConnTimeout="${distribUpdateConnTimeout:45000}" distribUpdateSoTimeout="${distribUpdateSoTimeout:340000}">
-    <core name="collection1" instanceDir="collection1" shard="${shard:}" collection="${collection:collection1}" config="${solrconfig:solrconfig.xml}" schema="${schema:schema.xml}"
-          coreNodeName="${coreNodeName:}"/>
-    <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
-      <str name="urlScheme">${urlScheme:}</str>
-      <int name="socketTimeout">${socketTimeout:90000}</int>
-      <int name="connTimeout">${connTimeout:15000}</int>
-    </shardHandlerFactory>
-  </cores>
+  <str name="shareSchema">${shareSchema:false}</str>
+  <str name="configSetBaseDir">${configSetBaseDir:configsets}</str>
+  <str name="coreRootDirectory">${coreRootDirectory:.}</str>
+
+  <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+    <str name="urlScheme">${urlScheme:}</str>
+    <int name="socketTimeout">${socketTimeout:90000}</int>
+    <int name="connTimeout">${connTimeout:15000}</int>
+  </shardHandlerFactory>
+
+  <solrcloud>
+    <str name="host">127.0.0.1</str>
+    <int name="hostPort">${hostPort:8983}</int>
+    <str name="hostContext">${hostContext:solr}</str>
+    <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+    <int name="leaderVoteWait">0</int>
+    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
+    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
+  </solrcloud>
   
 </solr>
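Values throughout the rewritten solr.xml use the ${name:default} placeholder syntax, which is resolved against JVM system properties when the file is loaded. A simplified illustration of that substitution (a stand-in sketch, not Solr's actual resolver, which also handles nesting and escaping):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Simplified ${name:default} resolver, for illustration only.
    static String resolve(String raw) {
      Matcher m = Pattern.compile("\\$\\{([^:}]+)(?::([^}]*))?\\}").matcher(raw);
      StringBuffer out = new StringBuffer();
      while (m.find()) {
        String fallback = m.group(2) == null ? "" : m.group(2);
        m.appendReplacement(out, Matcher.quoteReplacement(System.getProperty(m.group(1), fallback)));
      }
      m.appendTail(out);
      return out.toString();
    }

    // resolve("${hostPort:8983}") -> "8983" unless -DhostPort=... was set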
diff --git a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
index 2a11d45..7cf28dd 100644
--- a/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
+++ b/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
@@ -37,7 +37,6 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext.Context;
 import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
 import org.apache.lucene.util.English;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
diff --git a/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java b/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java
index 275e994..b6d0bfa 100644
--- a/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java
+++ b/solr/core/src/test/org/apache/solr/DistributedIntervalFacetingTest.java
@@ -10,6 +10,7 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -37,8 +38,8 @@
     configString = "solrconfig-basic.xml";
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     del("*:*");
     commit();
     testRandom();
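This hunk is the first instance of a pattern repeated across the test changes in this patch: BaseDistributedSearchTestCase subclasses stop overriding doTest() and instead expose a plain JUnit 4 @Test method, with shard counts moving from constructor fields to the @ShardsFixed annotation (or fixShardCount(n)). In sketch form, with names taken from the hunks in this patch and base-class details assumed:

    import org.junit.Test;

    public class SomeDistribTest extends BaseDistributedSearchTestCase {

      // Old style, removed by this patch:
      //   public SomeDistribTest() { fixShardCount = true; shardCount = 4; }
      //   @Override
      //   public void doTest() throws Exception { ... }

      // New style, added by this patch:
      @Test
      @ShardsFixed(num = 4)
      public void test() throws Exception {
        del("*:*");   // helpers inherited from the base class
        commit();
      }
    }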
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
index e6ef47e..7b86bd8 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedGrouping.java
@@ -24,6 +24,7 @@
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.junit.Test;
 
 /**
  * TODO? perhaps use:
@@ -43,8 +44,8 @@
   String tdate_b = "b_n_tdt";
   String oddField="oddField_s";
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     del("*:*");
     commit();
 
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java b/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java
index 7dd5815..f877c54 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedMissingSort.java
@@ -19,6 +19,7 @@
 
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.response.QueryResponse;
+import org.junit.Test;
 
 /**
  * Tests sortMissingFirst and sortMissingLast in distributed sort
@@ -37,8 +38,8 @@
   String string1_ml = "five_s1_ml"; // StringField, sortMissingLast=true, multiValued=false
   String string1_mf = "six_s1_mf";  // StringField, sortMissingFirst=true, multiValued=false
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     index();
     testSortMissingLast();
     testSortMissingFirst();
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
index be50d62..76e98ca 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
@@ -37,6 +37,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.NamedList;
+import org.junit.Test;
 
 /**
  * TODO? perhaps use:
@@ -61,8 +62,8 @@
   String missingField="ignore_exception__missing_but_valid_field_t";
   String invalidField="ignore_exception__invalid_field_not_in_schema";
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     QueryResponse rsp = null;
     int backupStress = stress; // make a copy so we can restore
 
diff --git a/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java b/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java
index fbccf97..3d1ec6b 100644
--- a/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java
+++ b/solr/core/src/test/org/apache/solr/TestHighlightDedupGrouping.java
@@ -18,10 +18,10 @@
 package org.apache.solr;
 
 import org.apache.lucene.util.TestUtil;
-import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrInputDocument;
+import org.junit.Test;
 
 import java.io.IOException;
 
@@ -35,14 +35,9 @@
   private static final String group_ti1 = "group_ti1";
   private static final String shard_i1 = "shard_i1";
 
-  public TestHighlightDedupGrouping() {
-    super();
-    fixShardCount = true;
-    shardCount = 2;
-  }
-
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 2)
+  public void test() throws Exception {
     basicTest();
     randomizedTest();
   }
@@ -57,7 +52,7 @@
 
     int docid = 1;
     int group = 5;
-    for (int shard = 0 ; shard < shardCount ; ++shard) {
+    for (int shard = 0 ; shard < getShardCount(); ++shard) {
       addDoc(docid, group, shard); // add the same doc to both shards
       clients.get(shard).commit();
     }
@@ -67,7 +62,7 @@
          "shards",      shards,
          "group",       "true",
          "group.field", id_s1,
-         "group.limit", Integer.toString(shardCount),
+         "group.limit", Integer.toString(getShardCount()),
          "hl",          "true",
          "hl.fl",       id_s1
         ));
@@ -93,15 +88,15 @@
       ++docsInGroup[group];
       boolean makeDuplicate = 0 == TestUtil.nextInt(random(), 0, numDocs / percentDuplicates);
       if (makeDuplicate) {
-        for (int shard = 0 ; shard < shardCount ; ++shard) {
+        for (int shard = 0 ; shard < getShardCount(); ++shard) {
           addDoc(docid, group, shard);
         }
       } else {
-        int shard = TestUtil.nextInt(random(), 0, shardCount - 1);
+        int shard = TestUtil.nextInt(random(), 0, getShardCount() - 1);
         addDoc(docid, group, shard);
       }
     }
-    for (int shard = 0 ; shard < shardCount ; ++shard) {
+    for (int shard = 0 ; shard < getShardCount(); ++shard) {
       clients.get(shard).commit();
     }
 
diff --git a/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java b/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java
index 283dddc..42fea2f 100644
--- a/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java
+++ b/solr/core/src/test/org/apache/solr/TestSolrCoreProperties.java
@@ -16,20 +16,21 @@
  */
 package org.apache.solr;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.util.Properties;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.lucene.util.IOUtils;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.junit.BeforeClass;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.util.Properties;
 
 /**
  * <p> Test for Loading core properties from a properties file </p>
@@ -39,6 +40,8 @@
  */
 public class TestSolrCoreProperties extends SolrJettyTestBase {
 
+  // TODO these properties files don't work with configsets
+
   @BeforeClass
   public static void beforeTest() throws Exception {
     File homeDir = createTempDir().toFile();
@@ -68,7 +71,19 @@
     p.store(fos, null);
     IOUtils.close(fos);
 
-    createJetty(homeDir.getAbsolutePath(), null, null);
+    Files.createFile(collDir.toPath().resolve("core.properties"));
+
+    jetty = new JettySolrRunner(homeDir.getAbsolutePath(), "/solr", 0, null, null, true, null, sslConfig);
+
+    // this sets the property for jetty starting SolrDispatchFilter
+    if (System.getProperty("solr.data.dir") == null && System.getProperty("solr.hdfs.home") == null) {
+      jetty.setDataDir(createTempDir().toFile().getCanonicalPath());
+    }
+
+    jetty.start();
+    port = jetty.getLocalPort();
+
+    //createJetty(homeDir.getAbsolutePath(), null, null);
   }
 
   public void testSimple() throws Exception {
diff --git a/solr/core/src/test/org/apache/solr/TestTolerantSearch.java b/solr/core/src/test/org/apache/solr/TestTolerantSearch.java
index d6f7816..4e3581e 100644
--- a/solr/core/src/test/org/apache/solr/TestTolerantSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestTolerantSearch.java
@@ -1,12 +1,8 @@
 package org.apache.solr;
 
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-
 import org.apache.commons.io.FileUtils;
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
@@ -21,6 +17,10 @@
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -60,7 +60,7 @@
     solrHome = createSolrHome();
     createJetty(solrHome.getAbsolutePath(), null, null);
     String url = jetty.getBaseUrl().toString();
-    collection1 = new HttpSolrClient(url);
+    collection1 = new HttpSolrClient(url + "/collection1");
     collection2 = new HttpSolrClient(url + "/collection2");
     
     String urlCollection1 = jetty.getBaseUrl().toString() + "/" + "collection1";
@@ -69,10 +69,13 @@
     shard2 = urlCollection2.replaceAll("https?://", "");
     
     //create second core
-    CoreAdminRequest.Create req = new CoreAdminRequest.Create();
-    req.setCoreName("collection2");
-    collection1.request(req);
-    
+    try (HttpSolrClient nodeClient = new HttpSolrClient(url)) {
+      CoreAdminRequest.Create req = new CoreAdminRequest.Create();
+      req.setCoreName("collection2");
+      req.setConfigSet("collection1");
+      nodeClient.request(req);
+    }
+
     SolrInputDocument doc = new SolrInputDocument();
     doc.setField("id", "1");
     doc.setField("subject", "batman");
@@ -96,8 +99,8 @@
   
   @AfterClass
   public static void destroyThings() throws Exception {
-    collection1.shutdown();
-    collection2.shutdown();
+    collection1.close();
+    collection2.close();
     collection1 = null;
     collection2 = null;
     jetty.stop();
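TestTolerantSearch also shows the other conversion this patch applies wholesale: SolrClient instances are treated as closeable resources, so explicit shutdown() calls give way to close(), usually via try-with-resources (as in the nodeClient block above, and repeatedly in the test changes that follow). In sketch form:

    // Before: manual lifecycle
    //   HttpSolrClient collection1 = new HttpSolrClient(url + "/collection1");
    //   ... use the client ...
    //   collection1.shutdown();

    // After: scoped lifecycle, close() is called automatically
    try (HttpSolrClient collection1 = new HttpSolrClient(url + "/collection1")) {
      // ... use the client ...
    }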
diff --git a/solr/core/src/test/org/apache/solr/analysis/LegacyHTMLStripCharFilterTest.java b/solr/core/src/test/org/apache/solr/analysis/LegacyHTMLStripCharFilterTest.java
deleted file mode 100644
index 58158d9..0000000
--- a/solr/core/src/test/org/apache/solr/analysis/LegacyHTMLStripCharFilterTest.java
+++ /dev/null
@@ -1,321 +0,0 @@
-package org.apache.solr.analysis;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.io.StringReader;
-import java.nio.charset.StandardCharsets;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.BaseTokenStreamTestCase;
-import org.apache.lucene.analysis.MockTokenizer;
-import org.apache.lucene.analysis.Tokenizer;
-import org.apache.lucene.util.TestUtil;
-import org.junit.Ignore;
-
-public class LegacyHTMLStripCharFilterTest extends BaseTokenStreamTestCase {
-
-  //this is some text  here is a  link  and another  link . This is an entity: & plus a <.  Here is an &
-  //
-  public void test() throws IOException {
-    String html = "<div class=\"foo\">this is some text</div> here is a <a href=\"#bar\">link</a> and " +
-            "another <a href=\"http://lucene.apache.org/\">link</a>. " +
-            "This is an entity: &amp; plus a &lt;.  Here is an &. <!-- is a comment -->";
-    String gold = " this is some text  here is a  link  and " +
-            "another  link . " +
-            "This is an entity: & plus a <.  Here is an &.  ";
-    LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new StringReader(html));
-    StringBuilder builder = new StringBuilder();
-    int ch = -1;
-    char [] goldArray = gold.toCharArray();
-    int position = 0;
-    while ((ch = reader.read()) != -1){
-      char theChar = (char) ch;
-      builder.append(theChar);
-      assertTrue("\"" + theChar + "\"" + " at position: " + position + " does not equal: " + goldArray[position]
-              + " Buffer so far: " + builder + "<EOB>", theChar == goldArray[position]);
-      position++;
-    }
-    assertEquals(gold, builder.toString());
-  }
-
-  //Some sanity checks, but not a full-fledged check
-  public void testHTML() throws Exception {
-    InputStream stream = getClass().getResourceAsStream("htmlStripReaderTest.html");
-    LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new InputStreamReader(stream, StandardCharsets.UTF_8));
-    StringBuilder builder = new StringBuilder();
-    int ch = -1;
-    while ((ch = reader.read()) != -1){
-      builder.append((char)ch);
-    }
-    String str = builder.toString();
-    assertTrue("Entity not properly escaped", str.indexOf("&lt;") == -1);//there is one > in the text
-    assertTrue("Forrest should have been stripped out", str.indexOf("forrest") == -1 && str.indexOf("Forrest") == -1);
-    assertTrue("File should start with 'Welcome to Solr' after trimming", str.trim().startsWith("Welcome to Solr"));
-
-    assertTrue("File should start with 'Foundation.' after trimming", str.trim().endsWith("Foundation."));
-    
-  }
-
-  public void testGamma() throws Exception {
-    String test = "&Gamma;";
-    String gold = "\u0393";
-    Set<String> set = new HashSet<>();
-    set.add("reserved");
-    Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
-    StringBuilder builder = new StringBuilder();
-    int ch = 0;
-    while ((ch = reader.read()) != -1){
-      builder.append((char)ch);
-    }
-    String result = builder.toString();
-    // System.out.println("Resu: " + result + "<EOL>");
-    // System.out.println("Gold: " + gold + "<EOL>");
-    assertTrue(result + " is not equal to " + gold + "<EOS>", result.equals(gold) == true);
-  }
-
-  public void testEntities() throws Exception {
-    String test = "&nbsp; &lt;foo&gt; &Uuml;bermensch &#61; &Gamma; bar &#x393;";
-    String gold = "  <foo> \u00DCbermensch = \u0393 bar \u0393";
-    Set<String> set = new HashSet<>();
-    set.add("reserved");
-    Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
-    StringBuilder builder = new StringBuilder();
-    int ch = 0;
-    while ((ch = reader.read()) != -1){
-      builder.append((char)ch);
-    }
-    String result = builder.toString();
-    // System.out.println("Resu: " + result + "<EOL>");
-    // System.out.println("Gold: " + gold + "<EOL>");
-    assertTrue(result + " is not equal to " + gold + "<EOS>", result.equals(gold) == true);
-  }
-
-  public void testMoreEntities() throws Exception {
-    String test = "&nbsp; &lt;junk/&gt; &nbsp; &#33; &#64; and &#8217;";
-    String gold = "  <junk/>   ! @ and ’";
-    Set<String> set = new HashSet<>();
-    set.add("reserved");
-    Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
-    StringBuilder builder = new StringBuilder();
-    int ch = 0;
-    while ((ch = reader.read()) != -1){
-      builder.append((char)ch);
-    }
-    String result = builder.toString();
-    // System.out.println("Resu: " + result + "<EOL>");
-    // System.out.println("Gold: " + gold + "<EOL>");
-    assertTrue(result + " is not equal to " + gold, result.equals(gold) == true);
-  }
-
-  public void testReserved() throws Exception {
-    String test = "aaa bbb <reserved ccc=\"ddddd\"> eeee </reserved> ffff <reserved ggg=\"hhhh\"/> <other/>";
-    Set<String> set = new HashSet<>();
-    set.add("reserved");
-    Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test), set);
-    StringBuilder builder = new StringBuilder();
-    int ch = 0;
-    while ((ch = reader.read()) != -1){
-      builder.append((char)ch);
-    }
-    String result = builder.toString();
-    // System.out.println("Result: " + result);
-    assertTrue("Escaped tag not preserved: "  + result.indexOf("reserved"), result.indexOf("reserved") == 9);
-    assertTrue("Escaped tag not preserved: " + result.indexOf("reserved", 15), result.indexOf("reserved", 15) == 38);
-    assertTrue("Escaped tag not preserved: " + result.indexOf("reserved", 41), result.indexOf("reserved", 41) == 54);
-    assertTrue("Other tag should be removed", result.indexOf("other") == -1);
-  }
-
-  public void testMalformedHTML() throws Exception {
-    String test = "a <a hr<ef=aa<a>> </close</a>";
-    String gold = "a <a hr<ef=aa > </close ";
-    Reader reader = new LegacyHTMLStripCharFilter(new StringReader(test));
-    StringBuilder builder = new StringBuilder();
-    int ch = 0;
-    while ((ch = reader.read()) != -1){
-      builder.append((char)ch);
-    }
-    String result = builder.toString();
-    // System.out.println("Resu: " + result + "<EOL>");
-    // System.out.println("Gold: " + gold + "<EOL>");
-    assertTrue(result + " is not equal to " + gold + "<EOS>", result.equals(gold) == true);
-  }
-
-  public void testBufferOverflow() throws Exception {
-    StringBuilder testBuilder = new StringBuilder(LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 50);
-    testBuilder.append("ah<?> ??????");
-    appendChars(testBuilder, LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);
-    processBuffer(testBuilder.toString(), "Failed on pseudo proc. instr.");//processing instructions
-
-    testBuilder.setLength(0);
-    testBuilder.append("<!--");//comments
-    appendChars(testBuilder, 3*LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);//comments have two lookaheads
-
-    testBuilder.append("-->foo");
-    processBuffer(testBuilder.toString(), "Failed w/ comment");
-
-    testBuilder.setLength(0);
-    testBuilder.append("<?");
-    appendChars(testBuilder, LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);
-    testBuilder.append("?>");
-    processBuffer(testBuilder.toString(), "Failed with proc. instr.");
-    
-    testBuilder.setLength(0);
-    testBuilder.append("<b ");
-    appendChars(testBuilder, LegacyHTMLStripCharFilter.DEFAULT_READ_AHEAD + 500);
-    testBuilder.append("/>");
-    processBuffer(testBuilder.toString(), "Failed on tag");
-
-  }
-
-  private void appendChars(StringBuilder testBuilder, int numChars) {
-    int i1 = numChars / 2;
-    for (int i = 0; i < i1; i++){
-      testBuilder.append('a').append(' ');//tack on enough to go beyond the mark readahead limit, since <?> makes LegacyHTMLStripCharFilter think it is a processing instruction
-    }
-  }  
-
-
-  private void processBuffer(String test, String assertMsg) throws IOException {
-    // System.out.println("-------------------processBuffer----------");
-    Reader reader = new LegacyHTMLStripCharFilter(new BufferedReader(new StringReader(test)));//force the use of BufferedReader
-    int ch = 0;
-    StringBuilder builder = new StringBuilder();
-    try {
-      while ((ch = reader.read()) != -1){
-        builder.append((char)ch);
-      }
-    } finally {
-      // System.out.println("String (trimmed): " + builder.toString().trim() + "<EOS>");
-    }
-    assertTrue(assertMsg + "::: " + builder.toString() + " is not equal to " + test, builder.toString().equals(test) == true);
-  }
-
-  public void testComment() throws Exception {
-
-    String test = "<!--- three dashes, still a valid comment ---> ";
-    String gold = "  ";
-    Reader reader = new LegacyHTMLStripCharFilter(new BufferedReader(new StringReader(test)));//force the use of BufferedReader
-    int ch = 0;
-    StringBuilder builder = new StringBuilder();
-    try {
-      while ((ch = reader.read()) != -1){
-        builder.append((char)ch);
-      }
-    } finally {
-      // System.out.println("String: " + builder.toString());
-    }
-    assertTrue(builder.toString() + " is not equal to " + gold + "<EOS>", builder.toString().equals(gold) == true);
-  }
-
-
-  public void doTestOffsets(String in) throws Exception {
-    LegacyHTMLStripCharFilter reader = new LegacyHTMLStripCharFilter(new BufferedReader(new StringReader(in)));
-    int ch = 0;
-    int off = 0;     // offset in the reader
-    int strOff = -1; // offset in the original string
-    while ((ch = reader.read()) != -1) {
-      int correctedOff = reader.correctOffset(off);
-
-      if (ch == 'X') {
-        strOff = in.indexOf('X',strOff+1);
-        assertEquals(strOff, correctedOff);
-      }
-
-      off++;
-    }
-  }
-
-  public void testOffsets() throws Exception {
-    doTestOffsets("hello X how X are you");
-    doTestOffsets("hello <p> X<p> how <p>X are you");
-    doTestOffsets("X &amp; X &#40; X &lt; &gt; X");
-
-    // test backtracking
-    doTestOffsets("X < &zz >X &# < X > < &l > &g < X");
-  }
-  
-  @Ignore("broken offsets: see LUCENE-2208")
-  public void testRandom() throws Exception {
-    Analyzer analyzer = new Analyzer() {
-
-      @Override
-      protected TokenStreamComponents createComponents(String fieldName) {
-        Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
-        return new TokenStreamComponents(tokenizer, tokenizer);
-      }
-
-      @Override
-      protected Reader initReader(String fieldName, Reader reader) {
-        return new LegacyHTMLStripCharFilter(new BufferedReader(reader));
-      }
-    };
-    
-    int numRounds = RANDOM_MULTIPLIER * 10000;
-    checkRandomData(random(), analyzer, numRounds);
-  }
-
-  public void testRandomBrokenHTML() throws Exception {
-    int maxNumElements = 10000;
-    String text = TestUtil.randomHtmlishString(random(), maxNumElements);
-    Reader reader
-        = new LegacyHTMLStripCharFilter(new StringReader(text));
-    while (reader.read() != -1);
-  }
-
-  public void testRandomText() throws Exception {
-    StringBuilder text = new StringBuilder();
-    int minNumWords = 10;
-    int maxNumWords = 10000;
-    int minWordLength = 3;
-    int maxWordLength = 20;
-    int numWords = TestUtil.nextInt(random(), minNumWords, maxNumWords);
-    switch (TestUtil.nextInt(random(), 0, 4)) {
-      case 0: {
-        for (int wordNum = 0 ; wordNum < numWords ; ++wordNum) {
-          text.append(TestUtil.randomUnicodeString(random(), maxWordLength));
-          text.append(' ');
-        }
-        break;
-      }
-      case 1: {
-        for (int wordNum = 0 ; wordNum < numWords ; ++wordNum) {
-          text.append(TestUtil.randomRealisticUnicodeString
-              (random(), minWordLength, maxWordLength));
-          text.append(' ');
-        }
-        break;
-      }
-      default: { // ASCII 50% of the time
-        for (int wordNum = 0 ; wordNum < numWords ; ++wordNum) {
-          text.append(TestUtil.randomSimpleString(random()));
-          text.append(' ');
-        }
-      }
-    }
-    Reader reader = new LegacyHTMLStripCharFilter
-        (new StringReader(text.toString()));
-    while (reader.read() != -1);
-  }
-}
diff --git a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
index fa7138f..e0eb3e7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/AliasIntegrationTest.java
@@ -33,10 +33,11 @@
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -48,6 +49,8 @@
  */
 @Slow
 public class AliasIntegrationTest extends AbstractFullDistribZkTestBase {
+
+  private static final Logger logger = LoggerFactory.getLogger(AliasIntegrationTest.class);
   
   @BeforeClass
   public static void beforeSuperClass() throws Exception {
@@ -58,34 +61,28 @@
     
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
-  
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-    resetExceptionIgnores();
-  }
-  
+
   public AliasIntegrationTest() {
     super();
     sliceCount = 1;
-    shardCount = random().nextBoolean() ? 3 : 4;
+    fixShardCount(random().nextBoolean() ? 3 : 4);
   }
   
-  @Override
-  public void doTest() throws Exception {
-    
+  @Test
+  public void test() throws Exception {
+
     handle.clear();
     handle.put("timestamp", SKIPVAL);
-    
+
     waitForThingsToLevelOut(30);
 
+    logger.info("### STARTING ACTUAL TEST");
+
     del("*:*");
     
     createCollection("collection2", 2, 1, 10);
@@ -137,60 +134,55 @@
     query.set("collection", "testalias");
     JettySolrRunner jetty = jettys.get(random().nextInt(jettys.size()));
     int port = jetty.getLocalPort();
-    HttpSolrClient client = new HttpSolrClient(buildUrl(port) + "/testalias");
-    res = client.query(query);
-    assertEquals(3, res.getResults().getNumFound());
-    client.shutdown();
-    client = null;
-    
+    try (HttpSolrClient client = new HttpSolrClient(buildUrl(port) + "/testalias")) {
+      res = client.query(query);
+      assertEquals(3, res.getResults().getNumFound());
+    }
+
     // now without collections param
     query = new SolrQuery("*:*");
     jetty = jettys.get(random().nextInt(jettys.size()));
     port = jetty.getLocalPort();
-    client = new HttpSolrClient(buildUrl(port) + "/testalias");
-    res = client.query(query);
-    assertEquals(3, res.getResults().getNumFound());
-    client.shutdown();
-    client = null;
-    
+    try (HttpSolrClient client = new HttpSolrClient(buildUrl(port) + "/testalias")) {
+      res = client.query(query);
+      assertEquals(3, res.getResults().getNumFound());
+    }
     // create alias, collection2 first because it's not on every node
     createAlias("testalias", "collection2,collection1");
     
     // search with new cloud client
-    CloudSolrClient cloudSolrClient = new CloudSolrClient(zkServer.getZkAddress(), random().nextBoolean());
-    cloudSolrClient.setParallelUpdates(random().nextBoolean());
-    query = new SolrQuery("*:*");
-    query.set("collection", "testalias");
-    res = cloudSolrClient.query(query);
-    assertEquals(5, res.getResults().getNumFound());
-    
-    // Try with setDefaultCollection
-    query = new SolrQuery("*:*");
-    cloudSolrClient.setDefaultCollection("testalias");
-    res = cloudSolrClient.query(query);
-    cloudSolrClient.shutdown();
-    assertEquals(5, res.getResults().getNumFound());
-    
+    try (CloudSolrClient cloudSolrClient = new CloudSolrClient(zkServer.getZkAddress(), random().nextBoolean())) {
+      cloudSolrClient.setParallelUpdates(random().nextBoolean());
+      query = new SolrQuery("*:*");
+      query.set("collection", "testalias");
+      res = cloudSolrClient.query(query);
+      assertEquals(5, res.getResults().getNumFound());
+
+      // Try with setDefaultCollection
+      query = new SolrQuery("*:*");
+      cloudSolrClient.setDefaultCollection("testalias");
+      res = cloudSolrClient.query(query);
+      assertEquals(5, res.getResults().getNumFound());
+    }
+
     // search for alias with random non cloud client
     query = new SolrQuery("*:*");
     query.set("collection", "testalias");
     jetty = jettys.get(random().nextInt(jettys.size()));
     port = jetty.getLocalPort();
-    client = new HttpSolrClient(buildUrl(port) + "/testalias");
-    res = client.query(query);
-    assertEquals(5, res.getResults().getNumFound());
-    
-    
+    try (HttpSolrClient client = new HttpSolrClient(buildUrl(port) + "/testalias")) {
+      res = client.query(query);
+      assertEquals(5, res.getResults().getNumFound());
+    }
     // now without collections param
     query = new SolrQuery("*:*");
     jetty = jettys.get(random().nextInt(jettys.size()));
     port = jetty.getLocalPort();
-    client = new HttpSolrClient(buildUrl(port) + "/testalias");
-    res = client.query(query);
-    assertEquals(5, res.getResults().getNumFound());
-    client.shutdown();
-    client = null;
-    
+    try (HttpSolrClient client = new HttpSolrClient(buildUrl(port) + "/testalias")) {
+      res = client.query(query);
+      assertEquals(5, res.getResults().getNumFound());
+    }
+
     // update alias
     createAlias("testalias", "collection2");
     //checkForAlias("testalias", "collection2");
@@ -213,26 +205,24 @@
     // try a std client
     // search 1 and 2, but have no collections param
     query = new SolrQuery("*:*");
-    HttpSolrClient httpclient = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias");
-    res = httpclient.query(query);
-    assertEquals(5, res.getResults().getNumFound());
-    httpclient.shutdown();
-    httpclient = null;
-    
+    try (HttpSolrClient client = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias")) {
+      res = client.query(query);
+      assertEquals(5, res.getResults().getNumFound());
+    }
+
     createAlias("testalias", "collection2");
     
     // a second alias
     createAlias("testalias2", "collection2");
-    
-    httpclient = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias");
-    SolrInputDocument doc8 = getDoc(id, 11, i1, -600, tlong, 600, t1,
-        "humpty dumpy4 sat on a walls");
-    httpclient.add(doc8);
-    httpclient.commit();
-    res = httpclient.query(query);
-    assertEquals(3, res.getResults().getNumFound());
-    httpclient.shutdown();
-    httpclient = null;
+
+    try (HttpSolrClient client = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias")) {
+      SolrInputDocument doc8 = getDoc(id, 11, i1, -600, tlong, 600, t1,
+          "humpty dumpy4 sat on a walls");
+      client.add(doc8);
+      client.commit();
+      res = client.query(query);
+      assertEquals(3, res.getResults().getNumFound());
+    }
     
     createAlias("testalias", "collection2,collection1");
     
@@ -251,46 +241,47 @@
       sawException = true;
     }
     assertTrue(sawException);
+
+    logger.info("### FINISHED ACTUAL TEST");
   }
 
   private void createAlias(String alias, String collections)
       throws SolrServerException, IOException {
-    SolrClient client = createNewSolrClient("",
-        getBaseUrl((HttpSolrClient) clients.get(0)));
-    if (random().nextBoolean()) {
-      ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set("collections", collections);
-      params.set("name", alias);
-      params.set("action", CollectionAction.CREATEALIAS.toString());
-      QueryRequest request = new QueryRequest(params);
-      request.setPath("/admin/collections");
-      client.request(request);
-    } else {
-      CreateAlias request = new CreateAlias();
-      request.setAliasName(alias);
-      request.setAliasedCollections(collections);
-      request.process(client);
+
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      if (random().nextBoolean()) {
+        ModifiableSolrParams params = new ModifiableSolrParams();
+        params.set("collections", collections);
+        params.set("name", alias);
+        params.set("action", CollectionAction.CREATEALIAS.toString());
+        QueryRequest request = new QueryRequest(params);
+        request.setPath("/admin/collections");
+        client.request(request);
+      } else {
+        CreateAlias request = new CreateAlias();
+        request.setAliasName(alias);
+        request.setAliasedCollections(collections);
+        request.process(client);
+      }
     }
-    client.shutdown();
   }
   
   private void deleteAlias(String alias) throws SolrServerException,
       IOException {
-    SolrClient client = createNewSolrClient("",
-        getBaseUrl((HttpSolrClient) clients.get(0)));
-    if (random().nextBoolean()) {
-      ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set("name", alias);
-      params.set("action", CollectionAction.DELETEALIAS.toString());
-      QueryRequest request = new QueryRequest(params);
-      request.setPath("/admin/collections");
-      client.request(request);
-    } else {
-      DeleteAlias request = new DeleteAlias();
-      request.setAliasName(alias);
-      request.process(client);
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      if (random().nextBoolean()) {
+        ModifiableSolrParams params = new ModifiableSolrParams();
+        params.set("name", alias);
+        params.set("action", CollectionAction.DELETEALIAS.toString());
+        QueryRequest request = new QueryRequest(params);
+        request.setPath("/admin/collections");
+        client.request(request);
+      } else {
+        DeleteAlias request = new DeleteAlias();
+        request.setAliasName(alias);
+        request.process(client);
+      }
     }
-    client.shutdown();
   }
   
   protected void indexDoc(List<CloudJettyRunner> skipServers, Object... fields) throws IOException,
diff --git a/solr/core/src/test/org/apache/solr/cloud/AsyncMigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/AsyncMigrateRouteKeyTest.java
index 82414b2..fa5689d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/AsyncMigrateRouteKeyTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/AsyncMigrateRouteKeyTest.java
@@ -24,7 +24,7 @@
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 
@@ -36,13 +36,8 @@
 
   private static final int MAX_WAIT_SECONDS = 2 * 60;
 
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-  }
-
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     waitForThingsToLevelOut(15);
 
     multipleShardMigrateTest();
@@ -117,14 +112,9 @@
         .getBaseURL();
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
 
-    HttpSolrClient baseServer = null;
-
-    try {
-      baseServer = new HttpSolrClient(baseUrl);
+    try (HttpSolrClient baseServer = new HttpSolrClient(baseUrl)) {
       baseServer.setConnectionTimeout(15000);
       return baseServer.request(request);
-    } finally {
-      baseServer.shutdown();
     }
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java
index d2b83f4..829b044 100644
--- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java
+++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZk2Test.java
@@ -38,6 +38,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.handler.ReplicationHandler;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.FilenameFilter;
@@ -58,21 +59,12 @@
   
   public BasicDistributedZk2Test() {
     super();
-    fixShardCount = true;
-    
     sliceCount = 2;
-    shardCount = 4;
   }
   
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.solr.BaseDistributedSearchTestCase#doTest()
-   * 
-   * Create 3 shards, each with one replica
-   */
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     boolean testFinished = false;
     try {
       handle.clear();
@@ -156,11 +148,9 @@
     
   }
   
-  private void testNodeWithoutCollectionForwarding() throws Exception,
-      SolrServerException, IOException {
-    try {
-      final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
-      HttpSolrClient client = new HttpSolrClient(baseUrl);
+  private void testNodeWithoutCollectionForwarding() throws Exception {
+    final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
+    try (HttpSolrClient client = new HttpSolrClient(baseUrl)) {
       client.setConnectionTimeout(30000);
       Create createCmd = new Create();
       createCmd.setRoles("none");
@@ -169,7 +159,6 @@
       createCmd.setNumShards(1);
       createCmd.setDataDir(getDataDir(createTempDir(ONE_NODE_COLLECTION).toFile().getAbsolutePath()));
       client.request(createCmd);
-      client.shutdown();
     } catch (Exception e) {
       e.printStackTrace();
       fail(e.getMessage());
@@ -182,8 +171,8 @@
     
     int docs = 2;
     for (SolrClient client : clients) {
-      final String baseUrl = getBaseUrl((HttpSolrClient) client);
-      addAndQueryDocs(baseUrl, docs);
+      final String clientUrl = getBaseUrl((HttpSolrClient) client);
+      addAndQueryDocs(clientUrl, docs);
       docs += 2;
     }
   }
@@ -191,36 +180,39 @@
   // 2 docs added every call
   private void addAndQueryDocs(final String baseUrl, int docs)
       throws Exception {
-    HttpSolrClient qclient = new HttpSolrClient(baseUrl + "/onenodecollection" + "core");
-    
-    // it might take a moment for the proxy node to see us in their cloud state
-    waitForNon403or404or503(qclient);
-    
-    // add a doc
-    SolrInputDocument doc = new SolrInputDocument();
-    doc.addField("id", docs);
-    qclient.add(doc);
-    qclient.commit();
-    
+
     SolrQuery query = new SolrQuery("*:*");
-    QueryResponse results = qclient.query(query);
-    assertEquals(docs - 1, results.getResults().getNumFound());
-    qclient.shutdown();
+
+    try (HttpSolrClient qclient = new HttpSolrClient(baseUrl + "/onenodecollection" + "core")) {
+
+      // it might take a moment for the proxy node to see us in their cloud state
+      waitForNon403or404or503(qclient);
+
+      // add a doc
+      SolrInputDocument doc = new SolrInputDocument();
+      doc.addField("id", docs);
+      qclient.add(doc);
+      qclient.commit();
+
+
+      QueryResponse results = qclient.query(query);
+      assertEquals(docs - 1, results.getResults().getNumFound());
+    }
     
-    qclient = new HttpSolrClient(baseUrl + "/onenodecollection");
-    results = qclient.query(query);
-    assertEquals(docs - 1, results.getResults().getNumFound());
-    
-    doc = new SolrInputDocument();
-    doc.addField("id", docs + 1);
-    qclient.add(doc);
-    qclient.commit();
-    
-    query = new SolrQuery("*:*");
-    query.set("rows", 0);
-    results = qclient.query(query);
-    assertEquals(docs, results.getResults().getNumFound());
-    qclient.shutdown();
+    try (HttpSolrClient qclient = new HttpSolrClient(baseUrl + "/onenodecollection")) {
+      QueryResponse results = qclient.query(query);
+      assertEquals(docs - 1, results.getResults().getNumFound());
+
+      SolrInputDocument doc = new SolrInputDocument();
+      doc.addField("id", docs + 1);
+      qclient.add(doc);
+      qclient.commit();
+
+      query = new SolrQuery("*:*");
+      query.set("rows", 0);
+      results = qclient.query(query);
+      assertEquals(docs, results.getResults().getNumFound());
+    }
   }
   
   private long testUpdateAndDelete() throws Exception {
@@ -400,7 +392,7 @@
     // make sure we have published that we are recovering
     Thread.sleep(1500);
     
-    waitForThingsToLevelOut(45);
+    waitForThingsToLevelOut(60);
     
     Thread.sleep(500);
     
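The change that recurs throughout this patch: explicit shutdown() calls guarded by try/finally (or by nothing at all) become try-with-resources blocks, which works because SolrJ clients are now Closeable and close() supersedes shutdown(). A minimal sketch of the before/after shape; FakeClient is a hypothetical stand-in here, not SolrJ:

import java.io.Closeable;

public class TryWithResourcesSketch {

  // Hypothetical stand-in for an HttpSolrClient-like resource.
  static class FakeClient implements Closeable {
    FakeClient(String baseUrl) { System.out.println("open " + baseUrl); }
    void request(String cmd) { System.out.println("request: " + cmd); }
    @Override public void close() { System.out.println("closed"); }
  }

  public static void main(String[] args) {
    // Old shape: manual cleanup that leaks whenever an exception or early
    // return skips the shutdown call.
    FakeClient c1 = new FakeClient("http://127.0.0.1:8983/solr");
    try {
      c1.request("create");
    } finally {
      c1.close(); // stands where client.shutdown() used to
    }

    // New shape: close() is guaranteed to run, even when the body throws.
    try (FakeClient c2 = new FakeClient("http://127.0.0.1:8983/solr")) {
      c2.request("create");
    }
  }
}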
diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
index 0930dcf..2713351 100644
--- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
@@ -18,6 +18,7 @@
  */
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.JSONTestUtil;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
@@ -50,8 +51,8 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.util.DefaultSolrThreadFactory;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -106,20 +107,16 @@
   public static void beforeThisClass2() throws Exception {
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
   }
 
   
   public BasicDistributedZkTest() {
-    fixShardCount = true;
-    
     sliceCount = 2;
-    shardCount = 4;
     completionService = new ExecutorCompletionService<>(executor);
     pending = new HashSet<>();
     
@@ -133,7 +130,7 @@
     } else {
       // use shard ids rather than physical locations
       StringBuilder sb = new StringBuilder();
-      for (int i = 0; i < shardCount; i++) {
+      for (int i = 0; i < getShardCount(); i++) {
         if (i > 0)
           sb.append(',');
         sb.append("shard" + (i + 3));
@@ -141,9 +138,10 @@
       params.set("shards", sb.toString());
     }
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     // setLoggingLevel(null);
 
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
@@ -333,7 +331,7 @@
     params.set("commitWithin", 10);
     add(cloudClient, params , getDoc("id", 300));
     
-    long timeout = System.currentTimeMillis() + 30000;
+    long timeout = System.currentTimeMillis() + 45000;
     while (cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound() != before + 1) {
       if (timeout <= System.currentTimeMillis()) {
         fail("commitWithin did not work");
@@ -381,14 +379,11 @@
     createCmd.setSchemaName("nonexistent_schema.xml");
     
     String url = getBaseUrl(clients.get(0));
-    final HttpSolrClient client = new HttpSolrClient(url);
-    try {
+    try (final HttpSolrClient client = new HttpSolrClient(url)) {
       client.request(createCmd);
       fail("Expected SolrCore create to fail");
     } catch (Exception e) {
       
-    } finally {
-      client.shutdown();
     }
     
     long timeout = System.currentTimeMillis() + 15000;
@@ -509,20 +504,20 @@
   private void testStopAndStartCoresInOneInstance() throws Exception {
     SolrClient client = clients.get(0);
     String url3 = getBaseUrl(client);
-    final HttpSolrClient httpSolrClient = new HttpSolrClient(url3);
-    httpSolrClient.setConnectionTimeout(15000);
-    httpSolrClient.setSoTimeout(60000);
-    ThreadPoolExecutor executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE,
-        5, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
-        new DefaultSolrThreadFactory("testExecutor"));
-    int cnt = 3;
-    
-    // create the cores
-    createCores(httpSolrClient, executor, "multiunload2", 1, cnt);
-    
-    executor.shutdown();
-    executor.awaitTermination(120, TimeUnit.SECONDS);
-    httpSolrClient.shutdown();
+    try (final HttpSolrClient httpSolrClient = new HttpSolrClient(url3)) {
+      httpSolrClient.setConnectionTimeout(15000);
+      httpSolrClient.setSoTimeout(60000);
+      ThreadPoolExecutor executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE,
+          5, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
+          new DefaultSolrThreadFactory("testExecutor"));
+      int cnt = 3;
+
+      // create the cores
+      createCores(httpSolrClient, executor, "multiunload2", 1, cnt);
+
+      executor.shutdown();
+      executor.awaitTermination(120, TimeUnit.SECONDS);
+    }
     
     ChaosMonkey.stop(cloudJettys.get(0).jetty);
     printLayout();
@@ -559,9 +554,7 @@
             createCmd.setDataDir(getDataDir(core3dataDir));
 
             client.request(createCmd);
-          } catch (SolrServerException e) {
-            throw new RuntimeException(e);
-          } catch (IOException e) {
+          } catch (SolrServerException | IOException e) {
             throw new RuntimeException(e);
           }
         }
@@ -608,9 +601,9 @@
           ((HttpSolrClient) clients.get(clientIndex)).getBaseURL().length()
               - DEFAULT_COLLECTION.length() - 1);
       
-      SolrClient aClient = createNewSolrClient("", baseUrl);
-      res.setResponse(aClient.request(request));
-      aClient.shutdown();
+      try (SolrClient aClient = createNewSolrClient("", baseUrl)) {
+        res.setResponse(aClient.request(request));
+      }
     } else {
       res.setResponse(client.request(request));
     }
@@ -733,21 +726,20 @@
 
   private Long getNumCommits(HttpSolrClient sourceClient) throws
       SolrServerException, IOException {
-    HttpSolrClient client = new HttpSolrClient(sourceClient.getBaseURL());
-    client.setConnectionTimeout(15000);
-    client.setSoTimeout(60000);
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("qt", "/admin/mbeans?key=updateHandler&stats=true");
-    // use generic request to avoid extra processing of queries
-    QueryRequest req = new QueryRequest(params);
-    NamedList<Object> resp = client.request(req);
-    NamedList mbeans = (NamedList) resp.get("solr-mbeans");
-    NamedList uhandlerCat = (NamedList) mbeans.get("UPDATEHANDLER");
-    NamedList uhandler = (NamedList) uhandlerCat.get("updateHandler");
-    NamedList stats = (NamedList) uhandler.get("stats");
-    Long commits = (Long) stats.get("commits");
-    client.shutdown();
-    return commits;
+    try (HttpSolrClient client = new HttpSolrClient(sourceClient.getBaseURL())) {
+      client.setConnectionTimeout(15000);
+      client.setSoTimeout(60000);
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set("qt", "/admin/mbeans?key=updateHandler&stats=true");
+      // use generic request to avoid extra processing of queries
+      QueryRequest req = new QueryRequest(params);
+      NamedList<Object> resp = client.request(req);
+      NamedList mbeans = (NamedList) resp.get("solr-mbeans");
+      NamedList uhandlerCat = (NamedList) mbeans.get("UPDATEHANDLER");
+      NamedList uhandler = (NamedList) uhandlerCat.get("updateHandler");
+      NamedList stats = (NamedList) uhandler.get("stats");
+      return (Long) stats.get("commits");
+    }
   }
 
   private void testANewCollectionInOneInstanceWithManualShardAssignement() throws Exception {
@@ -825,32 +817,30 @@
     ZkCoreNodeProps props = new ZkCoreNodeProps(getCommonCloudSolrClient().getZkStateReader().getClusterState().getLeader(oneInstanceCollection2, "slice1"));
     
     // now test that unloading a core gets us a new leader
-    HttpSolrClient unloadClient = new HttpSolrClient(baseUrl);
-    unloadClient.setConnectionTimeout(15000);
-    unloadClient.setSoTimeout(60000);
-    Unload unloadCmd = new Unload(true);
-    unloadCmd.setCoreName(props.getCoreName());
-    
-    String leader = props.getCoreUrl();
-    
-    unloadClient.request(unloadCmd);
-    unloadClient.shutdown();
-    
-    int tries = 50;
-    while (leader.equals(zkStateReader.getLeaderUrl(oneInstanceCollection2, "slice1", 10000))) {
-      Thread.sleep(100);
-      if (tries-- == 0) {
-        fail("Leader never changed");
+    try (HttpSolrClient unloadClient = new HttpSolrClient(baseUrl)) {
+      unloadClient.setConnectionTimeout(15000);
+      unloadClient.setSoTimeout(60000);
+      Unload unloadCmd = new Unload(true);
+      unloadCmd.setCoreName(props.getCoreName());
+
+      String leader = props.getCoreUrl();
+
+      unloadClient.request(unloadCmd);
+
+      int tries = 50;
+      while (leader.equals(zkStateReader.getLeaderUrl(oneInstanceCollection2, "slice1", 10000))) {
+        Thread.sleep(100);
+        if (tries-- == 0) {
+          fail("Leader never changed");
+        }
       }
     }
 
-    for (SolrClient aClient : collectionClients) {
-      aClient.shutdown();
-    }
+    IOUtils.close(collectionClients);
 
   }
 
-  private void testSearchByCollectionName() throws SolrServerException {
+  private void testSearchByCollectionName() throws SolrServerException, IOException {
     log.info("### STARTING testSearchByCollectionName");
     SolrClient client = clients.get(0);
     final String baseUrl = ((HttpSolrClient) client).getBaseURL().substring(
@@ -860,11 +850,11 @@
     
     // the cores each have different names, but if we add the collection name to the url
     // we should get mapped to the right core
-    SolrClient client1 = createNewSolrClient(oneInstanceCollection, baseUrl);
-    SolrQuery query = new SolrQuery("*:*");
-    long oneDocs = client1.query(query).getResults().getNumFound();
-    assertEquals(3, oneDocs);
-    client1.shutdown();
+    try (SolrClient client1 = createNewSolrClient(oneInstanceCollection, baseUrl)) {
+      SolrQuery query = new SolrQuery("*:*");
+      long oneDocs = client1.query(query).getResults().getNumFound();
+      assertEquals(3, oneDocs);
+    }
   }
   
   private void testUpdateByCollectionName() throws SolrServerException, IOException {
@@ -878,9 +868,9 @@
     // the cores each have different names, but if we add the collection name to the url
     // we should get mapped to the right core
     // test hitting an update url
-    SolrClient client1 = createNewSolrClient(oneInstanceCollection, baseUrl);
-    client1.commit();
-    client1.shutdown();
+    try (SolrClient client1 = createNewSolrClient(oneInstanceCollection, baseUrl)) {
+      client1.commit();
+    }
   }
 
   private void testANewCollectionInOneInstance() throws Exception {
@@ -934,9 +924,8 @@
 //    System.out.println("All Docs:" + allDocs);
     
     assertEquals(3, allDocs);
-    for(SolrClient newCollectionClient:collectionClients) {
-      newCollectionClient.shutdown();
-    }
+    IOUtils.close(collectionClients);
+
   }
 
   private void createCollection(String collection,
@@ -950,9 +939,7 @@
     Callable call = new Callable() {
       @Override
       public Object call() {
-        HttpSolrClient client = null;
-        try {
-          client = new HttpSolrClient(baseUrl);
+        try (HttpSolrClient client = new HttpSolrClient(baseUrl)) {
           client.setConnectionTimeout(15000);
           Create createCmd = new Create();
           createCmd.setRoles("none");
@@ -975,10 +962,6 @@
         } catch (Exception e) {
           e.printStackTrace();
           //fail
-        } finally {
-          if (client != null) {
-            client.shutdown();
-          }
         }
         return null;
       }
@@ -1087,9 +1070,8 @@
       Callable call = new Callable() {
         @Override
         public Object call() {
-          HttpSolrClient client = null;
-          try {
-            client = new HttpSolrClient(baseUrl);
+
+          try (HttpSolrClient client = new HttpSolrClient(baseUrl)) {
             client.setConnectionTimeout(15000);
             client.setSoTimeout(60000);
             Create createCmd = new Create();
@@ -1099,10 +1081,6 @@
           } catch (Exception e) {
             e.printStackTrace();
             //fails
-          } finally {
-            if (client != null) {
-              client.shutdown();
-            }
           }
           return null;
         }
@@ -1162,24 +1140,21 @@
   }
   
   @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     if (commondCloudSolrClient != null) {
-      commondCloudSolrClient.shutdown();
+      commondCloudSolrClient.close();
     }
     if (otherCollectionClients != null) {
       for (List<SolrClient> clientList : otherCollectionClients.values()) {
-        for (SolrClient client : clientList) {
-          client.shutdown();
-        }
+        IOUtils.close(clientList);
       }
     }
     otherCollectionClients = null;
     List<Runnable> tasks = executor.shutdownNow();
     assertTrue(tasks.isEmpty());
-    
+
     System.clearProperty("numShards");
-    System.clearProperty("zkHost");
     System.clearProperty("solr.xml.persist");
     
     // insurance
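Loops that shut a list of clients down one by one are collapsed into a single IOUtils.close(clientList) call (Lucene's org.apache.lucene.util.IOUtils, imported at the top of this file). A rough sketch of what a close-all helper like that does, under the simplifying assumption of a single Iterable overload: attempt every close, keep the first exception, attach later ones as suppressed, rethrow once.

import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

public class CloseAllSketch {

  // Close every resource; a failure on one must not prevent closing the rest.
  static void closeAll(Iterable<? extends Closeable> resources) throws IOException {
    IOException first = null;
    for (Closeable c : resources) {
      if (c == null) continue; // null entries are skipped
      try {
        c.close();
      } catch (IOException e) {
        if (first == null) first = e; else first.addSuppressed(e);
      }
    }
    if (first != null) throw first; // surface the earliest failure
  }

  public static void main(String[] args) throws IOException {
    List<Closeable> clients = Arrays.asList(
        () -> System.out.println("client 1 closed"),
        () -> System.out.println("client 2 closed"));
    closeAll(clients); // replaces: for (SolrClient c : clients) c.shutdown();
  }
}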
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
index 166aa53..75f1d36 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeTest.java
@@ -18,7 +18,7 @@
  */
 
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrClient;
@@ -29,12 +29,12 @@
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.core.Diagnostics;
 import org.apache.solr.update.SolrCmdDistributor;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -46,7 +46,7 @@
 import java.util.concurrent.atomic.AtomicInteger;
 
 @Slow
-@SuppressSSL
+@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
 @ThreadLeakLingering(linger = 60000)
 public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase {
   private static final int FAIL_TOLERANCE = 20;
@@ -86,10 +86,9 @@
     return randVals;
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     // can help to hide this when testing and looking at logs
     //ignoreException("shard update error");
     System.setProperty("numShards", Integer.toString(sliceCount));
@@ -97,28 +96,27 @@
   }
   
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     System.clearProperty("numShards");
-    super.tearDown();
-    resetExceptionIgnores();
+    super.distribTearDown();
   }
   
   public ChaosMonkeyNothingIsSafeTest() {
     super();
     sliceCount = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.slicecount", "-1"));
-    shardCount = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.shardcount", "-1"));
-    
     if (sliceCount == -1) {
       sliceCount = random().nextInt(TEST_NIGHTLY ? 5 : 3) + 1;
     }
-    if (shardCount == -1) {
-      shardCount = sliceCount + random().nextInt(TEST_NIGHTLY ? 12 : 2);
+
+    int numShards = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.shardcount", "-1"));
+    if (numShards == -1) {
+      numShards = sliceCount + random().nextInt(TEST_NIGHTLY ? 12 : 2);
     }
+    fixShardCount(numShards);
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     boolean testsSuccesful = false;
     try {
       handle.clear();
@@ -252,13 +250,11 @@
         zkServer.run();
       }
       
-      CloudSolrClient client = createCloudClient("collection1");
-      try {
+
+      try (CloudSolrClient client = createCloudClient("collection1")) {
           createCollection(null, "testcollection",
               1, 1, 1, client, null, "conf1");
 
-      } finally {
-        client.shutdown();
       }
       List<Integer> numShardsNumReplicas = new ArrayList<>(2);
       numShardsNumReplicas.add(1);
@@ -290,7 +286,7 @@
   }
 
   class FullThrottleStopableIndexingThread extends StopableIndexingThread {
-    private HttpClient httpClient = HttpClientUtil.createClient(null);
+    private CloseableHttpClient httpClient = HttpClientUtil.createClient(null);
     private volatile boolean stop = false;
     int clientIndex = 0;
     private ConcurrentUpdateSolrClient cusc;
@@ -389,7 +385,7 @@
       stop = true;
       cusc.blockUntilFinished();
       cusc.shutdownNow();
-      httpClient.getConnectionManager().shutdown();
+      IOUtils.closeQuietly(httpClient);
     }
 
     @Override
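The indexing thread above now holds a CloseableHttpClient and tears it down with IOUtils.closeQuietly(httpClient) instead of reaching into the connection manager. A sketch of a closeQuietly-style helper of the kind used here; the System.err reporting below is illustrative, the real utility would log:

import java.io.Closeable;

public class CloseQuietlySketch {

  // Close a resource and swallow (but report) any failure -- useful in
  // tear-down paths where a failed close should not mask the test's result.
  static void closeQuietly(Closeable c) {
    if (c == null) {
      return; // null-safe by design
    }
    try {
      c.close();
    } catch (Exception e) {
      System.err.println("Error while closing: " + e);
    }
  }

  public static void main(String[] args) {
    closeQuietly(() -> System.out.println("http client closed"));
    closeQuietly(null); // no-op
  }
}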
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java
index 44dd9dc..5df3dcb 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderTest.java
@@ -17,10 +17,6 @@
  * limitations under the License.
  */
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -28,10 +24,13 @@
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.core.Diagnostics;
 import org.apache.solr.update.SolrCmdDistributor;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
 
 @Slow
 public class ChaosMonkeySafeLeaderTest extends AbstractFullDistribZkTestBase {
@@ -69,39 +68,37 @@
     return randVals;
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
+  public void distribSetUp() throws Exception {
     useFactory("solr.StandardDirectoryFactory");
 
-    super.setUp();
+    super.distribSetUp();
     
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
   
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     System.clearProperty("numShards");
-    super.tearDown();
-    resetExceptionIgnores();
+    super.distribTearDown();
   }
   
   public ChaosMonkeySafeLeaderTest() {
     super();
     sliceCount = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.slicecount", "-1"));
-    shardCount = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.shardcount", "-1"));
-    
     if (sliceCount == -1) {
       sliceCount = random().nextInt(TEST_NIGHTLY ? 5 : 3) + 1;
     }
-    if (shardCount == -1) {
-      shardCount = sliceCount + random().nextInt(TEST_NIGHTLY ? 12 : 2);
+
+    int numShards = Integer.parseInt(System.getProperty("solr.tests.cloud.cm.shardcount", "-1"));
+    if (numShards == -1) {
+      numShards = sliceCount + random().nextInt(TEST_NIGHTLY ? 12 : 2);
     }
+    fixShardCount(numShards);
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     
     handle.clear();
     handle.put("timestamp", SKIPVAL);
@@ -171,14 +168,10 @@
       zkServer = new ZkTestServer(zkServer.getZkDir(), zkServer.getPort());
       zkServer.run();
     }
-    
-    CloudSolrClient client = createCloudClient("collection1");
-    try {
-        createCollection(null, "testcollection",
-            1, 1, 1, client, null, "conf1");
 
-    } finally {
-      client.shutdown();
+    try (CloudSolrClient client = createCloudClient("collection1")) {
+      createCollection(null, "testcollection", 1, 1, 1, client, null, "conf1");
+
     }
     List<Integer> numShardsNumReplicas = new ArrayList<>(2);
     numShardsNumReplicas.add(1);
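Both ChaosMonkey tests switch from assigning the inherited shardCount field to calling fixShardCount(numShards), and simpler tests in this patch declare the requirement with @ShardsFixed(num = N) on the test method instead. A toy version of how a runtime method annotation like that can be declared and read reflectively; the names mirror the patch, but this wiring is an illustration, not the actual Solr test framework:

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Method;

public class ShardsFixedSketch {

  // Toy equivalent of the test framework's @ShardsFixed method annotation.
  @Retention(RetentionPolicy.RUNTIME)
  @interface ShardsFixed {
    int num();
  }

  @ShardsFixed(num = 4)
  public void test() {
    // test body would go here
  }

  public static void main(String[] args) throws Exception {
    Method m = ShardsFixedSketch.class.getMethod("test");
    ShardsFixed fixed = m.getAnnotation(ShardsFixed.class);
    // A base class could read this before starting any Jetty instances.
    int shardCount = (fixed != null) ? fixed.num() : 2; // 2 = assumed default
    System.out.println("shardCount = " + shardCount);
  }
}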
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
index 9c3be9b..a99d6ff 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java
@@ -31,9 +31,8 @@
 import org.apache.solr.update.UpdateShardHandler;
 import org.apache.solr.util.MockConfigSolr;
 import org.apache.zookeeper.KeeperException;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.Ignore;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -50,20 +49,8 @@
   static final int TIMEOUT = 10000;
   private AtomicInteger killCounter = new AtomicInteger();
 
-  @Before
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-  }
-
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     waitForThingsToLevelOut(15);
 
     ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
diff --git a/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java
index b870048..bc522c4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CloudExitableDirectoryReaderTest.java
@@ -22,6 +22,7 @@
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,8 +44,8 @@
     return configString;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     handle.clear();
     handle.put("timestamp", SKIPVAL);
     waitForRecoveriesToFinish(false);
@@ -77,17 +78,18 @@
     time than this. Keeping it at 5 because the delaying search component delays all requests 
     by at least 1 second.
      */
-    long fiveSeconds = 5000L;
+    int fiveSeconds = 5000;
     
-    Long timeAllowed = TestUtil.nextLong(random(), fiveSeconds, Long.MAX_VALUE);
+    Integer timeAllowed = TestUtil.nextInt(random(), fiveSeconds, Integer.MAX_VALUE);
     assertSuccess(params("q", "name:a*", "timeAllowed",timeAllowed.toString()));
 
     assertPartialResults(params("q", "name:a*", "timeAllowed", "1"));
 
-    timeAllowed = TestUtil.nextLong(random(), fiveSeconds, Long.MAX_VALUE);
+    timeAllowed = TestUtil.nextInt(random(), fiveSeconds, Integer.MAX_VALUE);
     assertSuccess(params("q", "name:b*", "timeAllowed",timeAllowed.toString()));
 
-    timeAllowed = TestUtil.nextLong(random(), Long.MIN_VALUE, -1L);  // negative timeAllowed should disable timeouts
+    // negative timeAllowed should disable timeouts
+    timeAllowed = TestUtil.nextInt(random(), Integer.MIN_VALUE, -1); 
     assertSuccess(params("q", "name:b*", "timeAllowed",timeAllowed.toString()));
 
     assertSuccess(params("q","name:b*")); // no time limitation
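This hunk narrows the randomized timeAllowed values from the long range to the int range, swapping TestUtil.nextLong for TestUtil.nextInt (both inclusive of their bounds). A self-contained sketch of picking such bounded values with plain java.util.Random; nextInt below is a stand-in for Lucene's TestUtil.nextInt, not the real implementation:

import java.util.Random;

public class BoundedRandomSketch {

  // Random int in [start, end], inclusive on both ends. The span can exceed
  // Integer.MAX_VALUE (e.g. MIN_VALUE..-1), so the arithmetic is done in long.
  static int nextInt(Random r, int start, int end) {
    long range = (long) end - (long) start + 1;
    return (int) (start + (long) (r.nextDouble() * range));
  }

  public static void main(String[] args) {
    Random random = new Random(42);
    int fiveSeconds = 5000;
    // A timeAllowed generous enough that the query should always succeed...
    int timeAllowed = nextInt(random, fiveSeconds, Integer.MAX_VALUE);
    System.out.println("timeAllowed = " + timeAllowed);
    // ...and a negative one, which disables the timeout entirely.
    int disabled = nextInt(random, Integer.MIN_VALUE, -1);
    System.out.println("disabled = " + disabled);
  }
}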
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
index 0e5e522..26889c7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
@@ -28,7 +28,7 @@
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.update.DirectUpdateHandler2;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 
@@ -40,10 +40,9 @@
   private static final int MAX_TIMEOUT_SECONDS = 60;
   private static final boolean DEBUG = false;
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
 
     useJettyDataDir = false;
 
@@ -52,68 +51,61 @@
   }
 
   public CollectionsAPIAsyncDistributedZkTest() {
-    fixShardCount = true;
-
     sliceCount = 1;
-    shardCount = 1;
   }
 
-  @Override
-  public void doTest() throws Exception {
-
-    testSolrJAPICalls();
+  @Test
+  @ShardsFixed(num = 1)
+  public void testSolrJAPICalls() throws Exception {
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      Create createCollectionRequest = new Create();
+      createCollectionRequest.setCollectionName("testasynccollectioncreation");
+      createCollectionRequest.setNumShards(1);
+      createCollectionRequest.setConfigName("conf1");
+      createCollectionRequest.setAsyncId("1001");
+      createCollectionRequest.process(client);
+  
+      String state = getRequestStateAfterCompletion("1001", MAX_TIMEOUT_SECONDS, client);
+  
+      assertEquals("CreateCollection task did not complete!", "completed", state);
+  
+  
+      createCollectionRequest = new Create();
+      createCollectionRequest.setCollectionName("testasynccollectioncreation");
+      createCollectionRequest.setNumShards(1);
+      createCollectionRequest.setConfigName("conf1");
+      createCollectionRequest.setAsyncId("1002");
+      createCollectionRequest.process(client);
+  
+      state = getRequestStateAfterCompletion("1002", MAX_TIMEOUT_SECONDS, client);
+  
+      assertEquals("Recreating a collection with the same name didn't fail, but it should have.", "failed", state);
+  
+      CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica();
+      addReplica.setCollectionName("testasynccollectioncreation");
+      addReplica.setShardName("shard1");
+      addReplica.setAsyncId("1003");
+      client.request(addReplica);
+      state = getRequestStateAfterCompletion("1003", MAX_TIMEOUT_SECONDS, client);
+      assertEquals("Add replica did not complete", "completed", state);
+  
+  
+      SplitShard splitShardRequest = new SplitShard();
+      splitShardRequest.setCollectionName("testasynccollectioncreation");
+      splitShardRequest.setShardName("shard1");
+      splitShardRequest.setAsyncId("1004");
+      splitShardRequest.process(client);
+  
+      state = getRequestStateAfterCompletion("1004", MAX_TIMEOUT_SECONDS * 2, client);
+  
+      assertEquals("Shard split did not complete. Last recorded state: " + state, "completed", state);
+    }
 
     if (DEBUG) {
-      super.printLayout();
+      printLayout();
     }
   }
 
-  private void testSolrJAPICalls() throws Exception {
-    SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
-
-    Create createCollectionRequest = new Create();
-    createCollectionRequest.setCollectionName("testasynccollectioncreation");
-    createCollectionRequest.setNumShards(1);
-    createCollectionRequest.setConfigName("conf1");
-    createCollectionRequest.setAsyncId("1001");
-    createCollectionRequest.process(client);
-
-    String state = getRequestStateAfterCompletion("1001", MAX_TIMEOUT_SECONDS, client);
-
-    assertEquals("CreateCollection task did not complete!", "completed", state);
-
-
-    createCollectionRequest = new Create();
-    createCollectionRequest.setCollectionName("testasynccollectioncreation");
-    createCollectionRequest.setNumShards(1);
-    createCollectionRequest.setConfigName("conf1");
-    createCollectionRequest.setAsyncId("1002");
-    createCollectionRequest.process(client);
-
-    state = getRequestStateAfterCompletion("1002", MAX_TIMEOUT_SECONDS, client);
-
-    assertEquals("Recreating a collection with the same name didn't fail, should have.", "failed", state);
-
-    CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica();
-    addReplica.setCollectionName("testasynccollectioncreation");
-    addReplica.setShardName("shard1");
-    addReplica.setAsyncId("1003");
-    client.request(addReplica);
-    state = getRequestStateAfterCompletion("1003", MAX_TIMEOUT_SECONDS, client);
-    assertEquals("Add replica did not complete", "completed", state);
-
-
-    SplitShard splitShardRequest = new SplitShard();
-    splitShardRequest.setCollectionName("testasynccollectioncreation");
-    splitShardRequest.setShardName("shard1");
-    splitShardRequest.setAsyncId("1004");
-    splitShardRequest.process(client);
-
-    state = getRequestStateAfterCompletion("1004", MAX_TIMEOUT_SECONDS * 2, client);
-
-    assertEquals("Shard split did not complete. Last recorded state: " + state, "completed", state);
-  }
-
   private String getRequestStateAfterCompletion(String requestId, int waitForSeconds, SolrClient client)
       throws IOException, SolrServerException {
     String state = null;
@@ -138,10 +130,9 @@
   }
 
   @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("numShards");
-    System.clearProperty("zkHost");
     System.clearProperty("solr.xml.persist");
     
     // insurance
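A pattern worth calling out across this patch: subclass @Before setUp()/@After tearDown() overrides are renamed to distribSetUp()/distribTearDown(), leaving the JUnit-annotated lifecycle methods to the base class, which then delegates. A toy sketch of that template-method shape; the base-class wiring shown is an assumption about the framework, reduced to plain Java:

public class LifecycleSketch {

  // The base class owns the real lifecycle hooks (annotated with JUnit's
  // @Before/@After in the actual framework) and delegates to overridables.
  static class BaseDistributedTest {
    public final void setUp() throws Exception { distribSetUp(); }
    public final void tearDown() throws Exception { distribTearDown(); }

    public void distribSetUp() throws Exception { System.out.println("base setup"); }
    public void distribTearDown() throws Exception { System.out.println("base teardown"); }
  }

  static class MyZkTest extends BaseDistributedTest {
    @Override public void distribSetUp() throws Exception {
      super.distribSetUp(); // subclasses extend the lifecycle, never replace it
      System.out.println("set numShards property");
    }

    @Override public void distribTearDown() throws Exception {
      System.out.println("clear numShards property");
      super.distribTearDown();
    }
  }

  public static void main(String[] args) throws Exception {
    MyZkTest t = new MyZkTest();
    t.setUp();
    t.tearDown();
  }
}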
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
index b47e963..9b30edd 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIDistributedZkTest.java
@@ -19,6 +19,7 @@
 
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -53,12 +54,11 @@
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoMBean.Category;
-import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.servlet.SolrDispatchFilter;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.util.DefaultSolrThreadFactory;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import javax.management.MBeanServer;
 import javax.management.MBeanServerFactory;
@@ -66,6 +66,9 @@
 import java.io.File;
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -108,26 +111,18 @@
   
   // we randomly use a second config set rather than just one
   private boolean secondConfigSet = random().nextBoolean();
-  private boolean oldStyleSolrXml = false;
   
   @BeforeClass
   public static void beforeThisClass2() throws Exception {
 
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     
     useJettyDataDir = false;
-    
-    oldStyleSolrXml = random().nextBoolean();
-    if (oldStyleSolrXml) {
-      System.err.println("Using old style solr.xml");
-    } else {
-      System.err.println("Using new style solr.xml");
-    }
+
     if (secondConfigSet ) {
       String zkHost = zkServer.getZkHost();
       String zkAddress = zkServer.getZkAddress();
@@ -156,20 +151,16 @@
     }
     
     System.setProperty("numShards", Integer.toString(sliceCount));
-    System.setProperty("solr.xml.persist", "true");
+
   }
   
   protected String getSolrXml() {
-    // test old style and new style solr.xml
-    return oldStyleSolrXml ? "solr-no-core-old-style.xml" : "solr-no-core.xml";
+    return "solr-no-core.xml";
   }
 
   
   public CollectionsAPIDistributedZkTest() {
-    fixShardCount = true;
-    
     sliceCount = 2;
-    shardCount = 4;
     completionService = new ExecutorCompletionService<>(executor);
     pending = new HashSet<>();
     checkCreatedVsState = false;
@@ -184,7 +175,7 @@
     } else {
       // use shard ids rather than physical locations
       StringBuilder sb = new StringBuilder();
-      for (int i = 0; i < shardCount; i++) {
+      for (int i = 0; i < getShardCount(); i++) {
         if (i > 0)
           sb.append(',');
         sb.append("shard" + (i + 3));
@@ -192,9 +183,10 @@
       params.set("shards", sb.toString());
     }
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     testNodesUsedByCreate();
     testCollectionsAPI();
     testCollectionsAPIAddRemoveStress();
@@ -231,8 +223,7 @@
     QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
     try {
-      NamedList<Object> resp = createNewSolrClient("", baseUrl)
-          .request(request);
+      makeRequest(baseUrl, request);
       fail("Expected to fail, because collection is not in clusterstate");
     } catch (RemoteSolrException e) {
       
@@ -256,7 +247,8 @@
     if (secondConfigSet) {
       createCmd.setCollectionConfigName("conf1");
     }
-    createNewSolrClient("", baseUrl).request(createCmd);
+
+    makeRequest(baseUrl, createCmd);
 
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionAction.DELETE.toString());
@@ -264,7 +256,7 @@
     QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
 
-    NamedList<Object> resp = createNewSolrClient("", baseUrl).request(request);
+    makeRequest(baseUrl, request);
     
     checkForMissingCollection(collectionName);
     
@@ -278,7 +270,7 @@
     if (secondConfigSet) {
       params.set("collection.configName", "conf1");
     }
-    resp = createNewSolrClient("", baseUrl).request(request);
+    makeRequest(baseUrl, request);
   }
   
   
@@ -286,11 +278,13 @@
     String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
     // now try to remove a collection when a couple of its nodes are down
     if (secondConfigSet) {
-      createCollection(null, "halfdeletedcollection2", 3, 3, 6,
-          createNewSolrClient("", baseUrl), null, "conf2");
+      try (SolrClient client = createNewSolrClient("", baseUrl)) {
+        createCollection(null, "halfdeletedcollection2", 3, 3, 6, client, null, "conf2");
+      }
     } else {
-      createCollection(null, "halfdeletedcollection2", 3, 3, 6,
-          createNewSolrClient("", baseUrl), null);
+      try (SolrClient client = createNewSolrClient("", baseUrl)) {
+        createCollection(null, "halfdeletedcollection2", 3, 3, 6, client, null);
+      }
     }
     
     waitForRecoveriesToFinish("halfdeletedcollection2", false);
@@ -313,7 +307,7 @@
     QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
     
-    createNewSolrClient("", baseUrl).request(request);
+    makeRequest(baseUrl, request);
     
     long timeout = System.currentTimeMillis() + 10000;
     while (cloudClient.getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2")) {
@@ -329,6 +323,13 @@
 
   }
 
+  private NamedList<Object> makeRequest(String baseUrl, SolrRequest request)
+      throws SolrServerException, IOException {
+    try (SolrClient client = createNewSolrClient("", baseUrl)) {
+      return client.request(request);
+    }
+  }
+
   private void testErrorHandling() throws Exception {
     final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
     
@@ -342,9 +343,8 @@
     QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
     boolean gotExp = false;
-    NamedList<Object> resp = null;
     try {
-      resp = createNewSolrClient("", baseUrl).request(request);
+      makeRequest(baseUrl, request);
     } catch (SolrException e) {
       gotExp = true;
     }
@@ -364,9 +364,8 @@
     request = new QueryRequest(params);
     request.setPath("/admin/collections");
     gotExp = false;
-    resp = null;
     try {
-      resp = createNewSolrClient("", baseUrl).request(request);
+      makeRequest(baseUrl, request);
     } catch (SolrException e) {
       gotExp = true;
     }
@@ -386,7 +385,7 @@
     request.setPath("/admin/collections");
     gotExp = false;
     try {
-      resp = createNewSolrClient("", baseUrl).request(request);
+      makeRequest(baseUrl, request);
     } catch (SolrException e) {
       gotExp = true;
     }
@@ -404,9 +403,8 @@
     request = new QueryRequest(params);
     request.setPath("/admin/collections");
     gotExp = false;
-    resp = null;
     try {
-      resp = createNewSolrClient("", baseUrl).request(request);
+      makeRequest(baseUrl, request);
     } catch (SolrException e) {
       gotExp = true;
     }
@@ -425,9 +423,8 @@
     request = new QueryRequest(params);
     request.setPath("/admin/collections");
     gotExp = false;
-    resp = null;
     try {
-      resp = createNewSolrClient("", baseUrl).request(request);
+      makeRequest(baseUrl, request);
     } catch (SolrException e) {
       gotExp = true;
     }
@@ -446,7 +443,7 @@
     if (secondConfigSet) {
       createCmd.setCollectionConfigName("conf1");
     }
-    createNewSolrClient("", baseUrl).request(createCmd);
+    makeRequest(baseUrl, createCmd);
     
     createCmd = new Create();
     createCmd.setCoreName("halfcollection_shard1_replica1");
@@ -457,7 +454,7 @@
     if (secondConfigSet) {
       createCmd.setCollectionConfigName("conf1");
     }
-    createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(1))).request(createCmd);
+    makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
     
     params = new ModifiableSolrParams();
     params.set("action", CollectionAction.CREATE.toString());
@@ -477,7 +474,7 @@
     request = new QueryRequest(params);
     request.setPath("/admin/collections");
     gotExp = false;
-    resp = createNewSolrClient("", baseUrl).request(request);
+    NamedList<Object> resp = makeRequest(baseUrl, request);
     
     SimpleOrderedMap success = (SimpleOrderedMap) resp.get("success");
     SimpleOrderedMap failure = (SimpleOrderedMap) resp.get("failure");
@@ -507,15 +504,13 @@
       createCmd.setCollectionConfigName("conf1");
     }
     
-    createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(1)))
-        .request(createCmd);
+    makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
     
     // try and create a SolrCore with no collection name
     createCmd.setCollection(null);
     createCmd.setCoreName("corewithnocollection2");
     
-    createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(1)))
-        .request(createCmd);
+    makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
     
     // in both cases, the collection should have default to the core name
     cloudClient.getZkStateReader().updateClusterState(true);
@@ -542,7 +537,7 @@
     
     QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
-    createNewSolrClient("", baseUrl).request(request);
+    makeRequest(baseUrl, request);
     
     List<Integer> numShardsNumReplicaList = new ArrayList<>();
     numShardsNumReplicaList.add(2);
@@ -591,7 +586,7 @@
     int cnt = random().nextInt(TEST_NIGHTLY ? 6 : 1) + 1;
     
     for (int i = 0; i < cnt; i++) {
-      int numShards = TestUtil.nextInt(random(), 0, shardCount) + 1;
+      int numShards = TestUtil.nextInt(random(), 0, getShardCount()) + 1;
       int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 1;
       int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
           .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
@@ -626,7 +621,7 @@
               numShards, replicationFactor, maxShardsPerNode, client, null);
         }
       } finally {
-        if (client != null) client.shutdown();
+        if (client != null) client.close();
       }
     }
     
@@ -638,11 +633,10 @@
       
       String url = getUrlFromZk(collection);
 
-      HttpSolrClient collectionClient = new HttpSolrClient(url);
-      
-      // poll for a second - it can take a moment before we are ready to serve
-      waitForNon403or404or503(collectionClient);
-      collectionClient.shutdown();
+      try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+        // poll for a second - it can take a moment before we are ready to serve
+        waitForNon403or404or503(collectionClient);
+      }
     }
     
     // sometimes we restart one of the jetty nodes
@@ -658,11 +652,10 @@
         
         String url = getUrlFromZk(collection);
         
-        HttpSolrClient collectionClient = new HttpSolrClient(url);
-        
-        // poll for a second - it can take a moment before we are ready to serve
-        waitForNon403or404or503(collectionClient);
-        collectionClient.shutdown();
+        try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+          // poll for a second - it can take a moment before we are ready to serve
+          waitForNon403or404or503(collectionClient);
+        }
       }
     }
 
@@ -705,29 +698,27 @@
     
     String url = getUrlFromZk(collectionName);
 
-    HttpSolrClient collectionClient = new HttpSolrClient(url);
-    
-    
-    // lets try and use the solrj client to index a couple documents
-    SolrInputDocument doc1 = getDoc(id, 6, i1, -600, tlong, 600, t1,
-        "humpty dumpy sat on a wall");
-    SolrInputDocument doc2 = getDoc(id, 7, i1, -600, tlong, 600, t1,
-        "humpty dumpy3 sat on a walls");
-    SolrInputDocument doc3 = getDoc(id, 8, i1, -600, tlong, 600, t1,
-        "humpty dumpy2 sat on a walled");
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
 
-    collectionClient.add(doc1);
-    
-    collectionClient.add(doc2);
+      // let's try to use the solrj client to index a couple of documents
+      SolrInputDocument doc1 = getDoc(id, 6, i1, -600, tlong, 600, t1,
+          "humpty dumpy sat on a wall");
+      SolrInputDocument doc2 = getDoc(id, 7, i1, -600, tlong, 600, t1,
+          "humpty dumpy3 sat on a walls");
+      SolrInputDocument doc3 = getDoc(id, 8, i1, -600, tlong, 600, t1,
+          "humpty dumpy2 sat on a walled");
 
-    collectionClient.add(doc3);
-    
-    collectionClient.commit();
-    
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    collectionClient.shutdown();
-    collectionClient = null;
-    
+      collectionClient.add(doc1);
+
+      collectionClient.add(doc2);
+
+      collectionClient.add(doc3);
+
+      collectionClient.commit();
+
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+    }
+
     // let's try a collection reload
     
     // get core open times
@@ -743,7 +734,7 @@
     // we can use this client because we just want base url
     final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
     
-    createNewSolrClient("", baseUrl).request(request);
+    makeRequest(baseUrl, request);
 
     // reloads make take a short while
     boolean allTimesAreCorrect = waitForReloads(collectionName, urlToTimeBefore);
@@ -759,7 +750,7 @@
     request = new QueryRequest(params);
     request.setPath("/admin/collections");
  
-    createNewSolrClient("", baseUrl).request(request);
+    makeRequest(baseUrl, request);
     
     // ensure its out of the state
     checkForMissingCollection(collectionName);
@@ -775,7 +766,7 @@
  
     boolean exp = false;
     try {
-      createNewSolrClient("", baseUrl).request(request);
+      makeRequest(baseUrl, request);
     } catch (SolrException e) {
       exp = true;
     }
@@ -795,7 +786,7 @@
     }
     request = new QueryRequest(params);
     request.setPath("/admin/collections");
-    createNewSolrClient("", baseUrl).request(request);
+    makeRequest(baseUrl, request);
     
     List<Integer> list = new ArrayList<>(2);
     list.add(1);
@@ -804,13 +795,11 @@
     
     url = getUrlFromZk(collectionName);
     
-    collectionClient = new HttpSolrClient(url);
-    
-    // poll for a second - it can take a moment before we are ready to serve
-    waitForNon403or404or503(collectionClient);
-    collectionClient.shutdown();
-    collectionClient = null;
-    
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+      // poll for a second - it can take a moment before we are ready to serve
+      waitForNon403or404or503(collectionClient);
+    }
+
     for (int j = 0; j < cnt; j++) {
       waitForRecoveriesToFinish(collectionName, zkStateReader, false);
     }
@@ -821,8 +810,7 @@
     int replicationFactor = 2;
     int maxShardsPerNode = 1;
     collectionInfos = new HashMap<>();
-    CloudSolrClient client = createCloudClient("awholynewcollection_" + cnt);
-    try {
+    try (CloudSolrClient client = createCloudClient("awholynewcollection_" + cnt)) {
       exp = false;
       try {
         createCollection(collectionInfos, "awholynewcollection_" + cnt,
@@ -831,8 +819,6 @@
         exp = true;
       }
       assertTrue("expected exception", exp);
-    } finally {
-      client.shutdown();
     }
 
     
@@ -854,19 +840,17 @@
     numShards = createNodeList.size() * maxShardsPerNode;
     replicationFactor = 1;
     collectionInfos = new HashMap<>();
-    client = createCloudClient("awholynewcollection_" + (cnt+1));
-    try {
+
+    try (SolrClient client = createCloudClient("awholynewcollection_" + (cnt+1))) {
       CollectionAdminResponse res = createCollection(collectionInfos, "awholynewcollection_" + (cnt+1), numShards, replicationFactor, maxShardsPerNode, client, StrUtils.join(createNodeList, ','), "conf1");
       assertTrue(res.isSuccess());
-    } finally {
-      client.shutdown();
     }
     checkForCollection(collectionInfos.keySet().iterator().next(), collectionInfos.entrySet().iterator().next().getValue(), createNodeList);
     
     checkNoTwoShardsUseTheSameIndexDir();
     if(disableLegacy) {
       setClusterProp(client1, ZkStateReader.LEGACY_CLOUD, null);
-      client1.shutdown();
+      client1.close();
     }
   }
   
@@ -874,10 +858,8 @@
     
     class CollectionThread extends Thread {
       
-      private String name;
-
       public CollectionThread(String name) {
-        this.name = name;
+        super(name);
       }
       
       public void run() {
@@ -886,22 +868,14 @@
         int cnt = random().nextInt(TEST_NIGHTLY ? 13 : 1) + 1;
         
         for (int i = 0; i < cnt; i++) {
-          String collectionName = "awholynewstresscollection_" + name + "_" + i;
-          int numShards = TestUtil.nextInt(random(), 0, shardCount * 2) + 1;
+          String collectionName = "awholynewstresscollection_" + getName() + "_" + i;
+          int numShards = TestUtil.nextInt(random(), 0, getShardCount() * 2) + 1;
           int replicationFactor = TestUtil.nextInt(random(), 0, 3) + 1;
           int maxShardsPerNode = (((numShards * 2 * replicationFactor) / getCommonCloudSolrClient()
               .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
-          
-          CloudSolrClient client = null;
-          try {
-            if (i == 0) {
-              client = createCloudClient(null);
-            } else if (i == 1) {
-              client = createCloudClient(collectionName);
-            } else  {
-              client = createCloudClient(null);
-            }
-            
+
+          try (CloudSolrClient client = createCloudClient(i == 1 ? collectionName : null)) {
+
             createCollection(collectionInfos, collectionName,
                 numShards, replicationFactor, maxShardsPerNode, client, null,
                 "conf1");
@@ -910,14 +884,9 @@
             CollectionAdminRequest.Delete delete = new CollectionAdminRequest.Delete();
             delete.setCollectionName(collectionName);
             client.request(delete);
-          } catch (SolrServerException e) {
+          } catch (SolrServerException | IOException e) {
             e.printStackTrace();
             throw new RuntimeException(e);
-          } catch (IOException e) {
-            e.printStackTrace();
-            throw new RuntimeException(e);
-          } finally {
-            if (client != null) client.shutdown();
           }
         }
       }
@@ -937,28 +906,22 @@
     }
   }
 
-  private void checkInstanceDirs(JettySolrRunner jetty) {
+  private void checkInstanceDirs(JettySolrRunner jetty) throws IOException {
     CoreContainer cores = ((SolrDispatchFilter) jetty.getDispatchFilter()
         .getFilter()).getCores();
     Collection<SolrCore> theCores = cores.getCores();
     for (SolrCore core : theCores) {
-      if (!oldStyleSolrXml) {
-        // look for core props file
-        assertTrue("Could not find expected core.properties file",
-            new File((String) core.getStatistics().get("instanceDir"),
-                "core.properties").exists());
-      }
 
-      try {
-        assertEquals(
-           new File(SolrResourceLoader.normalizeDir(jetty.getSolrHome() + File.separator
-                + core.getName())).getCanonicalPath(),
-            new File(SolrResourceLoader.normalizeDir((String) core.getStatistics().get(
-                "instanceDir"))).getCanonicalPath());
-      } catch (IOException e) {
-        log.error("Failed to get canonical path", e);
-        fail("Failed to get canonical path");
-      }
+      // look for core props file
+      assertTrue("Could not find expected core.properties file",
+          new File((String) core.getStatistics().get("instanceDir"),
+              "core.properties").exists());
+
+      Path expected = Paths.get(jetty.getSolrHome()).toAbsolutePath().resolve("cores").resolve(core.getName());
+      Path reported = Paths.get((String) core.getStatistics().get("instanceDir"));
+
+      assertTrue("Expected: " + expected + "\nFrom core stats: " + reported, Files.isSameFile(expected, reported));
+
     }
   }
 
@@ -1009,12 +972,9 @@
         while (shardIt.hasNext()) {
           Entry<String,Replica> shardEntry = shardIt.next();
           ZkCoreNodeProps coreProps = new ZkCoreNodeProps(shardEntry.getValue());
-          HttpSolrClient server = new HttpSolrClient(coreProps.getBaseUrl());
           CoreAdminResponse mcr;
-          try {
+          try (HttpSolrClient server = new HttpSolrClient(coreProps.getBaseUrl())) {
             mcr = CoreAdminRequest.getStatus(coreProps.getCoreName(), server);
-          } finally {
-            server.shutdown();
           }
           long before = mcr.getStartTime(coreProps.getCoreName()).getTime();
           urlToTime.put(coreProps.getCoreUrl(), before);
@@ -1125,8 +1085,7 @@
 
   private void addReplicaTest() throws Exception {
     String collectionName = "addReplicaColl";
-    CloudSolrClient client = createCloudClient(null);
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       createCollection(collectionName, client, 2, 2);
       String newReplicaName = Assign.assignNode(collectionName, client.getZkStateReader().getClusterState());
       ArrayList<String> nodeList = new ArrayList<>(client.getZkStateReader().getClusterState().getLiveNodes());
@@ -1168,9 +1127,6 @@
 
       assertNotNull(newReplica);
 
-
-    } finally {
-      client.shutdown();
     }
 
   }
@@ -1202,10 +1158,9 @@
   }
   
   @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("numShards");
-    System.clearProperty("zkHost");
     System.clearProperty("solr.xml.persist");
 
     // insurance
@@ -1213,12 +1168,10 @@
   }
 
   private void clusterPropTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
-
-    assertTrue("cluster property not set", setClusterProp(client, ZkStateReader.LEGACY_CLOUD, "false"));
-    assertTrue("cluster property not unset ", setClusterProp(client, ZkStateReader.LEGACY_CLOUD, null));
-
-    client.shutdown();
+    try (CloudSolrClient client = createCloudClient(null)) {
+      assertTrue("cluster property not set", setClusterProp(client, ZkStateReader.LEGACY_CLOUD, "false"));
+      assertTrue("cluster property not unset", setClusterProp(client, ZkStateReader.LEGACY_CLOUD, null));
+    }
   }
 
   public static boolean setClusterProp(CloudSolrClient client, String name , String val) throws SolrServerException, IOException, InterruptedException {
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTests.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTests.java
index d247e1c..89a3a08 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTests.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTests.java
@@ -34,6 +34,7 @@
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.zookeeper.KeeperException;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
@@ -49,9 +50,9 @@
 
 @LuceneTestCase.Slow
 public class CollectionsAPISolrJTests extends AbstractFullDistribZkTestBase {
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     testCreateAndDeleteCollection();
     testCreateAndDeleteShard();
     testReloadCollection();
@@ -67,19 +68,6 @@
     testBalanceShardUnique();
   }
 
-  public void tearDown() throws Exception {
-    if (controlClient != null) {
-      controlClient.shutdown();
-    }
-    if (cloudClient != null) {
-      cloudClient.shutdown();
-    }
-    if (controlClientCloud != null) {
-      controlClientCloud.shutdown();
-    }
-    super.tearDown();
-  }
-
   protected void testCreateAndDeleteCollection() throws Exception {
     String collectionName = "solrj_test";
     CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create();
@@ -269,9 +257,8 @@
 
     Replica replica1 = testCollection.getReplica("core_node1");
 
-    HttpSolrClient solrServer = new HttpSolrClient(replica1.getStr("base_url"));
-    try {
-      CoreAdminResponse status = CoreAdminRequest.getStatus(replica1.getStr("core"), solrServer);
+    try (HttpSolrClient client = new HttpSolrClient(replica1.getStr("base_url"))) {
+      CoreAdminResponse status = CoreAdminRequest.getStatus(replica1.getStr("core"), client);
       NamedList<Object> coreStatus = status.getCoreStatus(replica1.getStr("core"));
       String dataDirStr = (String) coreStatus.get("dataDir");
       String instanceDirStr = (String) coreStatus.get("instanceDir");
@@ -279,9 +266,6 @@
           new File(instanceDirStr).getAbsolutePath(), instanceDir.getAbsolutePath());
       assertEquals("Data dir does not match param given in property.dataDir syntax",
           new File(dataDirStr).getAbsolutePath(), dataDir.getAbsolutePath());
-
-    } finally {
-      solrServer.shutdown();
     }
 
     CollectionAdminRequest.Delete deleteCollectionRequest = new CollectionAdminRequest.Delete();
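
For context, the Collections API requests exercised above follow a simple build-then-process shape; a hedged sketch (collection name and shard count invented):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;
    import org.apache.solr.client.solrj.response.CollectionAdminResponse;

    class CollectionAdminSketch {
      // Create a collection, check the response, then delete it again.
      static void roundTrip(SolrClient client) throws Exception {
        CollectionAdminRequest.Create create = new CollectionAdminRequest.Create();
        create.setCollectionName("solrj_test");
        create.setNumShards(2);
        CollectionAdminResponse rsp = create.process(client);
        if (!rsp.isSuccess()) {
          throw new IllegalStateException("collection create failed: " + rsp);
        }

        CollectionAdminRequest.Delete delete = new CollectionAdminRequest.Delete();
        delete.setCollectionName("solrj_test");
        delete.process(client);
      }
    }
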
diff --git a/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java b/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
index c6c9e4e..fa3e212 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CustomCollectionTest.java
@@ -17,29 +17,10 @@
  * limitations under the License.
  */
 
-import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
-import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
-import static org.apache.solr.common.params.ShardParams._ROUTE_;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.CompletionService;
-import java.util.concurrent.ExecutorCompletionService;
-import java.util.concurrent.Future;
-import java.util.concurrent.SynchronousQueue;
-import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
+import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -58,13 +39,35 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.util.DefaultSolrThreadFactory;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.CompletionService;
+import java.util.concurrent.ExecutorCompletionService;
+import java.util.concurrent.Future;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
+import static org.apache.solr.common.params.ShardParams._ROUTE_;
 
 /**
  * Tests the Custom Sharding API.
  */
 @Slow
+@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
 public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
 
   private static final String DEFAULT_COLLECTION = "collection1";
@@ -81,10 +84,9 @@
   public static void beforeThisClass2() throws Exception {
   }
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
   }
@@ -95,10 +97,7 @@
 
 
   public CustomCollectionTest() {
-    fixShardCount = true;
-
     sliceCount = 2;
-    shardCount = 4;
     completionService = new ExecutorCompletionService<>(executor);
     pending = new HashSet<>();
     checkCreatedVsState = false;
@@ -113,7 +112,7 @@
     } else {
       // use shard ids rather than physical locations
       StringBuilder sb = new StringBuilder();
-      for (int i = 0; i < shardCount; i++) {
+      for (int i = 0; i < getShardCount(); i++) {
         if (i > 0)
           sb.append(',');
         sb.append("shard" + (i + 3));
@@ -122,8 +121,9 @@
     }
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     testCustomCollectionsAPI();
     testRouteFieldForHashRouter();
     testCreateShardRepFactor();
@@ -183,7 +183,7 @@
 
         createCollection(collectionInfos, COLL_PREFIX + i,props,client);
       } finally {
-        if (client != null) client.shutdown();
+        if (client != null) client.close();
       }
     }
 
@@ -195,11 +195,10 @@
 
       String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collection);
 
-      HttpSolrClient collectionClient = new HttpSolrClient(url);
-
-      // poll for a second - it can take a moment before we are ready to serve
-      waitForNon403or404or503(collectionClient);
-      collectionClient.shutdown();
+      try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+        // poll for a second - it can take a moment before we are ready to serve
+        waitForNon403or404or503(collectionClient);
+      }
     }
     ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
     for (int j = 0; j < cnt; j++) {
@@ -223,135 +222,126 @@
 
     String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
 
-    HttpSolrClient collectionClient = new HttpSolrClient(url);
-
-
-    // lets try and use the solrj client to index a couple documents
-
-    collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
-        "humpty dumpy sat on a wall", _ROUTE_,"a"));
-
-    collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
-        "humpty dumpy3 sat on a walls", _ROUTE_,"a"));
-
-    collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
-        "humpty dumpy2 sat on a walled", _ROUTE_,"a"));
-
-    collectionClient.commit();
-
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
-
-    collectionClient.deleteByQuery("*:*");
-    collectionClient.commit(true,true);
-    assertEquals(0, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-
-    UpdateRequest up = new UpdateRequest();
-    up.setParam(_ROUTE_, "c");
-    up.setParam("commit","true");
-
-    up.add(getDoc(id, 9, i1, -600, tlong, 600, t1,
-        "humpty dumpy sat on a wall"));
-    up.add(getDoc(id, 10, i1, -600, tlong, 600, t1,
-        "humpty dumpy3 sat on a walls"));
-    up.add(getDoc(id, 11, i1, -600, tlong, 600, t1,
-        "humpty dumpy2 sat on a walled"));
-
-    collectionClient.request(up);
-
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"c")).getResults().getNumFound());
-
-    //Testing CREATESHARD
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("action", CollectionAction.CREATESHARD.toString());
-    params.set("collection", collectionName);
-    params.set("shard", "x");
-    SolrRequest request = new QueryRequest(params);
-    request.setPath("/admin/collections");
-    createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0))).request(request);
-    waitForCollection(zkStateReader,collectionName,4);
-    //wait for all the replicas to become active
-    int attempts = 0;
-    while(true){
-      if(attempts>30 ) fail("Not enough active replicas in the shard 'x'");
-      attempts++;
-      int activeReplicaCount = 0;
-      for (Replica x : zkStateReader.getClusterState().getCollection(collectionName).getSlice("x").getReplicas()) {
-        if("active".equals(x.getStr("state"))) activeReplicaCount++;
-      }
-      Thread.sleep(500);
-      if(activeReplicaCount >= replicationFactor) break;
-    }
-    log.info(zkStateReader.getClusterState().toString());
-
-    collectionClient.add(getDoc(id, 66, i1, -600, tlong, 600, t1,
-        "humpty dumpy sat on a wall", _ROUTE_,"x"));
-    collectionClient.commit();
-    assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"x")).getResults().getNumFound());
-
-
-    int numShards = 4;
-    replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
-    int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
-        .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
-
-
-    CloudSolrClient client = null;
     String shard_fld = "shard_s";
-    try {
-      client = createCloudClient(null);
-      Map<String, Object> props = ZkNodeProps.makeMap(
-          "router.name", ImplicitDocRouter.NAME,
-          REPLICATION_FACTOR, replicationFactor,
-          MAX_SHARDS_PER_NODE, maxShardsPerNode,
-          SHARDS_PROP,"a,b,c,d",
-          "router.field", shard_fld);
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
 
-      collectionName = COLL_PREFIX + "withShardField";
-      createCollection(collectionInfos, collectionName,props,client);
-    } finally {
-      if (client != null) client.shutdown();
+      // let's try to use the solrj client to index a couple of documents
+  
+      collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
+          "humpty dumpy sat on a wall", _ROUTE_,"a"));
+  
+      collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
+          "humpty dumpy3 sat on a walls", _ROUTE_,"a"));
+  
+      collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
+          "humpty dumpy2 sat on a walled", _ROUTE_,"a"));
+  
+      collectionClient.commit();
+  
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+      assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
+  
+      collectionClient.deleteByQuery("*:*");
+      collectionClient.commit(true,true);
+      assertEquals(0, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+  
+      UpdateRequest up = new UpdateRequest();
+      up.setParam(_ROUTE_, "c");
+      up.setParam("commit","true");
+  
+      up.add(getDoc(id, 9, i1, -600, tlong, 600, t1,
+          "humpty dumpy sat on a wall"));
+      up.add(getDoc(id, 10, i1, -600, tlong, 600, t1,
+          "humpty dumpy3 sat on a walls"));
+      up.add(getDoc(id, 11, i1, -600, tlong, 600, t1,
+          "humpty dumpy2 sat on a walled"));
+  
+      collectionClient.request(up);
+  
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+      assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"c")).getResults().getNumFound());
+  
+      //Testing CREATESHARD
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set("action", CollectionAction.CREATESHARD.toString());
+      params.set("collection", collectionName);
+      params.set("shard", "x");
+      SolrRequest request = new QueryRequest(params);
+      request.setPath("/admin/collections");
+      try (SolrClient server = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+        server.request(request);
+      }
+      waitForCollection(zkStateReader,collectionName,4);
+      //wait for all the replicas to become active
+      int attempts = 0;
+      while (true) {
+        if (attempts > 30) fail("Not enough active replicas in the shard 'x'");
+        attempts++;
+        int activeReplicaCount = 0;
+        for (Replica x : zkStateReader.getClusterState().getCollection(collectionName).getSlice("x").getReplicas()) {
+          if("active".equals(x.getStr("state"))) activeReplicaCount++;
+        }
+        Thread.sleep(500);
+        if(activeReplicaCount >= replicationFactor) break;
+      }
+      log.info(zkStateReader.getClusterState().toString());
+  
+      collectionClient.add(getDoc(id, 66, i1, -600, tlong, 600, t1,
+          "humpty dumpy sat on a wall", _ROUTE_,"x"));
+      collectionClient.commit();
+      assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"x")).getResults().getNumFound());
+  
+  
+      int numShards = 4;
+      replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
+      int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
+          .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
+
+      try (CloudSolrClient client = createCloudClient(null)) {
+        Map<String, Object> props = ZkNodeProps.makeMap(
+            "router.name", ImplicitDocRouter.NAME,
+            REPLICATION_FACTOR, replicationFactor,
+            MAX_SHARDS_PER_NODE, maxShardsPerNode,
+            SHARDS_PROP,"a,b,c,d",
+            "router.field", shard_fld);
+  
+        collectionName = COLL_PREFIX + "withShardField";
+        createCollection(collectionInfos, collectionName,props,client);
+      }
+  
+      List<Integer> list = collectionInfos.get(collectionName);
+      checkForCollection(collectionName, list, null);
+  
+  
+      url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
     }
 
-    List<Integer> list = collectionInfos.get(collectionName);
-    checkForCollection(collectionName, list, null);
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+      // poll for a second - it can take a moment before we are ready to serve
+      waitForNon403or404or503(collectionClient);
+    }
 
-
-    url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
-    
-    collectionClient.shutdown();
-    collectionClient = new HttpSolrClient(url);
-
-    // poll for a second - it can take a moment before we are ready to serve
-    waitForNon403or404or503(collectionClient);
-
-
-
-    collectionClient.shutdown();
-    collectionClient = new HttpSolrClient(url);
-
-
-    // lets try and use the solrj client to index a couple documents
-
-    collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
-        "humpty dumpy sat on a wall", shard_fld,"a"));
-
-    collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
-        "humpty dumpy3 sat on a walls", shard_fld,"a"));
-
-    collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
-        "humpty dumpy2 sat on a walled", shard_fld,"a"));
-
-    collectionClient.commit();
-
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
-    //TODO debug the following case
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
-    collectionClient.shutdown();
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+      // let's try to use the solrj client to index a couple of documents
+  
+      collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
+          "humpty dumpy sat on a wall", shard_fld,"a"));
+  
+      collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
+          "humpty dumpy3 sat on a walls", shard_fld,"a"));
+  
+      collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
+          "humpty dumpy2 sat on a walled", shard_fld,"a"));
+  
+      collectionClient.commit();
+  
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+      assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
+      //TODO debug the following case
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
+    }
   }
 
   private void testRouteFieldForHashRouter()throws Exception{
@@ -362,10 +352,8 @@
         .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
 
     HashMap<String, List<Integer>> collectionInfos = new HashMap<>();
-    CloudSolrClient client = null;
     String shard_fld = "shard_s";
-    try {
-      client = createCloudClient(null);
+    try (CloudSolrClient client = createCloudClient(null)) {
       Map<String, Object> props = ZkNodeProps.makeMap(
           REPLICATION_FACTOR, replicationFactor,
           MAX_SHARDS_PER_NODE, maxShardsPerNode,
@@ -373,8 +361,6 @@
           "router.field", shard_fld);
 
       createCollection(collectionInfos, collectionName,props,client);
-    } finally {
-      if (client != null) client.shutdown();
     }
 
     List<Integer> list = collectionInfos.get(collectionName);
@@ -383,48 +369,43 @@
 
     String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
 
-    HttpSolrClient collectionClient = new HttpSolrClient(url);
-
-    // poll for a second - it can take a moment before we are ready to serve
-    waitForNon403or404or503(collectionClient);
-    collectionClient.shutdown();
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+      // poll for a second - it can take a moment before we are ready to serve
+      waitForNon403or404or503(collectionClient);
+    }
 
 
-    collectionClient = new HttpSolrClient(url);
-
-
-    // lets try and use the solrj client to index a couple documents
-
-    collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
-        "humpty dumpy sat on a wall", shard_fld,"a"));
-
-    collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
-        "humpty dumpy3 sat on a walls", shard_fld,"a"));
-
-    collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
-        "humpty dumpy2 sat on a walled", shard_fld,"a"));
-
-    collectionClient.commit();
-
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-    //TODO debug the following case
-    assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
-
-    collectionClient.deleteByQuery("*:*");
-    collectionClient.commit();
-
-    collectionClient.add (getDoc( id,100,shard_fld, "b!doc1"));
-    collectionClient.commit();
-    assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "b!")).getResults().getNumFound());
-    collectionClient.shutdown();
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
+      // let's try to use the solrj client to index a couple of documents
+  
+      collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
+          "humpty dumpy sat on a wall", shard_fld,"a"));
+  
+      collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
+          "humpty dumpy3 sat on a walls", shard_fld,"a"));
+  
+      collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
+          "humpty dumpy2 sat on a walled", shard_fld,"a"));
+  
+      collectionClient.commit();
+  
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+      //TODO debug the following case
+      assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
+  
+      collectionClient.deleteByQuery("*:*");
+      collectionClient.commit();
+  
+      collectionClient.add(getDoc(id, 100, shard_fld, "b!doc1"));
+      collectionClient.commit();
+      assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "b!")).getResults().getNumFound());
+    }
   }
 
   private void testCreateShardRepFactor() throws Exception  {
     String collectionName = "testCreateShardRepFactor";
     HashMap<String, List<Integer>> collectionInfos = new HashMap<>();
-    CloudSolrClient client = null;
-    try {
-      client = createCloudClient(null);
+    try (CloudSolrClient client = createCloudClient(null)) {
       Map<String, Object> props = ZkNodeProps.makeMap(
           REPLICATION_FACTOR, 1,
           MAX_SHARDS_PER_NODE, 5,
@@ -433,8 +414,6 @@
           "router.name", "implicit");
 
       createCollection(collectionInfos, collectionName, props, client);
-    } finally {
-      if (client != null) client.shutdown();
     }
     ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
     waitForRecoveriesToFinish(collectionName, zkStateReader, false);
@@ -445,7 +424,10 @@
     params.set("shard", "x");
     SolrRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
-    createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0))).request(request);
+
+    try (SolrClient server = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      server.request(request);
+    }
 
     waitForRecoveriesToFinish(collectionName, zkStateReader, false);
 
@@ -478,10 +460,9 @@
   }
 
   @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("numShards");
-    System.clearProperty("zkHost");
     System.clearProperty("solr.xml.persist");
 
     // insurance
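
The lifecycle change applied throughout this patch: setUp/tearDown/doTest plus the fixShardCount/shardCount fields become distribSetUp/distribTearDown plus an annotated test method. A minimal sketch of the new shape (class name invented; the base class is assumed to supply the ShardsFixed annotation used above):

    import org.junit.Test;

    public class LifecycleSketchTest extends AbstractFullDistribZkTestBase {

      public LifecycleSketchTest() {
        sliceCount = 2; // the shard count now comes from @ShardsFixed, not a field
      }

      @Override
      public void distribSetUp() throws Exception {
        super.distribSetUp();
        System.setProperty("numShards", Integer.toString(sliceCount));
      }

      @Override
      public void distribTearDown() throws Exception {
        super.distribTearDown();
        System.clearProperty("numShards");
      }

      @Test
      @ShardsFixed(num = 4)
      public void test() throws Exception {
        // body runs against a fixed 4-shard cluster
      }
    }
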
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java
index e81fb1b..3ad3d99 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteInactiveReplicaTest.java
@@ -29,8 +29,7 @@
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.net.URL;
 import java.util.Map;
@@ -41,125 +40,109 @@
 //@Ignore("Not currently valid see SOLR-5580")
 public class DeleteInactiveReplicaTest extends DeleteReplicaTest{
 
-  @Override
-  public void doTest() throws Exception {
-    deleteInactiveReplicaTest();
-  }
+  @Test
+  public void deleteInactiveReplicaTest() throws Exception {
+    try (CloudSolrClient client = createCloudClient(null)) {
 
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-  }
-  
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-  
-  private void deleteInactiveReplicaTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
+      String collectionName = "delDeadColl";
 
-    String collectionName = "delDeadColl";
+      setClusterProp(client, ZkStateReader.LEGACY_CLOUD, "false");
 
-    setClusterProp(client, ZkStateReader.LEGACY_CLOUD, "false");
-    
-    createCollection(collectionName, client);
-    
-    waitForRecoveriesToFinish(collectionName, false);
+      createCollection(collectionName, client);
 
-    Thread.sleep(3000);
+      waitForRecoveriesToFinish(collectionName, false);
 
-    boolean stopped = false;
-    JettySolrRunner stoppedJetty = null;
-    StringBuilder sb = new StringBuilder();
-    Replica replica1 = null;
-    Slice shard1 = null;
-    long timeout = System.currentTimeMillis() + 3000;
-    DocCollection testcoll = null;
-    while(!stopped && System.currentTimeMillis()<timeout ) {
-      testcoll = client.getZkStateReader().getClusterState().getCollection(collectionName);
-      for (JettySolrRunner jetty : jettys)
-        sb.append(jetty.getBaseUrl()).append(",");
+      Thread.sleep(3000);
 
-      for (Slice slice : testcoll.getActiveSlices()) {
-        for (Replica replica : slice.getReplicas())
-          for (JettySolrRunner jetty : jettys) {
-            URL baseUrl = null;
-            try {
-              baseUrl = jetty.getBaseUrl();
-            } catch (Exception e) {
-              continue;
+      boolean stopped = false;
+      JettySolrRunner stoppedJetty = null;
+      StringBuilder sb = new StringBuilder();
+      Replica replica1 = null;
+      Slice shard1 = null;
+      long timeout = System.currentTimeMillis() + 3000;
+      DocCollection testcoll = null;
+      while (!stopped && System.currentTimeMillis() < timeout) {
+        testcoll = client.getZkStateReader().getClusterState().getCollection(collectionName);
+        for (JettySolrRunner jetty : jettys)
+          sb.append(jetty.getBaseUrl()).append(",");
+
+        for (Slice slice : testcoll.getActiveSlices()) {
+          for (Replica replica : slice.getReplicas())
+            for (JettySolrRunner jetty : jettys) {
+              URL baseUrl = null;
+              try {
+                baseUrl = jetty.getBaseUrl();
+              } catch (Exception e) {
+                continue;
+              }
+              if (baseUrl.toString().startsWith(
+                  replica.getStr(ZkStateReader.BASE_URL_PROP))) {
+                stoppedJetty = jetty;
+                ChaosMonkey.stop(jetty);
+                replica1 = replica;
+                shard1 = slice;
+                stopped = true;
+                break;
+              }
             }
-            if (baseUrl.toString().startsWith(
-                replica.getStr(ZkStateReader.BASE_URL_PROP))) {
-              stoppedJetty = jetty;
-              ChaosMonkey.stop(jetty);
-              replica1 = replica;
-              shard1 = slice;
-              stopped = true;
-              break;
-            }
-          }
+        }
+        Thread.sleep(100);
       }
-      Thread.sleep(100);
-    }
 
 
-    if (!stopped) {
-      fail("Could not find jetty to stop in collection " + testcoll
-          + " jettys: " + sb);
-    }
-    
-    long endAt = System.currentTimeMillis() + 3000;
-    boolean success = false;
-    while (System.currentTimeMillis() < endAt) {
-      testcoll = client.getZkStateReader()
-          .getClusterState().getCollection(collectionName);
-      if (!"active".equals(testcoll.getSlice(shard1.getName())
-          .getReplica(replica1.getName()).getStr(Slice.STATE))) {
-        success = true;
+      if (!stopped) {
+        fail("Could not find jetty to stop in collection " + testcoll
+            + " jettys: " + sb);
       }
-      if (success) break;
-      Thread.sleep(100);
+
+      long endAt = System.currentTimeMillis() + 3000;
+      boolean success = false;
+      while (System.currentTimeMillis() < endAt) {
+        testcoll = client.getZkStateReader()
+            .getClusterState().getCollection(collectionName);
+        if (!"active".equals(testcoll.getSlice(shard1.getName())
+            .getReplica(replica1.getName()).getStr(Slice.STATE))) {
+          success = true;
+        }
+        if (success) break;
+        Thread.sleep(100);
+      }
+
+      log.info("removed_replicas {}/{} ", shard1.getName(), replica1.getName());
+      removeAndWaitForReplicaGone(collectionName, client, replica1,
+          shard1.getName());
+      ChaosMonkey.start(stoppedJetty);
+      log.info("restarted jetty");
+
+      Map m = makeMap("qt", "/admin/cores", "action", "status");
+
+      try (SolrClient queryClient = new HttpSolrClient(replica1.getStr(ZkStateReader.BASE_URL_PROP))) {
+        NamedList<Object> resp = queryClient.request(new QueryRequest(new MapSolrParams(m)));
+        assertNull("The core is up and running again",
+            ((NamedList) resp.get("status")).get(replica1.getStr("core")));
+      }
+
+      Exception exp = null;
+
+      try {
+
+        m = makeMap(
+            "action", CoreAdminParams.CoreAdminAction.CREATE.toString(),
+            ZkStateReader.COLLECTION_PROP, collectionName,
+            ZkStateReader.SHARD_ID_PROP, "shard2",
+            CoreAdminParams.NAME, "testcore");
+
+        QueryRequest request = new QueryRequest(new MapSolrParams(m));
+        request.setPath("/admin/cores");
+        NamedList<Object> rsp = client.request(request);
+      } catch (Exception e) {
+        exp = e;
+        log.info("error_expected", e);
+      }
+      assertNotNull("Exception expected", exp);
+      setClusterProp(client, ZkStateReader.LEGACY_CLOUD, null);
+
     }
 
-    log.info("removed_replicas {}/{} ", shard1.getName(), replica1.getName());
-    removeAndWaitForReplicaGone(collectionName, client, replica1,
-        shard1.getName());
-    ChaosMonkey.start(stoppedJetty);
-    log.info("restarted jetty");
-
-    Map m = makeMap("qt", "/admin/cores", "action", "status");
-
-    SolrClient queryClient = new HttpSolrClient(replica1.getStr(ZkStateReader.BASE_URL_PROP));
-    NamedList<Object> resp = queryClient.request(new QueryRequest(new MapSolrParams(m)));
-    assertNull("The core is up and running again",
-        ((NamedList) resp.get("status")).get(replica1.getStr("core")));
-    queryClient.shutdown();
-    queryClient = null;
-
-
-    Exception exp = null;
-
-    try {
-
-      m = makeMap(
-          "action", CoreAdminParams.CoreAdminAction.CREATE.toString(),
-          ZkStateReader.COLLECTION_PROP, collectionName,
-          ZkStateReader.SHARD_ID_PROP, "shard2",
-          CoreAdminParams.NAME, "testcore");
-
-      QueryRequest request = new QueryRequest(new MapSolrParams(m));
-      request.setPath("/admin/cores");
-      NamedList<Object> rsp = client.request(request);
-    } catch (Exception e) {
-      exp = e;
-      log.info("error_expected",e);
-    }
-    assertNotNull( "Exception expected", exp);
-    setClusterProp(client,ZkStateReader.LEGACY_CLOUD,null);
-    client.shutdown();
-
-
   }
 }
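
The core-status probe used above boils down to a single /admin/cores request; a hedged helper sketch (the qt/action params mirror the test, the helper itself is invented):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.request.QueryRequest;
    import org.apache.solr.common.params.MapSolrParams;
    import org.apache.solr.common.util.NamedList;

    class CoreStatusSketch {
      // Returns true if the named core appears in the node's core status report.
      static boolean coreExists(String baseUrl, String coreName) throws Exception {
        Map<String, String> params = new HashMap<>();
        params.put("qt", "/admin/cores");
        params.put("action", "status");
        try (SolrClient client = new HttpSolrClient(baseUrl)) {
          NamedList<Object> rsp = client.request(new QueryRequest(new MapSolrParams(params)));
          return ((NamedList<?>) rsp.get("status")).get(coreName) != null;
        }
      }
    }
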
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java
index 3e86bea..401cbc6 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteLastCustomShardedReplicaTest.java
@@ -27,10 +27,9 @@
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.SolrParams;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.HashMap;
@@ -53,19 +52,18 @@
 
   }
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
     client = createCloudClient(null);
   }
 
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-    client.shutdown();
+  @Override
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    client.close();
   }
 
   protected String getSolrXml() {
@@ -73,16 +71,13 @@
   }
 
   public DeleteLastCustomShardedReplicaTest() {
-    fixShardCount = true;
-
     sliceCount = 2;
-    shardCount = 2;
-
     checkCreatedVsState = false;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 2)
+  public void test() throws Exception {
     int replicationFactor = 1;
     int maxShardsPerNode = 5;
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
index e8f3450..9fb53c3 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
@@ -32,9 +32,8 @@
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
@@ -56,19 +55,18 @@
 
   }
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
     client = createCloudClient(null);
   }
 
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-    client.shutdown();
+  @Override
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    client.close();
   }
 
   protected String getSolrXml() {
@@ -76,23 +74,15 @@
   }
 
   public DeleteReplicaTest() {
-    fixShardCount = true;
-
     sliceCount = 2;
-    shardCount = 4;
-
     checkCreatedVsState = false;
   }
 
-  @Override
-  public void doTest() throws Exception {
-    deleteLiveReplicaTest();
-  }
-
-  private void deleteLiveReplicaTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void deleteLiveReplicaTest() throws Exception {
     String collectionName = "delLiveColl";
-    CloudSolrClient client = createCloudClient(null);
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       createCollection(collectionName, client);
       
       waitForRecoveriesToFinish(collectionName, false);
@@ -120,14 +110,11 @@
 
       if (replica1 == null) fail("no active replicas found");
 
-      HttpSolrClient replica1Client = new HttpSolrClient(replica1.getStr("base_url"));
       String dataDir = null;
-      try {
+      try (HttpSolrClient replica1Client = new HttpSolrClient(replica1.getStr("base_url"))) {
         CoreAdminResponse status = CoreAdminRequest.getStatus(replica1.getStr("core"), replica1Client);
         NamedList<Object> coreStatus = status.getCoreStatus(replica1.getStr("core"));
         dataDir = (String) coreStatus.get("dataDir");
-      } finally {
-        replica1Client.shutdown();
       }
       try {
         // Should not be able to delete a replica that is up if onlyIfDown=true.
@@ -144,8 +131,6 @@
 
       removeAndWaitForReplicaGone(collectionName, client, replica1, shard1.getName());
       assertFalse("dataDir for " + replica1.getName() + " should have been deleted by deleteReplica API", new File(dataDir).exists());
-    } finally {
-      client.shutdown();
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java
index 26fdb8e..fd5ccc7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteShardTest.java
@@ -30,8 +30,7 @@
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.zookeeper.KeeperException;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.HashMap;
@@ -41,46 +40,28 @@
 
   public DeleteShardTest() {
     super();
-    fixShardCount = true;
-    shardCount = 2;
     sliceCount = 2;
   }
 
   @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", "2");
     System.setProperty("solr.xml.persist", "true");
   }
 
   @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-
-    if (VERBOSE || printLayoutOnTearDown) {
-      super.printLayout();
-    }
-    if (controlClient != null) {
-      controlClient.shutdown();
-    }
-    if (cloudClient != null) {
-      cloudClient.shutdown();
-    }
-    if (controlClientCloud != null) {
-      controlClientCloud.shutdown();
-    }
-    super.tearDown();
-
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("numShards");
     System.clearProperty("solr.xml.persist");
   }
 
   // TODO: Custom hash slice deletion test
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 2)
+  public void test() throws Exception {
     ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
 
     Slice slice1 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1);
@@ -147,11 +128,11 @@
         .getBaseURL();
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
 
-    HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
-    baseClient.setConnectionTimeout(15000);
-    baseClient.setSoTimeout(60000);
-    baseClient.request(request);
-    baseClient.shutdown();
+    try (HttpSolrClient baseServer = new HttpSolrClient(baseUrl)) {
+      baseServer.setConnectionTimeout(15000);
+      baseServer.setSoTimeout(60000);
+      baseServer.request(request);
+    }
   }
 
   protected void setSliceState(String slice, String state) throws SolrServerException, IOException,
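
The short-lived admin client above sets explicit timeouts before issuing the request; as a standalone sketch (timeout values copied from the test, helper name invented):

    import java.io.IOException;

    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.SolrServerException;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    class AdminRequestSketch {
      // Configure connect/read timeouts on a throwaway client, then let
      // try-with-resources close it when the request completes or fails.
      static void send(String baseUrl, SolrRequest request)
          throws SolrServerException, IOException {
        try (HttpSolrClient client = new HttpSolrClient(baseUrl)) {
          client.setConnectionTimeout(15000); // connect timeout, ms
          client.setSoTimeout(60000);         // socket read timeout, ms
          client.request(request);
        }
      }
    }
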
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
index 29a3cb6..6a7debc 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
@@ -16,34 +16,37 @@
  */
 package org.apache.solr.cloud;
 
+import com.carrotsearch.randomizedtesting.annotations.Seed;
 import org.apache.lucene.util.LuceneTestCase.Slow;
-import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.SentinelIntSet;
+import org.apache.lucene.util.TestUtil;
 import org.apache.solr.CursorPagingTest;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.request.LukeRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.GroupParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.search.CursorMark;
 import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_PARAM;
 import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_NEXT;
 import static org.apache.solr.common.params.CursorMarkParams.CURSOR_MARK_START;
 import org.apache.solr.search.CursorMark; //jdoc
 
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.util.List;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 import java.util.Map;
 
 /**
@@ -69,8 +72,8 @@
     return configString;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     boolean testFinished = false;
     try {
       handle.clear();
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java
index 17b63c5..7a1772f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistribDocExpirationUpdateProcessorTest.java
@@ -26,6 +26,7 @@
 import org.apache.solr.update.processor.DocExpirationUpdateProcessorFactory; // jdoc
 import org.apache.solr.update.processor.DocExpirationUpdateProcessorFactoryTest;
 
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,8 +53,8 @@
     return configString;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     assertTrue("only one shard?!?!?!", 1 < shardToJetty.keySet().size());
     log.info("number of shards: {}", shardToJetty.keySet().size());
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/ExternalCollectionsTest.java b/solr/core/src/test/org/apache/solr/cloud/ExternalCollectionsTest.java
index 7bc6131..cb1240a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ExternalCollectionsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ExternalCollectionsTest.java
@@ -25,31 +25,23 @@
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.zookeeper.data.Stat;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.BeforeClass;
+import org.junit.Test;
 
 public class ExternalCollectionsTest extends AbstractFullDistribZkTestBase {
   private CloudSolrClient client;
 
-  @BeforeClass
-  public static void beforeThisClass2() throws Exception {
-
-  }
-
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
     client = createCloudClient(null);
   }
 
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-    client.shutdown();
+  @Override
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    client.close();
   }
 
   protected String getSolrXml() {
@@ -57,25 +49,29 @@
   }
 
   public ExternalCollectionsTest() {
-    fixShardCount = true;
-
-    sliceCount = 2;
-    shardCount = 4;
-
     checkCreatedVsState = false;
   }
 
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     testZkNodeLocation();
+    testConfNameAndCollectionNameSame();
   }
 
 
 
   @Override
-  protected int getStateFormat() {
-    return 2;
+  protected String getStateFormat() {
+    return "2";
+  }
+
+  private void testConfNameAndCollectionNameSame() throws Exception {
+    // .system collection precreates the configset
+
+    createCollection(".system", client, 2, 1);
+    waitForRecoveriesToFinish(".system", false);
   }
 
   private void testZkNodeLocation() throws Exception{
diff --git a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
index 97da78d..968fbd8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/FullSolrCloudDistribCmdsTest.java
@@ -39,6 +39,7 @@
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.apache.zookeeper.CreateMode;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -58,13 +59,12 @@
   
   public FullSolrCloudDistribCmdsTest() {
     super();
-    fixShardCount = true;
-    shardCount = 6;
     sliceCount = 3;
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 6)
+  public void test() throws Exception {
     handle.clear();
     handle.put("timestamp", SKIPVAL);
     
@@ -375,13 +375,11 @@
           try {
             uReq.process(cloudClient);
             uReq.process(controlClient);
-          } catch (SolrServerException e) {
-            throw new RuntimeException(e);
-          } catch (IOException e) {
+          } catch (SolrServerException | IOException e) {
             throw new RuntimeException(e);
           }
 
-          
+
         }
       }
     };
@@ -432,12 +430,11 @@
   }
   
   private long testConcurrentIndexing(long docId) throws Exception {
-    ConcurrentUpdateSolrClient concurrentClient = new ConcurrentUpdateSolrClient(
-        ((HttpSolrClient) clients.get(0)).getBaseURL(), 10, 2);
     QueryResponse results = query(cloudClient);
     long beforeCount = results.getResults().getNumFound();
     int cnt = TEST_NIGHTLY ? 2933 : 313;
-    try {
+    try (ConcurrentUpdateSolrClient concurrentClient = new ConcurrentUpdateSolrClient(
+        ((HttpSolrClient) clients.get(0)).getBaseURL(), 10, 2)) {
       concurrentClient.setConnectionTimeout(120000);
       for (int i = 0; i < cnt; i++) {
         index_specific(concurrentClient, id, docId++, "text_t", "some text so that it not's negligent work to parse this doc, even though it's still a pretty short doc");
@@ -448,8 +445,6 @@
 
       checkShardConsistency();
       assertDocCounts(VERBOSE);
-    } finally {
-      concurrentClient.shutdown();
     }
     results = query(cloudClient);
     assertEquals(beforeCount + cnt, results.getResults().getNumFound());
@@ -501,11 +496,6 @@
     return client.query(query);
   }
   
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-  
   protected SolrInputDocument addRandFields(SolrInputDocument sdoc) {
     return sdoc;
   }
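
testConcurrentIndexing above wraps ConcurrentUpdateSolrClient in try-with-resources; a hedged sketch of the same idea (queue size and thread count copied from the test, field names illustrative):

    import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;
    import org.apache.solr.common.SolrInputDocument;

    class ConcurrentIndexSketch {
      // Buffered, multi-threaded indexing: blockUntilFinished() drains the
      // queue before close() releases the client's resources.
      static void indexDocs(String baseUrl, int count) throws Exception {
        try (ConcurrentUpdateSolrClient client =
                 new ConcurrentUpdateSolrClient(baseUrl, 10, 2)) {
          client.setConnectionTimeout(120000);
          for (int i = 0; i < count; i++) {
            SolrInputDocument doc = new SolrInputDocument();
            doc.addField("id", Integer.toString(i));
            client.add(doc);
          }
          client.blockUntilFinished();
          client.commit();
        }
      }
    }
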
diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java
index ea50a7d..7d27950 100644
--- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java
@@ -17,14 +17,17 @@
  * limitations under the License.
  */
 
+import org.apache.http.NoHttpResponseException;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.JSONTestUtil;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
+import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.ClusterState;
@@ -36,12 +39,12 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.servlet.SolrDispatchFilter;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
+import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -73,26 +76,15 @@
   public HttpPartitionTest() {
     super();
     sliceCount = 2;
-    shardCount = 3;
+    fixShardCount(3);
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
   
-  @Override
-  @After
-  public void tearDown() throws Exception {    
-    try {
-      super.tearDown();
-    } catch (Exception exc) {}
-    
-    resetExceptionIgnores();
-  }
-  
   /**
    * Overrides the parent implementation to install a SocketProxy in-front of the Jetty server.
    */
@@ -104,8 +96,8 @@
     return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride);
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     waitForThingsToLevelOut(30000);
 
     testLeaderInitiatedRecoveryCRUD();
@@ -135,7 +127,7 @@
   protected void testLeaderInitiatedRecoveryCRUD() throws Exception {
     String testCollectionName = "c8n_crud_1x2";
     String shardId = "shard1";
-    createCollection(testCollectionName, 1, 2, 1);
+    createCollectionRetry(testCollectionName, 1, 2, 1);
     cloudClient.setDefaultCollection(testCollectionName);
 
     Replica leader =
@@ -184,7 +176,7 @@
   protected void testRf2() throws Exception {
     // create a collection that has 1 shard but 2 replicas
     String testCollectionName = "c8n_1x2";
-    createCollection(testCollectionName, 1, 2, 1);
+    createCollectionRetry(testCollectionName, 1, 2, 1);
     cloudClient.setDefaultCollection(testCollectionName);
     
     sendDoc(1);
@@ -265,11 +257,12 @@
   protected void testRf3() throws Exception {
     // create a collection that has 1 shard but 2 replicas
     String testCollectionName = "c8n_1x3";
-    createCollection(testCollectionName, 1, 3, 1);
+    createCollectionRetry(testCollectionName, 1, 3, 1);
+    
     cloudClient.setDefaultCollection(testCollectionName);
     
     sendDoc(1);
-    
+
     List<Replica> notLeaders = 
         ensureAllReplicasAreActive(testCollectionName, "shard1", 1, 3, maxWaitSecsToSeeAllActive);
     assertTrue("Expected 2 replicas for collection " + testCollectionName
@@ -318,11 +311,27 @@
     }
   }
 
+  private void createCollectionRetry(String testCollectionName, int numShards, int replicationFactor, int maxShardsPerNode)
+      throws SolrServerException, IOException {
+    CollectionAdminResponse resp = createCollection(testCollectionName, numShards, replicationFactor, maxShardsPerNode);
+    if (resp.getResponse().get("failure") != null) {
+      CollectionAdminRequest.Delete req = new CollectionAdminRequest.Delete();
+      req.setCollectionName(testCollectionName);
+      req.process(cloudClient);
+      
+      resp = createCollection(testCollectionName, numShards, replicationFactor, maxShardsPerNode);
+      
+      if (resp.getResponse().get("failure") != null) {
+        fail("Could not create " + testCollectionName);
+      }
+    }
+  }
+
   // test inspired by SOLR-6511
   protected void testLeaderZkSessionLoss() throws Exception {
 
     String testCollectionName = "c8n_1x2_leader_session_loss";
-    createCollection(testCollectionName, 1, 2, 1);
+    createCollectionRetry(testCollectionName, 1, 2, 1);
     cloudClient.setDefaultCollection(testCollectionName);
 
     sendDoc(1);
@@ -341,7 +350,7 @@
         testCollectionName+"; clusterState: "+printClusterStateInfo(testCollectionName), leader);
     JettySolrRunner leaderJetty = getJettyOnPort(getReplicaPort(leader));
 
-    HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName);
+
     SolrInputDocument doc = new SolrInputDocument();
     doc.addField(id, String.valueOf(2));
     doc.addField("a_t", "hello" + 2);
@@ -372,27 +381,22 @@
     // TODO: This test logic seems to be timing dependent and fails on Jenkins
     // need to come up with a better approach
     log.info("Sending doc 2 to old leader "+leader.getName());
-    try {
+    try (HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName)) {
+
       leaderSolr.add(doc);
-      leaderSolr.shutdown();
+      leaderSolr.close();
 
       // if the add worked, then the doc must exist on the new leader
-      HttpSolrClient newLeaderSolr = getHttpSolrClient(currentLeader, testCollectionName);
-      try {
+      try (HttpSolrClient newLeaderSolr = getHttpSolrClient(currentLeader, testCollectionName)) {
         assertDocExists(newLeaderSolr, testCollectionName, "2");
-      } finally {
-        newLeaderSolr.shutdown();
       }
 
     } catch (SolrException exc) {
       // this is ok provided the doc doesn't exist on the current leader
-      leaderSolr = getHttpSolrClient(currentLeader, testCollectionName);
-      try {
-        leaderSolr.add(doc); // this should work
-      } finally {
-        leaderSolr.shutdown();
+      try (HttpSolrClient client = getHttpSolrClient(currentLeader, testCollectionName)) {
+        client.add(doc); // this should work
       }
-    }
+    }
 
     List<Replica> participatingReplicas = getActiveOrRecoveringReplicas(testCollectionName, "shard1");
     Set<String> replicasToCheck = new HashSet<>();
@@ -437,12 +441,12 @@
   protected void assertDocsExistInAllReplicas(List<Replica> notLeaders,
       String testCollectionName, int firstDocId, int lastDocId)
       throws Exception {
-    Replica leader = 
+    Replica leader =
         cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard1", 10000);
     HttpSolrClient leaderSolr = getHttpSolrClient(leader, testCollectionName);
     List<HttpSolrClient> replicas =
         new ArrayList<HttpSolrClient>(notLeaders.size());
-    
+
     for (Replica r : notLeaders) {
       replicas.add(getHttpSolrClient(r, testCollectionName));
     }
@@ -456,10 +460,10 @@
       }
     } finally {
       if (leaderSolr != null) {
-        leaderSolr.shutdown();
+        leaderSolr.close();
       }
       for (HttpSolrClient replicaSolr : replicas) {
-        replicaSolr.shutdown();
+        replicaSolr.close();
       }
     }
   }
@@ -470,17 +474,37 @@
     return new HttpSolrClient(url);
   }
   
-  protected void sendDoc(int docId) throws Exception {
+  protected void doSendDoc(int docId) throws Exception {
     UpdateRequest up = new UpdateRequest();
     up.setParam(UpdateRequest.MIN_REPFACT, String.valueOf(2));
     SolrInputDocument doc = new SolrInputDocument();
-    doc.addField(id, String.valueOf(docId));
-    doc.addField("a_t", "hello" + docId);
+    doc.addField(id, String.valueOf(docId));
+    doc.addField("a_t", "hello" + docId);
     up.add(doc);
     int minAchievedRf =
         cloudClient.getMinAchievedReplicationFactor(cloudClient.getDefaultCollection(), cloudClient.request(up));
   }
   
+  protected void sendDoc(int docId) throws Exception {
+    try {
+      doSendDoc(docId);
+    } catch (SolrServerException e) {
+      if (!(e.getRootCause() instanceof NoHttpResponseException)) {
+        throw e;
+      }
+      // we don't know if the doc was accepted or not, so send again
+      Thread.sleep(100);
+      try {
+        doSendDoc(docId);
+      } catch (SolrServerException e2) {
+        if (!(e2.getRootCause() instanceof NoHttpResponseException)) {
+          throw e2;
+        }
+        // still no response; wait longer and try one last time
+        Thread.sleep(3000);
+        doSendDoc(docId);
+      }
+    }
+  }
+
   /**
    * Query the real-time get handler for a specific doc by ID to verify it
    * exists in the provided server, using distrib=false so it doesn't route to another replica.
diff --git a/solr/core/src/test/org/apache/solr/cloud/KerberosTestUtil.java b/solr/core/src/test/org/apache/solr/cloud/KerberosTestUtil.java
new file mode 100644
index 0000000..632c59a
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/KerberosTestUtil.java
@@ -0,0 +1,105 @@
+package org.apache.solr.cloud;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.Configuration;
+
+import org.apache.hadoop.minikdc.MiniKdc;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class KerberosTestUtil {
+
+  /**
+   * Returns a MiniKdc that can be used for creating kerberos principals
+   * and keytabs.  Caller is responsible for starting/stopping the kdc.
+   */
+  public static MiniKdc getKdc(File workDir) throws Exception {
+    Properties conf = MiniKdc.createConf();
+    return new MiniKdc(conf, workDir);
+  }
+
+  /**
+   * Programmatic version of a jaas.conf file suitable for connecting
+   * to a SASL-configured zookeeper.
+   */
+  public static class JaasConfiguration extends Configuration {
+
+    private final AppConfigurationEntry[] clientEntry;
+    private final AppConfigurationEntry[] serverEntry;
+
+    /**
+     * Adds client and server entries to the jaas configuration using the
+     * passed-in principals and keytabs. The other necessary options will be set for you.
+     *
+     * @param clientPrincipal The principal of the client
+     * @param clientKeytab The location of the keytab with the clientPrincipal
+     * @param serverPrincipal The principal of the server
+     * @param serverKeytab The location of the keytab with the serverPrincipal
+     */
+    public JaasConfiguration(String clientPrincipal, File clientKeytab,
+        String serverPrincipal, File serverKeytab) {
+      Map<String, String> clientOptions = new HashMap<String, String>();
+      clientOptions.put("principal", clientPrincipal);
+      clientOptions.put("keyTab", clientKeytab.getAbsolutePath());
+      clientOptions.put("useKeyTab", "true");
+      clientOptions.put("storeKey", "true");
+      clientOptions.put("useTicketCache", "false");
+      clientOptions.put("refreshKrb5Config", "true");
+      String jaasProp = System.getProperty("solr.jaas.debug");
+      if (jaasProp != null && "true".equalsIgnoreCase(jaasProp)) {
+        clientOptions.put("debug", "true");
+      }
+      clientEntry = new AppConfigurationEntry[]{
+        new AppConfigurationEntry(getKrb5LoginModuleName(),
+        AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+        clientOptions)};
+      Map<String, String> serverOptions = new HashMap<String, String>(clientOptions);
+      serverOptions.put("principal", serverPrincipal);
+      serverOptions.put("keytab", serverKeytab.getAbsolutePath());
+      serverEntry =  new AppConfigurationEntry[]{
+        new AppConfigurationEntry(getKrb5LoginModuleName(),
+        AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+        serverOptions)};
+    }
+
+    @Override
+    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
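+      // ZooKeeper's SASL support looks up the "Client" (client side) and
+      // "Server" (server side) JAAS sections by exactly these names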
+      if ("Client".equals(name)) {
+        return clientEntry;
+      } else if ("Server".equals(name)) {
+        return serverEntry;
+      }
+      return null;
+    }
+
+    private String getKrb5LoginModuleName() {
+      String krb5LoginModuleName;
+      if (System.getProperty("java.vendor").contains("IBM")) {
+        krb5LoginModuleName = "com.ibm.security.auth.module.Krb5LoginModule";
+      } else {
+        krb5LoginModuleName = "com.sun.security.auth.module.Krb5LoginModule";
+      }
+      return krb5LoginModuleName;
+    }
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java
index 55a02a5..bed3d72 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionIntegrationTest.java
@@ -23,6 +23,7 @@
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.CoreDescriptor;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -146,8 +147,7 @@
     ports.add(port);
     CoreContainer container = new CoreContainer();
     container.load();
-    assertTrue("Container " + port + " has no cores!", container.getCores()
-        .size() > 0);
+    container.create(new CoreDescriptor(container, "collection1", "collection1", "collection", "collection1"));
     containerMap.put(port, container);
     System.clearProperty("solr.solr.home");
     System.clearProperty("hostPort");
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java
index e81f457..db0ca46 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java
@@ -170,10 +170,6 @@
     public void run() {
       try {
         setupOnConnect();
-      } catch (InterruptedException e) {
-        log.error("setup failed", e);
-        es.close();
-        return;
       } catch (Throwable e) {
         log.error("setup failed", e);
         es.close();
@@ -251,9 +247,7 @@
         ZkCoreNodeProps leaderProps = new ZkCoreNodeProps(
             ZkNodeProps.load(data));
         return leaderProps.getCoreUrl();
-      } catch (NoNodeException e) {
-        Thread.sleep(500);
-      } catch (SessionExpiredException e) {
+      } catch (NoNodeException | SessionExpiredException e) {
         Thread.sleep(500);
       }
     }
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java
index 54044f8..317b9dd 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailoverAfterPartitionTest.java
@@ -20,8 +20,11 @@
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.common.cloud.Replica;
+import org.junit.Test;
+
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -40,8 +43,8 @@
   }
 
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     waitForThingsToLevelOut(30000);
 
     // kill a leader and make sure recovery occurs as expected
@@ -109,10 +112,15 @@
     // indexing during a partition
     // doc should be on leader and 1 replica
     sendDoc(5);
+    
+    try (HttpSolrClient server = getHttpSolrClient(leader, testCollectionName)) {
+      assertDocExists(server, testCollectionName, "5");
+    }
 
-    assertDocExists(getHttpSolrClient(leader, testCollectionName), testCollectionName, "5");
-    assertDocExists(getHttpSolrClient(notLeaders.get(1), testCollectionName), testCollectionName, "5");
-
+    try (HttpSolrClient server = getHttpSolrClient(notLeaders.get(1), testCollectionName)) {
+      assertDocExists(server, testCollectionName, "5");
+    }
+  
     Thread.sleep(sleepMsBeforeHealPartition);
     
     String shouldNotBeNewLeaderNode = notLeaders.get(0).getNodeName();
@@ -160,12 +168,14 @@
             printClusterStateInfo(testCollectionName),
         participatingReplicas.size() >= 2);
 
+    
     sendDoc(6);
 
+
     Set<String> replicasToCheck = new HashSet<>();
     for (Replica stillUp : participatingReplicas)
       replicasToCheck.add(stillUp.getName());
-    waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 20);
+    waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 90);
     assertDocsExistInAllReplicas(participatingReplicas, testCollectionName, 1, 6);
 
     // try to clean up
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java
index ef7d4bf..2c530de 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderInitiatedRecoveryOnCommitTest.java
@@ -22,8 +22,7 @@
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.File;
 import java.util.List;
@@ -35,29 +34,22 @@
   public LeaderInitiatedRecoveryOnCommitTest() {
     super();
     sliceCount = 1;
-    shardCount = 4;
+    fixShardCount(4);
   }
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
 
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     System.clearProperty("numShards");
 
-    try {
-      super.tearDown();
-    } catch (Exception exc) {
-    }
+    super.distribTearDown();
 
-    resetExceptionIgnores();
-
-    // close socket proxies after super.tearDown
+    // close socket proxies after super.distribTearDown
     if (!proxies.isEmpty()) {
       for (SocketProxy proxy : proxies.values()) {
         proxy.close();
@@ -65,8 +57,8 @@
     }
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     oneShardTest();
     multiShardTest();
   }
@@ -91,8 +83,9 @@
 
     // let's find the leader of shard2 and ask him to commit
     Replica shard2Leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard2");
-    HttpSolrClient client = new HttpSolrClient(ZkCoreNodeProps.getCoreUrl(shard2Leader.getStr("base_url"), shard2Leader.getStr("core")));
-    client.commit();
+    try (HttpSolrClient server = new HttpSolrClient(ZkCoreNodeProps.getCoreUrl(shard2Leader.getStr("base_url"), shard2Leader.getStr("core")))) {
+      server.commit();
+    }
 
     Thread.sleep(sleepMsBeforeHealPartition);
 
@@ -133,8 +126,9 @@
     leaderProxy.close();
 
     Replica replica = notLeaders.get(0);
-    HttpSolrClient client = new HttpSolrClient(ZkCoreNodeProps.getCoreUrl(replica.getStr("base_url"), replica.getStr("core")));
-    client.commit();
+    try (HttpSolrClient client = new HttpSolrClient(ZkCoreNodeProps.getCoreUrl(replica.getStr("base_url"), replica.getStr("core")))) {
+      client.commit();
+    }
 
     Thread.sleep(sleepMsBeforeHealPartition);
 
@@ -166,4 +160,4 @@
     return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride);
   }
 
-}
\ No newline at end of file
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java
index 3bd56ce..969f605 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MigrateRouteKeyTest.java
@@ -33,8 +33,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.zookeeper.KeeperException;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.HashMap;
@@ -52,31 +51,15 @@
   }
 
   @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
   }
 
   @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-
-    if (VERBOSE || printLayoutOnTearDown) {
-      super.printLayout();
-    }
-    if (controlClient != null) {
-      controlClient.shutdown();
-    }
-    if (cloudClient != null) {
-      cloudClient.shutdown();
-    }
-    if (controlClientCloud != null) {
-      controlClientCloud.shutdown();
-    }
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
 
     System.clearProperty("zkHost");
     System.clearProperty("numShards");
@@ -86,8 +69,8 @@
     DirectUpdateHandler2.commitOnClose = true;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     waitForThingsToLevelOut(15);
 
     if (usually()) {
@@ -136,26 +119,23 @@
         .getBaseURL();
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
 
-    HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
-    baseClient.setConnectionTimeout(15000);
-    baseClient.setSoTimeout(60000 * 5);
-    baseClient.request(request);
-    baseClient.shutdown();
+    try (HttpSolrClient baseClient = new HttpSolrClient(baseUrl)) {
+      baseClient.setConnectionTimeout(15000);
+      baseClient.setSoTimeout(60000 * 5);
+      baseClient.request(request);
+    }
   }
 
   private void createCollection(String targetCollection) throws Exception {
     HashMap<String, List<Integer>> collectionInfos = new HashMap<>();
-    CloudSolrClient client = null;
-    try {
-      client = createCloudClient(null);
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       Map<String, Object> props = ZkNodeProps.makeMap(
           REPLICATION_FACTOR, 1,
           MAX_SHARDS_PER_NODE, 5,
           NUM_SLICES, 1);
 
       createCollection(collectionInfos, targetCollection, props, client);
-    } finally {
-      if (client != null) client.shutdown();
     }
 
     List<Integer> list = collectionInfos.get(targetCollection);
@@ -193,42 +173,42 @@
     indexer.start();
 
     String url = CustomCollectionTest.getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), targetCollection);
-    HttpSolrClient collectionClient = new HttpSolrClient(url);
 
-    SolrQuery solrQuery = new SolrQuery("*:*");
-    assertEquals("DocCount on target collection does not match", 0, collectionClient.query(solrQuery).getResults().getNumFound());
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
 
-    invokeMigrateApi(AbstractDistribZkTestBase.DEFAULT_COLLECTION, splitKey + "/" + BIT_SEP + "!", targetCollection);
-    long finishTime = System.currentTimeMillis();
+      SolrQuery solrQuery = new SolrQuery("*:*");
+      assertEquals("DocCount on target collection does not match", 0, collectionClient.query(solrQuery).getResults().getNumFound());
 
-    indexer.join();
-    splitKeyCount[0] += indexer.getSplitKeyCount();
+      invokeMigrateApi(AbstractDistribZkTestBase.DEFAULT_COLLECTION, splitKey + "/" + BIT_SEP + "!", targetCollection);
+      long finishTime = System.currentTimeMillis();
 
-    try {
-      cloudClient.deleteById("a/" + BIT_SEP + "!104");
-      splitKeyCount[0]--;
-    } catch (Exception e) {
-      log.warn("Error deleting document a/" + BIT_SEP + "!104", e);
+      indexer.join();
+      splitKeyCount[0] += indexer.getSplitKeyCount();
+
+      try {
+        cloudClient.deleteById("a/" + BIT_SEP + "!104");
+        splitKeyCount[0]--;
+      } catch (Exception e) {
+        log.warn("Error deleting document a/" + BIT_SEP + "!104", e);
+      }
+      cloudClient.commit();
+      collectionClient.commit();
+
+      solrQuery = new SolrQuery("*:*").setRows(1000);
+      QueryResponse response = collectionClient.query(solrQuery);
+      log.info("Response from target collection: " + response);
+      assertEquals("DocCount on target collection does not match", splitKeyCount[0], response.getResults().getNumFound());
+
+      getCommonCloudSolrClient().getZkStateReader().updateClusterState(true);
+      ClusterState state = getCommonCloudSolrClient().getZkStateReader().getClusterState();
+      Slice slice = state.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD2);
+      assertNotNull("Routing rule map is null", slice.getRoutingRules());
+      assertFalse("Routing rule map is empty", slice.getRoutingRules().isEmpty());
+      assertNotNull("No routing rule exists for route key: " + splitKey, slice.getRoutingRules().get(splitKey + "!"));
+
+      boolean ruleRemoved = waitForRuleToExpire(splitKey, finishTime);
+      assertTrue("Routing rule was not expired", ruleRemoved);
     }
-    cloudClient.commit();
-    collectionClient.commit();
-
-    solrQuery = new SolrQuery("*:*").setRows(1000);
-    QueryResponse response = collectionClient.query(solrQuery);
-    log.info("Response from target collection: " + response);
-    assertEquals("DocCount on target collection does not match", splitKeyCount[0], response.getResults().getNumFound());
-    collectionClient.shutdown();
-    collectionClient = null;
-
-    getCommonCloudSolrClient().getZkStateReader().updateClusterState(true);
-    ClusterState state = getCommonCloudSolrClient().getZkStateReader().getClusterState();
-    Slice slice = state.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD2);
-    assertNotNull("Routing rule map is null", slice.getRoutingRules());
-    assertFalse("Routing rule map is empty", slice.getRoutingRules().isEmpty());
-    assertNotNull("No routing rule exists for route key: " + splitKey, slice.getRoutingRules().get(splitKey + "!"));
-
-    boolean ruleRemoved = waitForRuleToExpire(splitKey, finishTime);
-    assertTrue("Routing rule was not expired", ruleRemoved);
   }
 
   static class Indexer extends Thread {
diff --git a/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java b/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java
index f355f96..d2b9ac4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MultiThreadedOCPTest.java
@@ -30,7 +30,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.update.DirectUpdateHandler2;
-import org.junit.Before;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -49,10 +49,9 @@
 
   private static final int NUM_COLLECTIONS = 4;
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
 
     useJettyDataDir = false;
 
@@ -61,13 +60,12 @@
   }
 
   public MultiThreadedOCPTest() {
-    fixShardCount = true;
     sliceCount = 2;
-    shardCount = 4;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
 
     testParallelCollectionAPICalls();
     testTaskExclusivity();
@@ -76,131 +74,133 @@
   }
 
   private void testParallelCollectionAPICalls() throws IOException, SolrServerException {
-    SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
-
-    for(int i = 1 ; i <= NUM_COLLECTIONS ; i++) {
-      Create createCollectionRequest = new Create();
-      createCollectionRequest.setCollectionName("ocptest" + i);
-      createCollectionRequest.setNumShards(4);
-      createCollectionRequest.setConfigName("conf1");
-      createCollectionRequest.setAsyncId(String.valueOf(i));
-      createCollectionRequest.process(client);
-    }
-
-    boolean pass = false;
-    int counter = 0;
-    while(true) {
-      int numRunningTasks = 0;
-      for (int i = 1; i <= NUM_COLLECTIONS; i++)
-        if (getRequestState(i + "", client).equals("running"))
-          numRunningTasks++;
-      if(numRunningTasks > 1) {
-        pass = true;
-        break;
-      } else if(counter++ > 100)
-        break;
-      try {
-        Thread.sleep(100);
-      } catch (InterruptedException e) {
-        Thread.currentThread().interrupt();
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      for(int i = 1 ; i <= NUM_COLLECTIONS ; i++) {
+        Create createCollectionRequest = new Create();
+        createCollectionRequest.setCollectionName("ocptest" + i);
+        createCollectionRequest.setNumShards(4);
+        createCollectionRequest.setConfigName("conf1");
+        createCollectionRequest.setAsyncId(String.valueOf(i));
+        createCollectionRequest.process(client);
       }
-    }
-    assertTrue("More than one tasks were supposed to be running in parallel but they weren't.", pass);
-    for(int i=1;i<=NUM_COLLECTIONS;i++) {
-      String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
-      assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
+  
+      boolean pass = false;
+      int counter = 0;
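+      // poll REQUESTSTATUS until at least two CREATE tasks are observed
+      // running at the same time, which shows the overseer processes
+      // collection API calls in parallel; give up after ~100 polls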
+      while(true) {
+        int numRunningTasks = 0;
+        for (int i = 1; i <= NUM_COLLECTIONS; i++)
+          if (getRequestState(i + "", client).equals("running"))
+            numRunningTasks++;
+        if(numRunningTasks > 1) {
+          pass = true;
+          break;
+        } else if(counter++ > 100)
+          break;
+        try {
+          Thread.sleep(100);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+        }
+      }
+      assertTrue("More than one task was supposed to be running in parallel, but that was never observed.", pass);
+      for (int i = 1; i <= NUM_COLLECTIONS; i++) {
+        String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
+        assertTrue("Task " + i + " did not complete, final state: " + state, state.equals("completed"));
+      }
     }
   }
 
   private void testTaskExclusivity() throws IOException, SolrServerException {
-    SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
-    Create createCollectionRequest = new Create();
-    createCollectionRequest.setCollectionName("ocptest_shardsplit");
-    createCollectionRequest.setNumShards(4);
-    createCollectionRequest.setConfigName("conf1");
-    createCollectionRequest.setAsyncId("1000");
-    createCollectionRequest.process(client);
-
-    SplitShard splitShardRequest = new SplitShard();
-    splitShardRequest.setCollectionName("ocptest_shardsplit");
-    splitShardRequest.setShardName(SHARD1);
-    splitShardRequest.setAsyncId("1001");
-    splitShardRequest.process(client);
-
-    splitShardRequest = new SplitShard();
-    splitShardRequest.setCollectionName("ocptest_shardsplit");
-    splitShardRequest.setShardName(SHARD2);
-    splitShardRequest.setAsyncId("1002");
-    splitShardRequest.process(client);
-
-    int iterations = 0;
-    while(true) {
-      int runningTasks = 0;
-      int completedTasks = 0;
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      Create createCollectionRequest = new Create();
+      createCollectionRequest.setCollectionName("ocptest_shardsplit");
+      createCollectionRequest.setNumShards(4);
+      createCollectionRequest.setConfigName("conf1");
+      createCollectionRequest.setAsyncId("1000");
+      createCollectionRequest.process(client);
+  
+      SplitShard splitShardRequest = new SplitShard();
+      splitShardRequest.setCollectionName("ocptest_shardsplit");
+      splitShardRequest.setShardName(SHARD1);
+      splitShardRequest.setAsyncId("1001");
+      splitShardRequest.process(client);
+  
+      splitShardRequest = new SplitShard();
+      splitShardRequest.setCollectionName("ocptest_shardsplit");
+      splitShardRequest.setShardName(SHARD2);
+      splitShardRequest.setAsyncId("1002");
+      splitShardRequest.process(client);
+  
+      int iterations = 0;
+      while(true) {
+        int runningTasks = 0;
+        int completedTasks = 0;
+        for (int i = 1001; i <= 1002; i++) {
+          String state = getRequestState(i, client);
+          if (state.equals("running"))
+            runningTasks++;
+          if (state.equals("completed"))
+            completedTasks++;
+          assertFalse("We have a failed SPLITSHARD task", state.equals("failed"));
+        }
+        // TODO: REQUESTSTATUS might report more than one running task across multiple calls.
+        // The only way to fix this is to support checking multiple request ids in a single REQUESTSTATUS request.
+        
+        assertTrue("Mutual exclusion failed. Found more than one task running for the same collection", runningTasks < 2);
+  
+        if(completedTasks == 2 || iterations++ > REQUEST_STATUS_TIMEOUT)
+          break;
+  
+        try {
+          Thread.sleep(1000);
+        } catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          return;
+        }
+      }
       for (int i=1001;i<=1002;i++) {
-        String state = getRequestState(i, client);
-        if (state.equals("running"))
-          runningTasks++;
-        if (state.equals("completed"))
-          completedTasks++;
-        assertTrue("We have a failed SPLITSHARD task", !state.equals("failed"));
+        String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
+        assertTrue("Task " + i + " did not complete, final state: " + state, state.equals("completed"));
       }
-      // TODO: REQUESTSTATUS might come back with more than 1 running tasks over multiple calls.
-      // The only way to fix this is to support checking of multiple requestids in a single REQUESTSTATUS task.
-      
-      assertTrue("Mutual exclusion failed. Found more than one task running for the same collection", runningTasks < 2);
-
-      if(completedTasks == 2 || iterations++ > REQUEST_STATUS_TIMEOUT)
-        break;
-
-      try {
-        Thread.sleep(1000);
-      } catch (InterruptedException e) {
-        Thread.currentThread().interrupt();
-        return;
-      }
-    }
-    for (int i=1001;i<=1002;i++) {
-      String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
-      assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
     }
   }
 
   private void testDeduplicationOfSubmittedTasks() throws IOException, SolrServerException {
-    SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
-    Create createCollectionRequest = new Create();
-    createCollectionRequest.setCollectionName("ocptest_shardsplit2");
-    createCollectionRequest.setNumShards(4);
-    createCollectionRequest.setConfigName("conf1");
-    createCollectionRequest.setAsyncId("3000");
-    createCollectionRequest.process(client);
-
-    SplitShard splitShardRequest = new SplitShard();
-    splitShardRequest.setCollectionName("ocptest_shardsplit2");
-    splitShardRequest.setShardName(SHARD1);
-    splitShardRequest.setAsyncId("3001");
-    splitShardRequest.process(client);
-
-    splitShardRequest = new SplitShard();
-    splitShardRequest.setCollectionName("ocptest_shardsplit2");
-    splitShardRequest.setShardName(SHARD2);
-    splitShardRequest.setAsyncId("3002");
-    splitShardRequest.process(client);
-
-    // Now submit another task with the same id. At this time, hopefully the previous 3002 should still be in the queue.
-    splitShardRequest = new SplitShard();
-    splitShardRequest.setCollectionName("ocptest_shardsplit2");
-    splitShardRequest.setShardName(SHARD1);
-    splitShardRequest.setAsyncId("3002");
-    CollectionAdminResponse response = splitShardRequest.process(client);
-
-    NamedList r = response.getResponse();
-    assertEquals("Duplicate request was supposed to exist but wasn't found. De-duplication of submitted task failed.",
-        "Task with the same requestid already exists.", r.get("error"));
-
-    for (int i=3001;i<=3002;i++) {
-      String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
-      assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      Create createCollectionRequest = new Create();
+      createCollectionRequest.setCollectionName("ocptest_shardsplit2");
+      createCollectionRequest.setNumShards(4);
+      createCollectionRequest.setConfigName("conf1");
+      createCollectionRequest.setAsyncId("3000");
+      createCollectionRequest.process(client);
+  
+      SplitShard splitShardRequest = new SplitShard();
+      splitShardRequest.setCollectionName("ocptest_shardsplit2");
+      splitShardRequest.setShardName(SHARD1);
+      splitShardRequest.setAsyncId("3001");
+      splitShardRequest.process(client);
+  
+      splitShardRequest = new SplitShard();
+      splitShardRequest.setCollectionName("ocptest_shardsplit2");
+      splitShardRequest.setShardName(SHARD2);
+      splitShardRequest.setAsyncId("3002");
+      splitShardRequest.process(client);
+  
+      // Now submit another task with the same id. At this time, hopefully the previous 3002 should still be in the queue.
+      splitShardRequest = new SplitShard();
+      splitShardRequest.setCollectionName("ocptest_shardsplit2");
+      splitShardRequest.setShardName(SHARD1);
+      splitShardRequest.setAsyncId("3002");
+      CollectionAdminResponse response = splitShardRequest.process(client);
+  
+      NamedList r = response.getResponse();
+      assertEquals("Duplicate request was supposed to exist but wasn't found. De-duplication of submitted task failed.",
+          "Task with the same requestid already exists.", r.get("error"));
+  
+      for (int i = 3001; i <= 3002; i++) {
+        String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
+        assertTrue("Task " + i + " did not complete, final state: " + state, state.equals("completed"));
+      }
     }
   }
 
@@ -221,10 +221,8 @@
       }
     };
     indexThread.start();
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
 
-    try {
-
-      SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
       SplitShard splitShardRequest = new SplitShard();
       splitShardRequest.setCollectionName("collection1");
       splitShardRequest.setShardName(SHARD1);
@@ -299,10 +297,9 @@
   }
 
   @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("numShards");
-    System.clearProperty("zkHost");
     System.clearProperty("solr.xml.persist");
     
     // insurance
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionProcessorTest.java
index 949d0b9..b4a4b2a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionProcessorTest.java
@@ -338,6 +338,8 @@
       }
     }).anyTimes();
     
+    zkMap.put("/configs/myconfig", null);
+    
     return liveNodes;
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
index 9333996..49b7b60 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
@@ -18,20 +18,6 @@
  */
 
 
-import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
-import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
-import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.getSortedOverseerNodeNames;
-import static org.apache.solr.cloud.OverseerCollectionProcessor.getLeaderNode;
-import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrRequest;
@@ -46,9 +32,22 @@
 import org.apache.solr.common.params.MapSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.zookeeper.data.Stat;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.getLeaderNode;
+import static org.apache.solr.cloud.OverseerCollectionProcessor.getSortedOverseerNodeNames;
+import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
 
 @LuceneTestCase.Slow
 @SuppressSSL     // See SOLR-5776
@@ -60,19 +59,18 @@
 
   }
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
     client = createCloudClient(null);
   }
 
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-    client.shutdown();
+  @Override
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    client.close();
   }
 
   protected String getSolrXml() {
@@ -80,16 +78,14 @@
   }
 
   public OverseerRolesTest() {
-    fixShardCount = true;
-
     sliceCount = 2;
-    shardCount = TEST_NIGHTLY ? 6 : 2;
+    fixShardCount(TEST_NIGHTLY ? 6 : 2);
 
     checkCreatedVsState = false;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     testQuitCommand();
     testOverseerRole();
   }
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java
index f1793e7..941baf6 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerStatusTest.java
@@ -17,20 +17,11 @@
  * limitations under the License.
  */
 
-import org.apache.solr.client.solrj.SolrRequest;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
-import org.apache.solr.common.cloud.DocRouter;
 import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.junit.After;
-import org.junit.Before;
-
-import java.io.IOException;
+import org.junit.Test;
 
 public class OverseerStatusTest extends BasicDistributedZkTest {
 
@@ -39,33 +30,15 @@
   }
 
   @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
   }
 
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    if (VERBOSE || printLayoutOnTearDown) {
-      super.printLayout();
-    }
-    if (controlClient != null) {
-      controlClient.shutdown();
-    }
-    if (cloudClient != null) {
-      cloudClient.shutdown();
-    }
-    if (controlClientCloud != null) {
-      controlClientCloud.shutdown();
-    }
-    super.tearDown();
-  }
+  @Test
+  public void test() throws Exception {
 
-  @Override
-  public void doTest() throws Exception {
     waitForThingsToLevelOut(15);
 
     // find existing command counts because collection may be created by base test class too
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
index b87c94e..30ec2be 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
@@ -71,6 +71,8 @@
   
   private List<Overseer> overseers = new ArrayList<>();
   private List<ZkStateReader> readers = new ArrayList<>();
+  private List<HttpShardHandlerFactory> httpShardHandlerFactorys = new ArrayList<>();
+  private List<UpdateShardHandler> updateShardHandlers = new ArrayList<>();
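+  // every shard handler factory and update shard handler created by a test is
+  // tracked here so tearDown can close it; otherwise their worker threads
+  // would leak across tests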
   
   private String collection = "collection1";
   
@@ -205,6 +207,16 @@
       reader.close();
     }
     readers.clear();
+    
+    for (HttpShardHandlerFactory handlerFactory : httpShardHandlerFactorys) {
+      handlerFactory.close();
+    }
+    httpShardHandlerFactorys.clear();
+    
+    for (UpdateShardHandler updateShardHandler : updateShardHandlers) {
+      updateShardHandler.close();
+    }
+    updateShardHandlers.clear();
   }
 
   @Test
@@ -1118,8 +1130,11 @@
       overseers.get(overseers.size() -1).getZkStateReader().getZkClient().close();
     }
     UpdateShardHandler updateShardHandler = new UpdateShardHandler(null);
+    updateShardHandlers.add(updateShardHandler);
+    HttpShardHandlerFactory httpShardHandlerFactory = new HttpShardHandlerFactory();
+    httpShardHandlerFactorys.add(httpShardHandlerFactory);
     Overseer overseer = new Overseer(
-        new HttpShardHandlerFactory().getShardHandler(), updateShardHandler, "/admin/cores", reader, null, new MockConfigSolr());
+        httpShardHandlerFactory.getShardHandler(), updateShardHandler, "/admin/cores", reader, null, new MockConfigSolr());
     overseers.add(overseer);
     ElectionContext ec = new OverseerElectionContext(zkClient, overseer,
         address.replaceAll("/", "_"));
diff --git a/solr/core/src/test/org/apache/solr/cloud/RecoveryAfterSoftCommitTest.java b/solr/core/src/test/org/apache/solr/cloud/RecoveryAfterSoftCommitTest.java
new file mode 100644
index 0000000..6c6d4a6
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/RecoveryAfterSoftCommitTest.java
@@ -0,0 +1,106 @@
+package org.apache.solr.cloud;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.Replica;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+// See SOLR-6640
+@SolrTestCaseJ4.SuppressSSL
+public class RecoveryAfterSoftCommitTest extends AbstractFullDistribZkTestBase {
+
+  public RecoveryAfterSoftCommitTest() {
+    sliceCount = 1;
+    fixShardCount(2);
+  }
+
+  @BeforeClass
+  public static void beforeTests() {
+    System.setProperty("solr.tests.maxBufferedDocs", "2");
+  }
+
+  @AfterClass
+  public static void afterTest()  {
+    System.clearProperty("solr.tests.maxBufferedDocs");
+  }
+
+  /**
+   * Overrides the parent implementation to install a SocketProxy in-front of the Jetty server.
+   */
+  @Override
+  public JettySolrRunner createJetty(File solrHome, String dataDir,
+                                     String shardList, String solrConfigOverride, String schemaOverride)
+      throws Exception
+  {
+    return createProxiedJetty(solrHome, dataDir, shardList, solrConfigOverride, schemaOverride);
+  }
+
+  @Test
+  public void test() throws Exception {
+    // add four docs; with solr.tests.maxBufferedDocs=2 this forces two segment flushes
+    for (int i=0; i<4; i++) {
+      SolrInputDocument document = new SolrInputDocument();
+      document.addField("id", String.valueOf(i));
+      document.addField("a_t", "text_" + i);
+      cloudClient.add(document);
+    }
+
+    // soft-commit so searchers are open on un-committed but flushed segment files
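+    // (the three booleans are waitFlush, waitSearcher and softCommit)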
+    AbstractUpdateRequest request = new UpdateRequest().setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true, true);
+    cloudClient.request(request);
+
+    Replica notLeader = ensureAllReplicasAreActive(DEFAULT_COLLECTION, "shard1", 1, 2, 30).get(0);
+    // ok, now introduce a network partition between the leader and the replica
+    SocketProxy proxy = getProxyForReplica(notLeader);
+
+    proxy.close();
+
+    // add more than 100 docs so that peer sync cannot be used for recovery
+    for (int i=5; i<115; i++) {
+      SolrInputDocument document = new SolrInputDocument();
+      document.addField("id", String.valueOf(i));
+      document.addField("a_t", "text_" + i);
+      cloudClient.add(document);
+    }
+
+    // Have the partition last at least 1 sec.
+    // While this gives the impression that recovery is timing related, the
+    // pause is really only there to give the state time to be written to ZK
+    // before the test completes; without it, the test finishes so quickly
+    // that the recovery process never gets a chance to kick in.
+    Thread.sleep(2000L);
+
+    proxy.reopen();
+
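+    // the return value is not used; the call blocks (up to 30 secs) until the
+    // partitioned replica has recovered and become active again, failing the
+    // test otherwise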
+    List<Replica> notLeaders =
+        ensureAllReplicasAreActive(DEFAULT_COLLECTION, "shard1", 1, 2, 30);
+  }
+}
+
diff --git a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
index 638fd34..9966178 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RecoveryZkTest.java
@@ -25,6 +25,7 @@
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.ZkStateReader;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,7 +40,7 @@
   public RecoveryZkTest() {
     super();
     sliceCount = 1;
-    shardCount = 2;
+    fixShardCount(2);
     schemaString = "schema15.xml";      // we need a string id
   }
   
@@ -54,8 +55,8 @@
     return randVals;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     handle.clear();
     handle.put("timestamp", SKIPVAL);
     
@@ -148,7 +149,7 @@
 
   
   @Override
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     // make sure threads have been stopped...
     indexThread.safeStop();
     indexThread2.safeStop();
@@ -156,7 +157,7 @@
     indexThread.join();
     indexThread2.join();
     
-    super.tearDown();
+    super.distribTearDown();
   }
   
   // skip the randoms - they can deadlock...
diff --git a/solr/core/src/test/org/apache/solr/cloud/RemoteQueryErrorTest.java b/solr/core/src/test/org/apache/solr/cloud/RemoteQueryErrorTest.java
index 20e49dc..c583a1a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RemoteQueryErrorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RemoteQueryErrorTest.java
@@ -21,6 +21,7 @@
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
+import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -36,11 +37,11 @@
   public RemoteQueryErrorTest() {
     super();
     sliceCount = 1;
-    shardCount = random().nextBoolean() ? 3 : 4;
+    fixShardCount(random().nextBoolean() ? 3 : 4);
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     handle.clear();
     handle.put("timestamp", SKIPVAL);
     
@@ -66,8 +67,8 @@
       } catch(Exception ex) {
         fail("Expected a SolrException to occur, instead received: " + ex.getClass());
       } finally {
-        solrClient.shutdown();
+        solrClient.close();
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
index 965f35b..bb94825 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ReplicationFactorTest.java
@@ -19,27 +19,22 @@
 
 import java.io.File;
 import java.net.ServerSocket;
-import java.net.URI;
-import java.net.URL;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
-import java.util.Map;
 
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.NamedList;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -59,31 +54,25 @@
   public ReplicationFactorTest() {
     super();
     sliceCount = 3;
-    shardCount = 3;
+    fixShardCount(3);
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
   
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     
     log.info("tearing down replicationFactorTest!");
     
     System.clearProperty("numShards");
     
-    try {
-      super.tearDown();
-    } catch (Exception exc) {}
-    
-    resetExceptionIgnores();    
-    
-    log.info("super.tearDown complete, closing all socket proxies");
+    super.distribTearDown();
+
+    log.info("super.distribTearDown complete, closing all socket proxies");
     if (!proxies.isEmpty()) {
       for (SocketProxy proxy : proxies.values()) {
         proxy.close();
@@ -111,10 +100,10 @@
       port = s.getLocalPort();
     }
     return port;
-  }  
-   
-  @Override
-  public void doTest() throws Exception {
+  }
+
+  @Test
+  public void test() throws Exception {
     log.info("replication factor test running");
     waitForThingsToLevelOut(30000);
     
@@ -141,7 +130,20 @@
     String shardId = "shard1";
     int minRf = 2;
     
-    createCollection(testCollectionName, numShards, replicationFactor, maxShardsPerNode);
+    CollectionAdminResponse resp = createCollection(testCollectionName, numShards, replicationFactor, maxShardsPerNode);
+    
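+    // collection creation can partially fail on a busy machine; if the
+    // response reports any failure, delete whatever was created and retry once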
+    if (resp.getResponse().get("failure") != null) {
+      CollectionAdminRequest.Delete req = new CollectionAdminRequest.Delete();
+      req.setCollectionName(testCollectionName);
+      req.process(cloudClient);
+      
+      resp = createCollection(testCollectionName, numShards, replicationFactor, maxShardsPerNode);
+      
+      if (resp.getResponse().get("failure") != null) {
+        fail("Could not create " + testCollectionName);
+      }
+    }
+    
     cloudClient.setDefaultCollection(testCollectionName);
     
     List<Replica> replicas = 
@@ -162,8 +164,8 @@
     up.add(batch);
 
     Replica leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, shardId);
-    sendNonDirectUpdateRequestReplica(leader, up, 2, testCollectionName);    
-    sendNonDirectUpdateRequestReplica(replicas.get(0), up, 2, testCollectionName);    
+    sendNonDirectUpdateRequestReplicaWithRetry(leader, up, 2, testCollectionName);    
+    sendNonDirectUpdateRequestReplicaWithRetry(replicas.get(0), up, 2, testCollectionName);    
     
     // so now kill the replica of shard2 and verify the achieved rf is only 1
     List<Replica> shard2Replicas = 
@@ -175,8 +177,8 @@
     Thread.sleep(2000);
     
     // shard1 will have rf=2 but shard2 will only have rf=1
-    sendNonDirectUpdateRequestReplica(leader, up, 1, testCollectionName);    
-    sendNonDirectUpdateRequestReplica(replicas.get(0), up, 1, testCollectionName);
+    sendNonDirectUpdateRequestReplicaWithRetry(leader, up, 1, testCollectionName);    
+    sendNonDirectUpdateRequestReplicaWithRetry(replicas.get(0), up, 1, testCollectionName);
     
     // heal the partition
     getProxyForReplica(shard2Replicas.get(0)).reopen();
@@ -184,22 +186,27 @@
     Thread.sleep(2000);
   }
   
+
+  protected void sendNonDirectUpdateRequestReplicaWithRetry(Replica replica, UpdateRequest up, int expectedRf, String collection) throws Exception {
+    try {
+      sendNonDirectUpdateRequestReplica(replica, up, expectedRf, collection);
+    } catch (Exception e) {
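+      // requests straddling a partition heal can fail transiently, so give
+      // the update one more attempt before failing the test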
+      sendNonDirectUpdateRequestReplica(replica, up, expectedRf, collection);
+    }
+  }
+  
   @SuppressWarnings("rawtypes")
   protected void sendNonDirectUpdateRequestReplica(Replica replica, UpdateRequest up, int expectedRf, String collection) throws Exception {
-    HttpSolrClient solrServer = null;
-    try {
-      ZkCoreNodeProps zkProps = new ZkCoreNodeProps(replica);
-      String url = zkProps.getBaseUrl() + "/" + collection;
-      solrServer = new HttpSolrClient(url);
-            
+
+    ZkCoreNodeProps zkProps = new ZkCoreNodeProps(replica);
+    String url = zkProps.getBaseUrl() + "/" + collection;
+
+    try (HttpSolrClient solrServer = new HttpSolrClient(url)) {
       NamedList resp = solrServer.request(up);
       NamedList hdr = (NamedList) resp.get("responseHeader");
       Integer batchRf = (Integer)hdr.get(UpdateRequest.REPFACT);
       assertTrue("Expected rf="+expectedRf+" for batch but got "+
         batchRf+"; clusterState: "+printClusterStateInfo(), batchRf == expectedRf);      
-    } finally {
-      if (solrServer != null)
-        solrServer.shutdown();
     }
   }
     
diff --git a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
index 9e90821..4523d52 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
@@ -21,8 +21,7 @@
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.zookeeper.KeeperException;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,29 +35,25 @@
   private static final long MAX_WAIT_TIME = TimeUnit.NANOSECONDS.convert(300, TimeUnit.SECONDS);
 
   public RollingRestartTest() {
-    fixShardCount = true;
     sliceCount = 2;
-    shardCount = TEST_NIGHTLY ? 16 : 2;
+    fixShardCount(TEST_NIGHTLY ? 16 : 2);
   }
 
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     useFactory("solr.StandardDirectoryFactory");
   }
 
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     System.clearProperty("numShards");
-    super.tearDown();
-    resetExceptionIgnores();
+    super.distribTearDown();
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     waitForRecoveriesToFinish(false);
 
     restartWithRolesTest();
@@ -75,11 +70,11 @@
     cloudClient.getZkStateReader().getZkClient().printLayoutToStdOut();
 
     int numDesignateOverseers = TEST_NIGHTLY ? 16 : 2;
-    numDesignateOverseers = Math.max(shardCount, numDesignateOverseers);
+    numDesignateOverseers = Math.max(getShardCount(), numDesignateOverseers);
     List<String> designates = new ArrayList<>();
     List<CloudJettyRunner> designateJettys = new ArrayList<>();
     for (int i = 0; i < numDesignateOverseers; i++) {
-      int n = random().nextInt(shardCount);
+      int n = random().nextInt(getShardCount());
       String nodeName = cloudJettys.get(n).nodeName;
       log.info("Chose {} as overseer designate", nodeName);
       invokeCollectionApi(CollectionParams.ACTION, CollectionParams.CollectionAction.ADDROLE.toLower(), "role", "overseer", "node", nodeName);
diff --git a/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java b/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java
index fff1bf4..ee6fddc 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SSLMigrationTest.java
@@ -41,6 +41,7 @@
 import org.apache.solr.util.SSLTestConfig;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.junit.Test;
 
 /**
  * We want to make sure that when migrating between http and https modes the
@@ -51,9 +52,9 @@
 @SuppressSSL
 @BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-6213")
 public class SSLMigrationTest extends AbstractFullDistribZkTestBase {
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     //Migrate from HTTP -> HTTPS -> HTTP
     assertReplicaInformation("http");
     testMigrateSSL(new SSLTestConfig(true, false));
@@ -118,4 +119,4 @@
     new LBHttpSolrClient(urls.toArray(new String[]{})).request(request);
   }
   
-}
\ No newline at end of file
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java b/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java
new file mode 100644
index 0000000..0f33825
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/SaslZkACLProviderTest.java
@@ -0,0 +1,238 @@
+package org.apache.solr.cloud;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Locale;
+import javax.security.auth.login.Configuration;
+
+import org.apache.lucene.util.Constants;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.cloud.SaslZkACLProvider;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkACLProvider;
+import org.apache.solr.common.cloud.DefaultZkACLProvider;
+import org.apache.zookeeper.CreateMode;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@ThreadLeakScope(Scope.NONE) // zookeeper sasl login can leak threads, see ZOOKEEPER-2100
+public class SaslZkACLProviderTest extends SolrTestCaseJ4 {
+
+  protected static Logger log = LoggerFactory
+      .getLogger(SaslZkACLProviderTest.class);
+
+  private static final Charset DATA_ENCODING = Charset.forName("UTF-8");
+  // These locales don't generate dates that are compatible with Hadoop MiniKdc.
+  protected final static List<String> brokenLocales =
+    Arrays.asList(
+      "th_TH_TH_#u-nu-thai",
+      "ja_JP_JP_#u-ca-japanese",
+      "hi_IN");
+  protected Locale savedLocale = null;
+
+  protected ZkTestServer zkServer;
+
+  @BeforeClass
+  public static void beforeClass() {
+    assumeFalse("FIXME: SOLR-7040: This test fails under IBM J9",
+                Constants.JAVA_VENDOR.startsWith("IBM"));
+    System.setProperty("solrcloud.skip.autorecovery", "true");
+  }
+  
+  @AfterClass
+  public static void afterClass() throws InterruptedException {
+    System.clearProperty("solrcloud.skip.autorecovery");
+  }
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    if (brokenLocales.contains(Locale.getDefault().toString())) {
+      savedLocale = Locale.getDefault();
+      Locale.setDefault(Locale.US);
+    }
+    log.info("####SETUP_START " + getTestName());
+    createTempDir();
+
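+    // Stand up a SASL-protected ZkTestServer: an embedded MiniKdc issues the
+    // Kerberos credentials, so the znode writes below run over authenticated sessions.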
+    String zkDir = createTempDir() + File.separator
+        + "zookeeper/server1/data";
+    log.info("ZooKeeper dataDir:" + zkDir);
+    zkServer = new SaslZkTestServer(zkDir, createTempDir() + File.separator + "miniKdc");
+    zkServer.run();
+
+    System.setProperty("zkHost", zkServer.getZkAddress());
+
+    SolrZkClient zkClient = new SolrZkClientWithACLs(zkServer.getZkHost(), AbstractZkTestCase.TIMEOUT);
+    try {
+      zkClient.makePath("/solr", false, true);
+    } finally {
+      zkClient.close();
+    }
+    setupZNodes();
+
+    log.info("####SETUP_END " + getTestName());
+  }
+
+  protected void setupZNodes() throws Exception {
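+    // Create one pair of znodes through a client that applies SASL ACLs and a
+    // second pair through a client that applies the open default ACLs, giving the
+    // test both protected and unprotected paths to check against.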
+    SolrZkClient zkClient = new SolrZkClientWithACLs(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT);
+    try {
+      zkClient.create("/protectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false);
+      zkClient.makePath("/protectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false);
+    } finally {
+      zkClient.close();
+    }
+
+    zkClient = new SolrZkClientNoACLs(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT);
+    try {
+      zkClient.create("/unprotectedCreateNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false);
+      zkClient.makePath("/unprotectedMakePathNode", "content".getBytes(DATA_ENCODING), CreateMode.PERSISTENT, false);
+    } finally {
+      zkClient.close();
+    }
+  }
+
+  @Override
+  public void tearDown() throws Exception {
+    zkServer.shutdown();
+
+    if (savedLocale != null) {
+      Locale.setDefault(savedLocale);
+    }
+    super.tearDown();
+  }
+
+  @Test
+  public void testSaslZkACLProvider() throws Exception {
+    // Test with Sasl enabled
+    SolrZkClient zkClient = new SolrZkClientWithACLs(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT);
+    try {
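+      // the boolean flags encode which ZooKeeper operations are expected to
+      // succeed (see VMParamsZkACLAndCredentialsProvidersTest for their meaning)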
+      VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, true, true, true, true, true);
+    } finally {
+      zkClient.close();
+    }
+
+    // Test without Sasl enabled
+    setupZNodes();
+    System.setProperty("zookeeper.sasl.client", "false");
+    zkClient = new SolrZkClientNoACLs(zkServer.getZkAddress(), AbstractZkTestCase.TIMEOUT);
+    try {
+      VMParamsZkACLAndCredentialsProvidersTest.doTest(zkClient, true, true, false, false, false);
+    } finally {
+      zkClient.close();
+      System.clearProperty("zookeeper.sasl.client");
+    }
+  }
+
+  /**
+   * A SolrZKClient that adds Sasl ACLs
+   */
+  private static class SolrZkClientWithACLs extends SolrZkClient {
+
+    public SolrZkClientWithACLs(String zkServerAddress, int zkClientTimeout) {
+      super(zkServerAddress, zkClientTimeout);
+    }
+
+    @Override
+    public ZkACLProvider createZkACLProvider() {
+      return new SaslZkACLProvider();
+    }
+  }
+
+  /**
+   * A SolrZKClient that doesn't add ACLs
+   */
+  private static class SolrZkClientNoACLs extends SolrZkClient {
+
+    public SolrZkClientNoACLs(String zkServerAddress, int zkClientTimeout) {
+      super(zkServerAddress, zkClientTimeout);
+    }
+
+    @Override
+    public ZkACLProvider createZkACLProvider() {
+      return new DefaultZkACLProvider();
+    }
+  }
+
+  /**
+   * A ZkTestServer with Sasl support
+   */
+  public static class SaslZkTestServer extends ZkTestServer {
+    private String kdcDir;
+    private MiniKdc kdc;
+    private Configuration conf;
+
+    public SaslZkTestServer(String zkDir, String kdcDir) {
+      super(zkDir);
+      this.kdcDir = kdcDir;
+      conf = Configuration.getConfiguration();
+    }
+
+    public SaslZkTestServer(String zkDir, int port, String kdcDir) {
+      super(zkDir, port);
+      this.kdcDir = kdcDir;
+      conf = Configuration.getConfiguration();
+    }
+
+    @Override
+    public void run() throws InterruptedException {
+      try {
+        kdc = KerberosTestUtil.getKdc(new File(kdcDir));
+        // Don't require that credentials match the entire principal string, e.g.
+        // can match "solr" rather than "solr/host@DOMAIN"
+        System.setProperty("zookeeper.kerberos.removeRealmFromPrincipal", "true");
+        System.setProperty("zookeeper.kerberos.removeHostFromPrincipal", "true");
+        File keytabFile = new File(kdcDir, "keytabs");
+        String zkClientPrincipal = "solr";
+        String zkServerPrincipal = "zookeeper/127.0.0.1";
+
+        kdc.start();
+        // Create ZK client and server principals and load them into the Configuration
+        kdc.createPrincipal(keytabFile, zkClientPrincipal, zkServerPrincipal);
+        KerberosTestUtil.JaasConfiguration jaas = new KerberosTestUtil.JaasConfiguration(
+            zkClientPrincipal, keytabFile, zkServerPrincipal, keytabFile);
+        Configuration.setConfiguration(jaas);
+      } catch (Exception ex) {
+        throw new RuntimeException(ex);
+      }
+      super.run();
+    }
+
+    @Override
+    public void shutdown() throws IOException, InterruptedException {
+      super.shutdown();
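+      // restore the JVM to its pre-test state: drop the SASL system properties,
+      // reinstate the saved JAAS configuration, and stop the KDC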
+      System.clearProperty("zookeeper.authProvider.1");
+      System.clearProperty("zookeeper.kerberos.removeRealmFromPrincipal");
+      System.clearProperty("zookeeper.kerberos.removeHostFromPrincipal");
+      Configuration.setConfiguration(conf);
+      kdc.stop();
+    }
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java
index 99a04d9..fdb0871 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingCustomTest.java
@@ -18,6 +18,7 @@
  */
 
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 
 public class ShardRoutingCustomTest extends AbstractFullDistribZkTestBase {
@@ -34,8 +35,8 @@
     sliceCount = 0;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     boolean testFinished = false;
     try {
       doCustomSharding();
diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java
index a883f99..a3c7c00 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ShardRoutingTest.java
@@ -17,32 +17,16 @@
  * limitations under the License.
  */
 
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.cloud.CompositeIdRouter;
-import org.apache.solr.common.cloud.ZkNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.ShardParams;
-import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.servlet.SolrDispatchFilter;
-import org.apache.solr.update.DirectUpdateHandler2;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
+import org.junit.Test;
 
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 
 public class ShardRoutingTest extends AbstractFullDistribZkTestBase {
@@ -66,8 +50,6 @@
   public ShardRoutingTest() {
     schemaString = "schema15.xml";      // we need a string id
     super.sliceCount = 4;
-    super.shardCount = 8;
-    super.fixShardCount = true;  // we only want to test with exactly 4 slices.
 
     // from negative to positive, the upper bits of the hash ranges should be
     // shard1: top bits:10  80000000:bfffffff
@@ -109,8 +91,9 @@
      ***/
   }
 
-  @Override
-  public void doTest() throws Exception {
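+  // @ShardsFixed replaces the shardCount/fixShardCount fields removed from the
+  // constructor above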
+  @Test
+  @ShardsFixed(num = 8)
+  public void test() throws Exception {
     boolean testFinished = false;
     try {
       handle.clear();
@@ -137,7 +120,7 @@
   private void doHashingTest() throws Exception {
     log.info("### STARTING doHashingTest");
     assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size());
-    String shardKeys = ShardParams.SHARD_KEYS;
+    String shardKeys = ShardParams._ROUTE_;
     // for now,  we know how ranges will be distributed to shards.
     // may have to look it up in clusterstate if that assumption changes.
 
@@ -282,12 +265,12 @@
     assertEquals(1, nEnd - nStart);   // short circuit should prevent distrib search
 
     nStart = getNumRequests();
-    replica.client.solrClient.query( params("q","*:*", "shard.keys","b!") );
+    replica.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!") );
     nEnd = getNumRequests();
     assertEquals(1, nEnd - nStart);   // short circuit should prevent distrib search
 
     nStart = getNumRequests();
-    leader2.client.solrClient.query( params("q","*:*", "shard.keys","b!") );
+    leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!") );
     nEnd = getNumRequests();
     assertEquals(3, nEnd - nStart);   // original + 2 phase distrib search.  we could improve this!
 
@@ -297,12 +280,12 @@
     assertEquals(9, nEnd - nStart);   // original + 2 phase distrib search * 4 shards.
 
     nStart = getNumRequests();
-    leader2.client.solrClient.query( params("q","*:*", "shard.keys","b!,d!") );
+    leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!,d!") );
     nEnd = getNumRequests();
     assertEquals(5, nEnd - nStart);   // original + 2 phase distrib search * 2 shards.
 
     nStart = getNumRequests();
-    leader2.client.solrClient.query( params("q","*:*", "shard.keys","b!,f1!f2!") );
+    leader2.client.solrClient.query( params("q","*:*", ShardParams._ROUTE_, "b!,f1!f2!") );
     nEnd = getNumRequests();
     assertEquals(5, nEnd - nStart);
   }
diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
index 06c3df7..2fd38e7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java
@@ -39,8 +39,7 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.junit.After;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -66,33 +65,15 @@
   }
 
   @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
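+    // distribSetUp()/distribTearDown() are the base-class hooks that replace the
+    // @Before/@After setUp()/tearDown() overrides removed in this change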
+    super.distribSetUp();
     System.setProperty("numShards", Integer.toString(sliceCount));
     System.setProperty("solr.xml.persist", "true");
   }
 
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    if (VERBOSE || printLayoutOnTearDown) {
-      super.printLayout();
-    }
-    if (controlClient != null) {
-      controlClient.shutdown();
-    }
-    if (cloudClient != null) {
-      cloudClient.shutdown();
-    }
-    if (controlClientCloud != null) {
-      controlClientCloud.shutdown();
-    }
-    super.tearDown();
-  }
+  @Test
+  public void test() throws Exception {
 
-  @Override
-  public void doTest() throws Exception {
     waitForThingsToLevelOut(15);
 
     if (usually()) {
@@ -251,10 +232,8 @@
         .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
 
     HashMap<String, List<Integer>> collectionInfos = new HashMap<>();
-    CloudSolrClient client = null;
     String shard_fld = "shard_s";
-    try {
-      client = createCloudClient(null);
+    try (CloudSolrClient client = createCloudClient(null)) {
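+      // try-with-resources replaces the old shutdown()-in-finally pattern now
+      // that the SolrJ clients are AutoCloseable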
       Map<String, Object> props = ZkNodeProps.makeMap(
           REPLICATION_FACTOR, replicationFactor,
           MAX_SHARDS_PER_NODE, maxShardsPerNode,
@@ -262,8 +241,6 @@
           "router.field", shard_fld);
 
       createCollection(collectionInfos, collectionName,props,client);
-    } finally {
-      if (client != null) client.shutdown();
     }
 
     List<Integer> list = collectionInfos.get(collectionName);
@@ -273,52 +250,52 @@
 
     String url = CustomCollectionTest.getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
 
-    HttpSolrClient collectionClient = new HttpSolrClient(url);
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
 
-    ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
-    final DocRouter router = clusterState.getCollection(collectionName).getRouter();
-    Slice shard1 = clusterState.getSlice(collectionName, SHARD1);
-    DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange();
-    final List<DocRouter.Range> ranges = router.partitionRange(2, shard1Range);
-    final int[] docCounts = new int[ranges.size()];
+      ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
+      final DocRouter router = clusterState.getCollection(collectionName).getRouter();
+      Slice shard1 = clusterState.getSlice(collectionName, SHARD1);
+      DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange();
+      final List<DocRouter.Range> ranges = router.partitionRange(2, shard1Range);
+      final int[] docCounts = new int[ranges.size()];
 
-    for (int i = 100; i <= 200; i++) {
-      String shardKey = "" + (char)('a' + (i % 26)); // See comment in ShardRoutingTest for hash distribution
+      for (int i = 100; i <= 200; i++) {
+        String shardKey = "" + (char) ('a' + (i % 26)); // See comment in ShardRoutingTest for hash distribution
 
-      collectionClient.add(getDoc(id, i, "n_ti", i, shard_fld, shardKey));
-      int idx = getHashRangeIdx(router, ranges, shardKey);
-      if (idx != -1)  {
-        docCounts[idx]++;
-      }
-    }
-
-    for (int i = 0; i < docCounts.length; i++) {
-      int docCount = docCounts[i];
-      log.info("Shard {} docCount = {}", "shard1_" + i, docCount);
-    }
-
-    collectionClient.commit();
-
-    for (int i = 0; i < 3; i++) {
-      try {
-        splitShard(collectionName, SHARD1, null, null);
-        break;
-      } catch (HttpSolrClient.RemoteSolrException e) {
-        if (e.code() != 500) {
-          throw e;
-        }
-        log.error("SPLITSHARD failed. " + (i < 2 ? " Retring split" : ""), e);
-        if (i == 2) {
-          fail("SPLITSHARD was not successful even after three tries");
+        collectionClient.add(getDoc(id, i, "n_ti", i, shard_fld, shardKey));
+        int idx = getHashRangeIdx(router, ranges, shardKey);
+        if (idx != -1) {
+          docCounts[idx]++;
         }
       }
+
+      for (int i = 0; i < docCounts.length; i++) {
+        int docCount = docCounts[i];
+        log.info("Shard {} docCount = {}", "shard1_" + i, docCount);
+      }
+
+      collectionClient.commit();
+
+      for (int i = 0; i < 3; i++) {
+        try {
+          splitShard(collectionName, SHARD1, null, null);
+          break;
+        } catch (HttpSolrClient.RemoteSolrException e) {
+          if (e.code() != 500) {
+            throw e;
+          }
+          log.error("SPLITSHARD failed. " + (i < 2 ? " Retring split" : ""), e);
+          if (i == 2) {
+            fail("SPLITSHARD was not successful even after three tries");
+          }
+        }
+      }
+
+      waitForRecoveriesToFinish(collectionName, false);
+
+      assertEquals(docCounts[0], collectionClient.query(new SolrQuery("*:*").setParam("shards", "shard1_0")).getResults().getNumFound());
+      assertEquals(docCounts[1], collectionClient.query(new SolrQuery("*:*").setParam("shards", "shard1_1")).getResults().getNumFound());
     }
-
-    waitForRecoveriesToFinish(collectionName, false);
-
-    assertEquals(docCounts[0], collectionClient.query(new SolrQuery("*:*").setParam("shards", "shard1_0")).getResults().getNumFound());
-    assertEquals(docCounts[1], collectionClient.query(new SolrQuery("*:*").setParam("shards", "shard1_1")).getResults().getNumFound());
-    collectionClient.shutdown();
   }
 
   private void splitByRouteKeyTest() throws Exception {
@@ -330,17 +307,14 @@
         .getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
 
     HashMap<String, List<Integer>> collectionInfos = new HashMap<>();
-    CloudSolrClient client = null;
-    try {
-      client = createCloudClient(null);
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       Map<String, Object> props = ZkNodeProps.makeMap(
           REPLICATION_FACTOR, replicationFactor,
           MAX_SHARDS_PER_NODE, maxShardsPerNode,
           NUM_SLICES, numShards);
 
       createCollection(collectionInfos, collectionName,props,client);
-    } finally {
-      if (client != null) client.shutdown();
     }
 
     List<Integer> list = collectionInfos.get(collectionName);
@@ -350,66 +324,66 @@
 
     String url = CustomCollectionTest.getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
 
-    HttpSolrClient collectionClient = new HttpSolrClient(url);
+    try (HttpSolrClient collectionClient = new HttpSolrClient(url)) {
 
-    String splitKey = "b!";
+      String splitKey = "b!";
 
-    ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
-    final DocRouter router = clusterState.getCollection(collectionName).getRouter();
-    Slice shard1 = clusterState.getSlice(collectionName, SHARD1);
-    DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange();
-    final List<DocRouter.Range> ranges = ((CompositeIdRouter) router).partitionRangeByKey(splitKey, shard1Range);
-    final int[] docCounts = new int[ranges.size()];
+      ClusterState clusterState = cloudClient.getZkStateReader().getClusterState();
+      final DocRouter router = clusterState.getCollection(collectionName).getRouter();
+      Slice shard1 = clusterState.getSlice(collectionName, SHARD1);
+      DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange();
+      final List<DocRouter.Range> ranges = ((CompositeIdRouter) router).partitionRangeByKey(splitKey, shard1Range);
+      final int[] docCounts = new int[ranges.size()];
 
-    int uniqIdentifier = (1<<12);
-    int splitKeyDocCount = 0;
-    for (int i = 100; i <= 200; i++) {
-      String shardKey = "" + (char)('a' + (i % 26)); // See comment in ShardRoutingTest for hash distribution
+      int uniqIdentifier = (1 << 12);
+      int splitKeyDocCount = 0;
+      for (int i = 100; i <= 200; i++) {
+        String shardKey = "" + (char) ('a' + (i % 26)); // See comment in ShardRoutingTest for hash distribution
 
-      String idStr = shardKey + "!" + i;
-      collectionClient.add(getDoc(id, idStr, "n_ti", (shardKey + "!").equals(splitKey) ? uniqIdentifier : i));
-      int idx = getHashRangeIdx(router, ranges, idStr);
-      if (idx != -1)  {
-        docCounts[idx]++;
-      }
-      if (splitKey.equals(shardKey + "!"))
-        splitKeyDocCount++;
-    }
-
-    for (int i = 0; i < docCounts.length; i++) {
-      int docCount = docCounts[i];
-      log.info("Shard {} docCount = {}", "shard1_" + i, docCount);
-    }
-    log.info("Route key doc count = {}", splitKeyDocCount);
-
-    collectionClient.commit();
-
-    for (int i = 0; i < 3; i++) {
-      try {
-        splitShard(collectionName, null, null, splitKey);
-        break;
-      } catch (HttpSolrClient.RemoteSolrException e) {
-        if (e.code() != 500) {
-          throw e;
+        String idStr = shardKey + "!" + i;
+        collectionClient.add(getDoc(id, idStr, "n_ti", (shardKey + "!").equals(splitKey) ? uniqIdentifier : i));
+        int idx = getHashRangeIdx(router, ranges, idStr);
+        if (idx != -1) {
+          docCounts[idx]++;
         }
-        log.error("SPLITSHARD failed. " + (i < 2 ? " Retring split" : ""), e);
-        if (i == 2) {
-          fail("SPLITSHARD was not successful even after three tries");
+        if (splitKey.equals(shardKey + "!"))
+          splitKeyDocCount++;
+      }
+
+      for (int i = 0; i < docCounts.length; i++) {
+        int docCount = docCounts[i];
+        log.info("Shard {} docCount = {}", "shard1_" + i, docCount);
+      }
+      log.info("Route key doc count = {}", splitKeyDocCount);
+
+      collectionClient.commit();
+
+      for (int i = 0; i < 3; i++) {
+        try {
+          splitShard(collectionName, null, null, splitKey);
+          break;
+        } catch (HttpSolrClient.RemoteSolrException e) {
+          if (e.code() != 500) {
+            throw e;
+          }
+          log.error("SPLITSHARD failed. " + (i < 2 ? " Retring split" : ""), e);
+          if (i == 2) {
+            fail("SPLITSHARD was not successful even after three tries");
+          }
         }
       }
+
+      waitForRecoveriesToFinish(collectionName, false);
+      SolrQuery solrQuery = new SolrQuery("*:*");
+      assertEquals("DocCount on shard1_0 does not match", docCounts[0], collectionClient.query(solrQuery.setParam("shards", "shard1_0")).getResults().getNumFound());
+      assertEquals("DocCount on shard1_1 does not match", docCounts[1], collectionClient.query(solrQuery.setParam("shards", "shard1_1")).getResults().getNumFound());
+      assertEquals("DocCount on shard1_2 does not match", docCounts[2], collectionClient.query(solrQuery.setParam("shards", "shard1_2")).getResults().getNumFound());
+
+      solrQuery = new SolrQuery("n_ti:" + uniqIdentifier);
+      assertEquals("shard1_0 must have 0 docs for route key: " + splitKey, 0, collectionClient.query(solrQuery.setParam("shards", "shard1_0")).getResults().getNumFound());
+      assertEquals("Wrong number of docs on shard1_1 for route key: " + splitKey, splitKeyDocCount, collectionClient.query(solrQuery.setParam("shards", "shard1_1")).getResults().getNumFound());
+      assertEquals("shard1_2 must have 0 docs for route key: " + splitKey, 0, collectionClient.query(solrQuery.setParam("shards", "shard1_2")).getResults().getNumFound());
     }
-
-    waitForRecoveriesToFinish(collectionName, false);
-    SolrQuery solrQuery = new SolrQuery("*:*");
-    assertEquals("DocCount on shard1_0 does not match", docCounts[0], collectionClient.query(solrQuery.setParam("shards", "shard1_0")).getResults().getNumFound());
-    assertEquals("DocCount on shard1_1 does not match", docCounts[1], collectionClient.query(solrQuery.setParam("shards", "shard1_1")).getResults().getNumFound());
-    assertEquals("DocCount on shard1_2 does not match", docCounts[2], collectionClient.query(solrQuery.setParam("shards", "shard1_2")).getResults().getNumFound());
-
-    solrQuery = new SolrQuery("n_ti:" + uniqIdentifier);
-    assertEquals("shard1_0 must have 0 docs for route key: " + splitKey, 0, collectionClient.query(solrQuery.setParam("shards", "shard1_0")).getResults().getNumFound());
-    assertEquals("Wrong number of docs on shard1_1 for route key: " + splitKey, splitKeyDocCount, collectionClient.query(solrQuery.setParam("shards", "shard1_1")).getResults().getNumFound());
-    assertEquals("shard1_2 must have 0 docs for route key: " + splitKey, 0, collectionClient.query(solrQuery.setParam("shards", "shard1_2")).getResults().getNumFound());
-    collectionClient.shutdown();
   }
 
   protected void checkDocCountsAndShardStates(int[] docCounts, int numReplicas) throws Exception {
@@ -446,23 +420,17 @@
     query.set("distrib", false);
 
     ZkCoreNodeProps shard1_0 = getLeaderUrlFromZk(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1_0);
-    HttpSolrClient shard1_0Client = new HttpSolrClient(shard1_0.getCoreUrl());
     QueryResponse response;
-    try {
+    try (HttpSolrClient shard1_0Client = new HttpSolrClient(shard1_0.getCoreUrl())) {
       response = shard1_0Client.query(query);
-    } finally {
-      shard1_0Client.shutdown();
     }
     long shard10Count = response.getResults().getNumFound();
 
     ZkCoreNodeProps shard1_1 = getLeaderUrlFromZk(
         AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1_1);
-    HttpSolrClient shard1_1Client = new HttpSolrClient(shard1_1.getCoreUrl());
     QueryResponse response2;
-    try {
+    try (HttpSolrClient shard1_1Client = new HttpSolrClient(shard1_1.getCoreUrl())) {
       response2 = shard1_1Client.query(query);
-    } finally {
-      shard1_1Client.shutdown();
     }
     long shard11Count = response2.getResults().getNumFound();
 
@@ -472,7 +440,7 @@
     assertEquals("Wrong doc count on shard1_1. See SOLR-5309", docCounts[1], shard11Count);
   }
 
-  protected void checkSubShardConsistency(String shard) throws SolrServerException {
+  protected void checkSubShardConsistency(String shard) throws SolrServerException, IOException {
     SolrQuery query = new SolrQuery("*:*").setRows(1000).setFields("id", "_version_");
     query.set("distrib", false);
 
@@ -482,12 +450,9 @@
     int c = 0;
     for (Replica replica : slice.getReplicas()) {
       String coreUrl = new ZkCoreNodeProps(replica).getCoreUrl();
-      HttpSolrClient client = new HttpSolrClient(coreUrl);
       QueryResponse response;
-      try {
+      try (HttpSolrClient client = new HttpSolrClient(coreUrl)) {
         response = client.query(query);
-      } finally {
-        client.shutdown();
       }
       numFound[c++] = response.getResults().getNumFound();
       log.info("Shard: " + shard + " Replica: {} has {} docs", coreUrl, String.valueOf(response.getResults().getNumFound()));
@@ -525,11 +490,11 @@
         .getBaseURL();
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
 
-    HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
-    baseClient.setConnectionTimeout(30000);
-    baseClient.setSoTimeout(60000 * 5);
-    baseClient.request(request);
-    baseClient.shutdown();
+    try (HttpSolrClient baseClient = new HttpSolrClient(baseUrl)) {
+      baseClient.setConnectionTimeout(30000);
+      baseClient.setSoTimeout(60000 * 5);
+      baseClient.request(request);
+    }
   }
 
   protected void indexAndUpdateCount(DocRouter router, List<DocRouter.Range> ranges, int[] docCounts, String id, int n) throws Exception {
diff --git a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java
index c9347ff..8c15a62 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java
@@ -49,6 +49,7 @@
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
 
@@ -80,10 +81,9 @@
     dfsCluster = null;
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     useJettyDataDir = false;
     System.setProperty("solr.xml.persist", "true");
   }
@@ -94,18 +94,16 @@
 
   
   public SharedFSAutoReplicaFailoverTest() {
-    fixShardCount = true;
-    
     sliceCount = 2;
-    shardCount = 4;
     completionService = new ExecutorCompletionService<>(executor);
     pending = new HashSet<>();
     checkCreatedVsState = false;
     
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     try {
       testBasics();
     } finally {
@@ -238,8 +236,8 @@
   }
   
   @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("solr.xml.persist");
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/SimpleCollectionCreateDeleteTest.java b/solr/core/src/test/org/apache/solr/cloud/SimpleCollectionCreateDeleteTest.java
index bf50d8d..0561183 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SimpleCollectionCreateDeleteTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SimpleCollectionCreateDeleteTest.java
@@ -22,17 +22,17 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.junit.Test;
 
 public class SimpleCollectionCreateDeleteTest extends AbstractFullDistribZkTestBase {
 
   public SimpleCollectionCreateDeleteTest() {
-    fixShardCount = true;
     sliceCount = 1;
-    shardCount = 1;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 1)
+  public void test() throws Exception {
     String overseerNode = OverseerCollectionProcessor.getLeaderNode(cloudClient.getZkStateReader().getZkClient());
     String notOverseerNode = null;
     for (CloudJettyRunner cloudJetty : cloudJettys) {
diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java
index ccde4bb..7728deb 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SolrXmlInZkTest.java
@@ -29,6 +29,7 @@
 import org.apache.solr.core.ConfigSolr;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.servlet.SolrDispatchFilter;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -52,10 +53,19 @@
 
   private ConfigSolr cfg;
 
+  private SolrDispatchFilter solrDispatchFilter;
+
   @Before
-  public void beforeClass() {
+  public void before() {
     System.setProperty("solr.solrxml.location", "zookeeper");
   }
+  
+  @After
+  public void after() {
+    if (solrDispatchFilter != null) {
+      solrDispatchFilter.destroy();
+    }
+  }
 
   private void setUpZkAndDiskXml(boolean toZk, boolean leaveOnLocal) throws Exception {
     File tmpDir = createTempDir().toFile();
@@ -96,8 +106,9 @@
 
     Method method = SolrDispatchFilter.class.getDeclaredMethod("loadConfigSolr", SolrResourceLoader.class);
     method.setAccessible(true);
-
-    Object obj = method.invoke(new SolrDispatchFilter(), new SolrResourceLoader(null));
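+    // keep the filter in a field and destroy any previous instance so each
+    // reload releases its resources instead of leaking them across invocations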
+    if (solrDispatchFilter != null) solrDispatchFilter.destroy();
+    solrDispatchFilter = new SolrDispatchFilter();
+    Object obj = method.invoke(solrDispatchFilter, new SolrResourceLoader(null));
     cfg = (ConfigSolr) obj;
 
     log.info("####SETUP_END " + getTestName());
@@ -200,7 +211,9 @@
     try {
       Method method = SolrDispatchFilter.class.getDeclaredMethod("loadConfigSolr", SolrResourceLoader.class);
       method.setAccessible(true);
-      method.invoke(new SolrDispatchFilter(), new SolrResourceLoader(null));
+      if (solrDispatchFilter != null) solrDispatchFilter.destroy();
+      solrDispatchFilter = new SolrDispatchFilter();
+      method.invoke(solrDispatchFilter, new SolrResourceLoader(null));
       fail("Should have thrown an exception");
     } catch (InvocationTargetException ite) {
       assertTrue("Should be catching a SolrException", ite.getTargetException() instanceof SolrException);
diff --git a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java
index ef2278f..fedec65 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SyncSliceTest.java
@@ -32,10 +32,9 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -61,33 +60,30 @@
     
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     // we expect this time of exception as shards go up and down...
     //ignoreException(".*");
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
   
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     if (!success) {
       printLayoutOnTearDown = true;
     }
-    super.tearDown();
-    resetExceptionIgnores();
+    super.distribTearDown();
   }
   
   public SyncSliceTest() {
     super();
     sliceCount = 1;
-    shardCount = TEST_NIGHTLY ? 7 : 4;
+    fixShardCount(TEST_NIGHTLY ? 7 : 4);
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     
     handle.clear();
     handle.put("timestamp", SKIPVAL);
@@ -132,13 +128,12 @@
         .getBaseURL();
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
     
-    HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
-    // we only set the connect timeout, not so timeout
-    baseClient.setConnectionTimeout(30000);
-    baseClient.request(request);
-    baseClient.shutdown();
-    baseClient = null;
-    
+    try (HttpSolrClient baseClient = new HttpSolrClient(baseUrl)) {
+      // we only set the connect timeout, not the socket (so) timeout
+      baseClient.setConnectionTimeout(30000);
+      baseClient.request(request);
+    }
+
     waitForThingsToLevelOut(15);
     
     checkShardConsistency(false, true);
@@ -160,7 +155,7 @@
     Set<CloudJettyRunner> jetties = new HashSet<>();
     jetties.addAll(shardToJetty.get("shard1"));
     jetties.remove(leaderJetty);
-    assertEquals(shardCount - 1, jetties.size());
+    assertEquals(getShardCount() - 1, jetties.size());
     
     chaosMonkey.killJetty(leaderJetty);
     
@@ -224,7 +219,7 @@
     jetties = new HashSet<>();
     jetties.addAll(shardToJetty.get("shard1"));
     jetties.remove(leaderJetty);
-    assertEquals(shardCount - 1, jetties.size());
+    assertEquals(getShardCount() - 1, jetties.size());
 
     
     // kill the current leader
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
index 5c0d4b7..8b14bfc 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
@@ -16,8 +16,6 @@
  */
 package org.apache.solr.cloud;
 
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -38,6 +36,7 @@
 
 import org.junit.BeforeClass;
 
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -95,8 +94,8 @@
     log.info("init'ing useFieldRandomizedFactor = {}", useFieldRandomizedFactor);
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
 
     sanityCheckAssertDoubles();
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java
index 9292048..0de7e76 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java
@@ -33,9 +33,7 @@
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.zookeeper.KeeperException;
-import org.junit.Before;
-
-import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARD_UNIQUE;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -44,6 +42,8 @@
 import java.util.Locale;
 import java.util.Map;
 
+import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARD_UNIQUE;
+
 public class TestCollectionAPI extends ReplicaPropertiesBase {
 
   public static final String COLLECTION_NAME = "testcollection";
@@ -54,23 +54,17 @@
   }
 
   @Override
-  @Before
-  public void setUp() throws Exception {
-    fixShardCount = true;
+  public void distribSetUp() throws Exception {
     sliceCount = 2;
-    shardCount = 2;
-    super.setUp();
+    super.distribSetUp();
   }
 
-  @Override
-  public void doTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+  @Test
+  @ShardsFixed(num = 2)
+  public void test() throws Exception {
+    try (CloudSolrClient client = createCloudClient(null)) {
       createCollection(null, COLLECTION_NAME, 2, 2, 2, client, null, "conf1");
       createCollection(null, COLLECTION_NAME1, 1, 1, 1, client, null, "conf1");
-    } finally {
-      //remove collections
-      client.shutdown();
     }
 
     waitForCollection(cloudClient.getZkStateReader(), COLLECTION_NAME, 2);
@@ -89,8 +83,8 @@
   }
 
   private void clusterStatusWithCollectionAndShard() throws IOException, SolrServerException {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
       params.set("collection", COLLECTION_NAME);
@@ -111,16 +105,12 @@
       Map<String, Object> selectedShardStatus = (Map<String, Object>) shardStatus.get(SHARD1);
       assertNotNull(selectedShardStatus);
 
-    } finally {
-      //remove collections
-      client.shutdown();
     }
   }
 
 
   private void listCollection() throws IOException, SolrServerException {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.LIST.toString());
       SolrRequest request = new QueryRequest(params);
@@ -132,17 +122,13 @@
       assertTrue(DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION));
       assertTrue(COLLECTION_NAME + " was not found in list", collections.contains(COLLECTION_NAME));
       assertTrue(COLLECTION_NAME1 + " was not found in list", collections.contains(COLLECTION_NAME1));
-    } finally {
-      //remove collections
-      client.shutdown();
     }
 
-
   }
 
   private void clusterStatusNoCollection() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
       SolrRequest request = new QueryRequest(params);
@@ -159,16 +145,12 @@
       List<String> liveNodes = (List<String>) cluster.get("live_nodes");
       assertNotNull("Live nodes should not be null", liveNodes);
       assertFalse(liveNodes.isEmpty());
-    } finally {
-      //remove collections
-      client.shutdown();
     }
 
   }
 
   private void clusterStatusWithCollection() throws IOException, SolrServerException {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
       params.set("collection", COLLECTION_NAME);
@@ -182,15 +164,11 @@
       assertNotNull("Collections should not be null in cluster state", collections);
       assertNotNull(collections.get(COLLECTION_NAME));
       assertEquals(1, collections.size());
-    } finally {
-      //remove collections
-      client.shutdown();
     }
   }
 
   private void clusterStatusWithRouteKey() throws IOException, SolrServerException {
-    CloudSolrClient client = createCloudClient(DEFAULT_COLLECTION);
-    try {
+    try (CloudSolrClient client = createCloudClient(DEFAULT_COLLECTION)) {
       SolrInputDocument doc = new SolrInputDocument();
       doc.addField("id", "a!123"); // goes to shard2. see ShardRoutingTest for details
       client.add(doc);
@@ -215,15 +193,11 @@
       assertEquals(1, shardStatus.size());
       Map<String, Object> selectedShardStatus = (Map<String, Object>) shardStatus.get(SHARD2);
       assertNotNull(selectedShardStatus);
-    } finally {
-      //remove collections
-      client.shutdown();
     }
   }
 
   private void clusterStatusAliasTest() throws Exception  {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.CREATEALIAS.toString());
       params.set("name", "myalias");
@@ -253,14 +227,11 @@
       Map<String, Object> collection = (Map<String, Object>) collections.get(DEFAULT_COLLECTION);
       List<String> collAlias = (List<String>) collection.get("aliases");
       assertEquals("Aliases not found", Lists.newArrayList("myalias"), collAlias);
-    } finally {
-      client.shutdown();
     }
   }
 
   private void clusterStatusRolesTest() throws Exception  {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       client.connect();
       Replica replica = client.getZkStateReader().getLeaderRetry(DEFAULT_COLLECTION, SHARD1);
 
@@ -287,14 +258,11 @@
       assertNotNull(overseer);
       assertEquals(1, overseer.size());
       assertTrue(overseer.contains(replica.getNodeName()));
-    } finally {
-      client.shutdown();
     }
   }
 
   private void replicaPropTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       client.connect();
       Map<String, Slice> slices = client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlicesMap();
       List<String> sliceList = new ArrayList<>(slices.keySet());
@@ -570,8 +538,6 @@
       verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.node_name");
       verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.base_url");
 
-    } finally {
-      client.shutdown();
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java b/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java
index 8238a04..751c6a0 100755
--- a/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestDistribDocBasedVersion.java
@@ -24,6 +24,7 @@
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.StrUtils;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.util.HashMap;
 import java.util.HashSet;
@@ -53,8 +54,6 @@
   public TestDistribDocBasedVersion() {
     schemaString = "schema15.xml";      // we need a string id
     super.sliceCount = 2;
-    super.shardCount = 4;
-    super.fixShardCount = true;  // we only want to test with exactly 2 slices.
 
 
     /***
@@ -87,8 +86,9 @@
      ***/
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     boolean testFinished = false;
     try {
       handle.clear();
@@ -346,10 +346,4 @@
     req.setParams(params(reqParams));
     req.process(cloudClient);
   }
-
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java
index 851c41f..91366ff 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionZkExpiry.java
@@ -45,7 +45,6 @@
       server.run();
       AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
       AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
-      cc.load();
 
       final ZkController zkController = new ZkController(cc, server.getZkAddress(), 15000, 30000, "dummy.host.com", "8984", "/solr", 180000, 180000, true, new CurrentCoreDescriptorProvider() {
         @Override
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java
new file mode 100644
index 0000000..07bf05a
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudClusterSSL.java
@@ -0,0 +1,86 @@
+package org.apache.solr.cloud;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.common.params.CoreAdminParams.CoreAdminAction;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * Tests SSL (if test framework selects it) with MiniSolrCloudCluster.
+ * {@link TestMiniSolrCloudCluster} does not inherit from {@link SolrTestCaseJ4}
+ * so does not support SSL.
+ */
+public class TestMiniSolrCloudClusterSSL extends SolrTestCaseJ4 {
+
+  private static MiniSolrCloudCluster miniCluster;
+  private static final int NUM_SERVERS = 5;
+
+  @BeforeClass
+  public static void startup() throws Exception {
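+    // sslConfig is inherited from SolrTestCaseJ4, which randomizes whether SSL is
+    // enabled; passing it through makes every Jetty in the cluster honor that choice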
+    String testHome = SolrTestCaseJ4.TEST_HOME();
+    miniCluster = new MiniSolrCloudCluster(NUM_SERVERS, null, createTempDir().toFile(), new File(testHome, "solr-no-core.xml"),
+      null, null, sslConfig);
+  }
+
+  @AfterClass
+  public static void shutdown() throws Exception {
+    if (miniCluster != null) {
+      miniCluster.shutdown();
+    }
+    miniCluster = null;
+  }
+
+  @Test
+  public void testMiniSolrCloudClusterSSL() throws Exception {
+    // test send request to each server
+    sendRequestToEachServer();
+
+    // shut down a server
+    JettySolrRunner stoppedServer = miniCluster.stopJettySolrRunner(0);
+    assertTrue(stoppedServer.isStopped());
+    assertEquals(NUM_SERVERS - 1, miniCluster.getJettySolrRunners().size());
+
+    // create a new server
+    JettySolrRunner startedServer = miniCluster.startJettySolrRunner(null, null, null, sslConfig);
+    assertTrue(startedServer.isRunning());
+    assertEquals(NUM_SERVERS, miniCluster.getJettySolrRunners().size());
+
+    // test send request to each server
+    sendRequestToEachServer();
+  }
+
+  private void sendRequestToEachServer() throws Exception {
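+    // a CoreAdmin STATUS request against each node's base URL confirms that every
+    // Jetty answers on the scheme (http or https) the test framework selected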
+    List<JettySolrRunner> jettys = miniCluster.getJettySolrRunners();
+    for (JettySolrRunner jetty : jettys) {
+      try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString())) {
+        CoreAdminRequest req = new CoreAdminRequest();
+        req.setAction( CoreAdminAction.STATUS );
+        client.request(req);
+      }
+    }
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestModifyConfFiles.java b/solr/core/src/test/org/apache/solr/cloud/TestModifyConfFiles.java
deleted file mode 100644
index cc73065..0000000
--- a/solr/core/src/test/org/apache/solr/cloud/TestModifyConfFiles.java
+++ /dev/null
@@ -1,134 +0,0 @@
-package org.apache.solr.cloud;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.commons.io.FileUtils;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.request.QueryRequest;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-
-import java.io.File;
-import java.nio.charset.StandardCharsets;
-
-import static org.junit.internal.matchers.StringContains.containsString;
-
-public class TestModifyConfFiles extends AbstractFullDistribZkTestBase {
-
-  public TestModifyConfFiles() {
-    super();
-  }
-
-  @Override
-  public void doTest() throws Exception {
-    int which = r.nextInt(clients.size());
-    HttpSolrClient client = (HttpSolrClient) clients.get(which);
-
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("op", "write");
-    params.set("file", "schema.xml");
-    QueryRequest request = new QueryRequest(params);
-    request.setPath("/admin/fileedit");
-    try {
-      client.request(request);
-      fail("Should have caught exception");
-    } catch (Exception e) {
-      assertThat(e.getMessage(), containsString("Input stream list was null for admin file write operation."));
-    }
-
-    params.remove("file");
-    params.set("stream.body", "Testing rewrite of schema.xml file.");
-    params.set("op", "test");
-    request = new QueryRequest(params);
-    request.setPath("/admin/fileedit");
-    try {
-      client.request(request);
-      fail("Should have caught exception");
-    } catch (Exception e) {
-      assertThat(e.getMessage(), containsString("No file name specified for write operation."));
-    }
-
-    params.set("op", "write");
-    params.set("file", "bogus.txt");
-    request = new QueryRequest(params);
-    request.setPath("/admin/fileedit");
-    try {
-      client.request(request);
-      fail("Should have caught exception");
-    } catch (Exception e) {
-      assertThat(e.getMessage(), containsString("Can not access: bogus.txt"));
-    }
-
-    try {
-      params.set("file", "schema.xml");
-      request = new QueryRequest(params);
-      request.setPath("/admin/fileedit");
-      client.request(request);
-      fail("Should have caught exception since it's mal-formed XML");
-    } catch (Exception e) {
-      assertTrue("Should have a sax parser exception here!",
-          e.getMessage().contains("Invalid XML file: org.xml.sax.SAXParseException"));
-    }
-
-    String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
-    params.set("stream.body", FileUtils.readFileToString(new File(top, "schema-tiny.xml"), "UTF-8"));
-    params.set("file", "schema.xml");
-    request = new QueryRequest(params);
-    request.setPath("/admin/fileedit");
-
-    client.request(request);
-
-    SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient();
-    String contents = new String(zkClient.getData("/configs/conf1/schema.xml", null, null, true), StandardCharsets.UTF_8);
-
-    assertTrue("Schema contents should have changed!", contents.contains("<schema name=\"tiny\" version=\"1.1\">"));
-
-    // Create a velocity/whatever node. Put a bit of data in it. See if you can change it.
-    zkClient.makePath("/configs/conf1/velocity/test.vm", false, true);
-
-    params.set("stream.body", "Some bogus stuff for a test.");
-    params.set("file", "velocity/test.vm");
-    request = new QueryRequest(params);
-    request.setPath("/admin/fileedit");
-
-    client.request(request);
-
-    contents = new String(zkClient.getData("/configs/conf1/velocity/test.vm", null, null, true), StandardCharsets.UTF_8);
-    assertTrue("Should have found new content in a velocity/test.vm.",
-        contents.indexOf("Some bogus stuff for a test.") != -1);
-
-    params = new ModifiableSolrParams();
-    request = new QueryRequest(params);
-    request.setPath("/admin/file");
-    NamedList<Object> res = client.request(request);
-
-    NamedList files = (NamedList)res.get("files");
-    assertNotNull("Should have gotten files back", files);
-    SimpleOrderedMap schema = (SimpleOrderedMap)files.get("schema.xml");
-    assertNotNull("Should have a schema returned", schema);
-    assertNull("Schema.xml should not be a directory", schema.get("directory"));
-
-    SimpleOrderedMap velocity = (SimpleOrderedMap)files.get("velocity");
-    assertNotNull("Should have velocity dir returned", velocity);
-
-    assertTrue("Velocity should be a directory", (boolean)velocity.get("directory"));
-  }
-
-}
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java
index 21e819b..9e23a6b9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java
@@ -27,7 +27,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.zookeeper.KeeperException;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -42,15 +42,7 @@
 
   public TestRebalanceLeaders() {
     schemaString = "schema15.xml";      // we need a string id
-  }
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    fixShardCount = true;
     sliceCount = 4;
-    shardCount = 4;
-    super.setUp();
   }
 
   int reps = 10;
@@ -59,12 +51,11 @@
 
   Map<String, Replica> expected = new HashMap<>();
 
-
-  @Override
-  public void doTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     reps = random().nextInt(9) + 1; // make sure and do at least one.
-    try {
+    try (CloudSolrClient client = createCloudClient(null)) {
       // Mix up a bunch of different combinations of shards and replicas in order to exercise boundary cases.
       // shards, replicationfactor, maxreplicaspernode
       int shards = random().nextInt(7);
@@ -72,9 +63,6 @@
       int rFactor = random().nextInt(4);
       if (rFactor < 2) rFactor = 2;
       createCollection(null, COLLECTION_NAME, shards, rFactor, shards * rFactor + 1, client, null, "conf1");
-    } finally {
-      //remove collections
-      client.shutdown();
     }
 
     waitForCollection(cloudClient.getZkStateReader(), COLLECTION_NAME, 2);
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java
index 5c854fa..236ba48 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java
@@ -18,11 +18,6 @@
  */
 
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -36,7 +31,12 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.zookeeper.KeeperException;
-import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 
 @Slow
 public class TestReplicaProperties extends ReplicaPropertiesBase {
@@ -45,21 +45,14 @@
 
   public TestReplicaProperties() {
     schemaString = "schema15.xml";      // we need a string id
-  }
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    fixShardCount = true;
     sliceCount = 2;
-    shardCount = 4;
-    super.setUp();
   }
 
-  @Override
-  public void doTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       // Mix up a bunch of different combinations of shards and replicas in order to exercise boundary cases.
       // shards, replicationfactor, maxreplicaspernode
       int shards = random().nextInt(7);
@@ -67,9 +60,6 @@
       int rFactor = random().nextInt(4);
       if (rFactor < 2) rFactor = 2;
       createCollection(null, COLLECTION_NAME, shards, rFactor, shards * rFactor + 1, client, null, "conf1");
-    } finally {
-      //remove collections
-      client.shutdown();
     }
 
     waitForCollection(cloudClient.getZkStateReader(), COLLECTION_NAME, 2);
@@ -81,8 +71,8 @@
   }
 
   private void listCollection() throws IOException, SolrServerException {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.LIST.toString());
       SolrRequest request = new QueryRequest(params);
@@ -93,16 +83,13 @@
       assertTrue("control_collection was not found in list", collections.contains("control_collection"));
       assertTrue(DEFAULT_COLLECTION + " was not found in list", collections.contains(DEFAULT_COLLECTION));
       assertTrue(COLLECTION_NAME + " was not found in list", collections.contains(COLLECTION_NAME));
-    } finally {
-      //remove collections
-      client.shutdown();
     }
   }
 
 
   private void clusterAssignPropertyTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       client.connect();
       try {
         doPropertyAction(client,
@@ -199,8 +186,6 @@
 
       verifyLeaderAssignment(client, COLLECTION_NAME);
 
-    } finally {
-      client.shutdown();
     }
   }
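
TestReplicaProperties shows the commit's other pervasive rewrite: each createCloudClient(...) / try / finally { client.shutdown(); } sequence becomes a try-with-resources block, which presumes that the SolrJ client classes implement java.io.Closeable on this branch. A sketch of the before/after shape, reusing the call from the hunks above:

    // Before: manual cleanup in a finally block.
    CloudSolrClient client = createCloudClient(null);
    try {
      createCollection(null, COLLECTION_NAME, shards, rFactor,
          shards * rFactor + 1, client, null, "conf1");
    } finally {
      client.shutdown();
    }

    // After: the client is scoped to the block and closed automatically,
    // even when createCollection(...) throws.
    try (CloudSolrClient client = createCloudClient(null)) {
      createCollection(null, COLLECTION_NAME, shards, rFactor,
          shards * rFactor + 1, client, null, "conf1");
    }
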
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRequestStatusCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestRequestStatusCollectionAPI.java
index b63e663..2d23617 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRequestStatusCollectionAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRequestStatusCollectionAPI.java
@@ -24,7 +24,7 @@
 import org.apache.solr.common.params.CollectionParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.junit.Before;
+import org.junit.Test;
 
 import java.io.IOException;
 
@@ -36,14 +36,8 @@
     schemaString = "schema15.xml";      // we need a string id
   }
 
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-
-  }
-
-  public void doTest() {
+  @Test
+  public void test() throws Exception {
     ModifiableSolrParams params = new ModifiableSolrParams();
 
     params.set(CollectionParams.ACTION, CollectionParams.CollectionAction.CREATE.toString());
@@ -55,9 +49,7 @@
     params.set("async", "1000");
     try {
       sendRequest(params);
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -74,9 +66,7 @@
 
     try {
       message = sendStatusRequestWithRetry(params, MAX_WAIT_TIMEOUT_SECONDS);
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -90,9 +80,7 @@
       r = sendRequest(params);
       status = (NamedList) r.get("status");
       message = (String) status.get("msg");
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -105,9 +93,7 @@
     params.set("async", "1001");
     try {
       sendRequest(params);
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -117,9 +103,7 @@
       params.set(OverseerCollectionProcessor.REQUESTID, "1001");
     try {
       message = sendStatusRequestWithRetry(params, MAX_WAIT_TIMEOUT_SECONDS);
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -135,9 +119,7 @@
       params.set("async", "1002");
     try {
       sendRequest(params);
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -149,9 +131,7 @@
 
     try {
       message = sendStatusRequestWithRetry(params, MAX_WAIT_TIMEOUT_SECONDS);
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -168,9 +148,7 @@
       params.set("async", "1002");
     try {
       r = sendRequest(params);
-    } catch (SolrServerException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
+    } catch (SolrServerException | IOException e) {
       e.printStackTrace();
     }
 
@@ -218,8 +196,10 @@
         .getBaseURL();
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
 
-    HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
-    baseClient.setConnectionTimeout(15000);
-    return baseClient.request(request);
+    try (HttpSolrClient baseServer = new HttpSolrClient(baseUrl)) {
+      baseServer.setConnectionTimeout(15000);
+      return baseServer.request(request);
+    }
+
   }
 }
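
TestRequestStatusCollectionAPI additionally collapses each pair of identical catch blocks into a Java 7 multi-catch. The rewrite is mechanical; a sketch of the shape, with the handler body taken from the hunks above:

    // Before: two handlers with the same body.
    try {
      sendRequest(params);
    } catch (SolrServerException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }

    // After: one multi-catch. The alternatives may not be related by
    // subtyping, and the caught variable e is implicitly final.
    try {
      sendRequest(params);
    } catch (SolrServerException | IOException e) {
      e.printStackTrace();
    }
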
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java b/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java
index 8886ec4..babc51d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java
@@ -23,18 +23,18 @@
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.NamedList;
+import org.junit.Test;
 
 public class TestShortCircuitedRequests extends AbstractFullDistribZkTestBase {
 
   public TestShortCircuitedRequests() {
     schemaString = "schema15.xml";      // we need a string id
     super.sliceCount = 4;
-    super.shardCount = 4;
-    super.fixShardCount = true;  // we only want to test with exactly 4 slices.
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     waitForRecoveriesToFinish(false);
     assertEquals(4, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getSlices().size());
     index("id", "a!doc1");  // shard3
diff --git a/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java b/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java
index 9419c53..a2065fc 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TriLevelCompositeIdRoutingTest.java
@@ -21,6 +21,7 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.util.HashMap;
 import java.util.HashSet;
@@ -47,14 +48,12 @@
 
   public TriLevelCompositeIdRoutingTest() {
     schemaString = "schema15.xml";      // we need a string id
-    super.sliceCount = TEST_NIGHTLY ? 12 : 2;             // a lot of slices for more ranges and buckets
-    super.shardCount = TEST_NIGHTLY ? 24 : 3;
-    super.fixShardCount = true;
-
+    sliceCount = TEST_NIGHTLY ? 12 : 2;             // a lot of slices for more ranges and buckets
+    fixShardCount(TEST_NIGHTLY ? 24 : 3);
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     boolean testFinished = false;
     try {
       handle.clear();
@@ -74,7 +73,6 @@
     }
   }
 
-
   private void doTriLevelHashingTest() throws Exception {
     log.info("### STARTING doTriLevelHashingTest");
     // for now,  we know how ranges will be distributed to shards.
@@ -153,10 +151,4 @@
   private String getKey(String id) {
     return id.substring(0, id.lastIndexOf('!'));
   }
-
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-
 }
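
TriLevelCompositeIdRoutingTest also drops an override that did nothing but delegate to its superclass. Such overrides are dead weight, since JUnit runs the inherited hook regardless:

    // Removable: overriding only to call super changes nothing.
    @Override
    public void tearDown() throws Exception {
      super.tearDown();
    }
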
diff --git a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java
index f2c1291..3e9097e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java
@@ -29,10 +29,10 @@
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.util.DefaultSolrThreadFactory;
-import org.junit.Before;
-import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
@@ -48,17 +48,6 @@
 @Slow
 @SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
 public class UnloadDistributedZkTest extends BasicDistributedZkTest {
-  
-  @BeforeClass
-  public static void beforeThisClass3() throws Exception {
- 
-  }
-  
-  @Before
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-  }
 
   protected String getSolrXml() {
     return "solr-no-core.xml";
@@ -68,9 +57,9 @@
     super();
     checkCreatedVsState = false;
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     
     testCoreUnloadAndLeaders(); // long
     testUnloadLotsOfCores(); // long
@@ -94,51 +83,51 @@
     
     SolrClient client = clients.get(0);
     String url1 = getBaseUrl(client);
-    HttpSolrClient adminClient = new HttpSolrClient(url1);
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(60000);
-    adminClient.request(createCmd);
-    
-    createCmd = new Create();
-    createCmd.setCoreName("test_unload_shard_and_collection_2");
-    collection = "test_unload_shard_and_collection";
-    createCmd.setCollection(collection);
-    coreDataDir = createTempDir().toFile().getAbsolutePath();
-    createCmd.setDataDir(getDataDir(coreDataDir));
-    
-    adminClient.request(createCmd);
-    
-    // does not mean they are active and up yet :*
-    waitForRecoveriesToFinish(collection, false);
 
-    // now unload one of the two
-    Unload unloadCmd = new Unload(false);
-    unloadCmd.setCoreName("test_unload_shard_and_collection_2");
-    adminClient.request(unloadCmd);
-    
-    // there should be only one shard
-    int slices = getCommonCloudSolrClient().getZkStateReader().getClusterState().getSlices(collection).size();
-    long timeoutAt = System.currentTimeMillis() + 45000;
-    while (slices != 1) {
-      if (System.currentTimeMillis() > timeoutAt) {
-        printLayout();
-        fail("Expected to find only one slice in " + collection);
+    try (HttpSolrClient adminClient = new HttpSolrClient(url1)) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(60000);
+      adminClient.request(createCmd);
+
+      createCmd = new Create();
+      createCmd.setCoreName("test_unload_shard_and_collection_2");
+      collection = "test_unload_shard_and_collection";
+      createCmd.setCollection(collection);
+      coreDataDir = createTempDir().toFile().getAbsolutePath();
+      createCmd.setDataDir(getDataDir(coreDataDir));
+
+      adminClient.request(createCmd);
+
+      // does not mean they are active and up yet :*
+      waitForRecoveriesToFinish(collection, false);
+
+      // now unload one of the two
+      Unload unloadCmd = new Unload(false);
+      unloadCmd.setCoreName("test_unload_shard_and_collection_2");
+      adminClient.request(unloadCmd);
+
+      // there should be only one shard
+      int slices = getCommonCloudSolrClient().getZkStateReader().getClusterState().getSlices(collection).size();
+      long timeoutAt = System.currentTimeMillis() + 45000;
+      while (slices != 1) {
+        if (System.currentTimeMillis() > timeoutAt) {
+          printLayout();
+          fail("Expected to find only one slice in " + collection);
+        }
+
+        Thread.sleep(1000);
+        slices = getCommonCloudSolrClient().getZkStateReader().getClusterState().getSlices(collection).size();
       }
-      
-      Thread.sleep(1000);
-      slices = getCommonCloudSolrClient().getZkStateReader().getClusterState().getSlices(collection).size();
+
+      // now unload one of the other
+      unloadCmd = new Unload(false);
+      unloadCmd.setCoreName("test_unload_shard_and_collection_1");
+      adminClient.request(unloadCmd);
     }
-    
-    // now unload one of the other
-    unloadCmd = new Unload(false);
-    unloadCmd.setCoreName("test_unload_shard_and_collection_1");
-    adminClient.request(unloadCmd);
-    adminClient.shutdown();
-    adminClient = null;
-    
+
     //printLayout();
     // the collection should be gone
-    timeoutAt = System.currentTimeMillis() + 30000;
+    long timeoutAt = System.currentTimeMillis() + 30000;
     while (getCommonCloudSolrClient().getZkStateReader().getClusterState().hasCollection(collection)) {
       if (System.currentTimeMillis() > timeoutAt) {
         printLayout();
@@ -155,24 +144,23 @@
    */
   private void testCoreUnloadAndLeaders() throws Exception {
     File tmpDir = createTempDir().toFile();
-    
+
+    String core1DataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection1" + "_1n";
+
     // create a new collection collection
     SolrClient client = clients.get(0);
     String url1 = getBaseUrl(client);
-    HttpSolrClient adminClient = new HttpSolrClient(url1);
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(60000);
-    
-    Create createCmd = new Create();
-    createCmd.setCoreName("unloadcollection1");
-    createCmd.setCollection("unloadcollection");
-    createCmd.setNumShards(1);
-    String core1DataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection1" + "_1n";
-    createCmd.setDataDir(getDataDir(core1DataDir));
-    adminClient.request(createCmd);
-    adminClient.shutdown();
-    adminClient = null;
-    
+    try (HttpSolrClient adminClient = new HttpSolrClient(url1)) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(60000);
+
+      Create createCmd = new Create();
+      createCmd.setCoreName("unloadcollection1");
+      createCmd.setCollection("unloadcollection");
+      createCmd.setNumShards(1);
+      createCmd.setDataDir(getDataDir(core1DataDir));
+      adminClient.request(createCmd);
+    }
     ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader();
     
     zkStateReader.updateClusterState(true);
@@ -182,17 +170,15 @@
     
     client = clients.get(1);
     String url2 = getBaseUrl(client);
-    adminClient = new HttpSolrClient(url2);
-    
-    createCmd = new Create();
-    createCmd.setCoreName("unloadcollection2");
-    createCmd.setCollection("unloadcollection");
-    String core2dataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection1" + "_2n";
-    createCmd.setDataDir(getDataDir(core2dataDir));
-    adminClient.request(createCmd);
-    adminClient.shutdown();
-    adminClient = null;
-    
+    try (HttpSolrClient adminClient = new HttpSolrClient(url2)) {
+
+      Create createCmd = new Create();
+      createCmd.setCoreName("unloadcollection2");
+      createCmd.setCollection("unloadcollection");
+      String core2dataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection1" + "_2n";
+      createCmd.setDataDir(getDataDir(core2dataDir));
+      adminClient.request(createCmd);
+    }
     zkStateReader.updateClusterState(true);
     slices = zkStateReader.getClusterState().getCollection("unloadcollection").getSlices().size();
     assertEquals(1, slices);
@@ -202,73 +188,64 @@
     ZkCoreNodeProps leaderProps = getLeaderUrlFromZk("unloadcollection", "shard1");
     
     Random random = random();
-    HttpSolrClient collectionClient;
     if (random.nextBoolean()) {
-      collectionClient = new HttpSolrClient(leaderProps.getCoreUrl());
-      // lets try and use the solrj client to index and retrieve a couple
-      // documents
-      SolrInputDocument doc1 = getDoc(id, 6, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall");
-      SolrInputDocument doc2 = getDoc(id, 7, i1, -600, tlong, 600, t1,
-          "humpty dumpy3 sat on a walls");
-      SolrInputDocument doc3 = getDoc(id, 8, i1, -600, tlong, 600, t1,
-          "humpty dumpy2 sat on a walled");
-      collectionClient.add(doc1);
-      collectionClient.add(doc2);
-      collectionClient.add(doc3);
-      collectionClient.commit();
-      collectionClient.shutdown();
-      collectionClient = null;
+      try (HttpSolrClient collectionClient = new HttpSolrClient(leaderProps.getCoreUrl())) {
+        // let's try to use the solrj client to index and retrieve a couple
+        // of documents
+        SolrInputDocument doc1 = getDoc(id, 6, i1, -600, tlong, 600, t1,
+            "humpty dumpy sat on a wall");
+        SolrInputDocument doc2 = getDoc(id, 7, i1, -600, tlong, 600, t1,
+            "humpty dumpy3 sat on a walls");
+        SolrInputDocument doc3 = getDoc(id, 8, i1, -600, tlong, 600, t1,
+            "humpty dumpy2 sat on a walled");
+        collectionClient.add(doc1);
+        collectionClient.add(doc2);
+        collectionClient.add(doc3);
+        collectionClient.commit();
+      }
     }
 
     // create another replica for our collection
     client = clients.get(2);
     String url3 = getBaseUrl(client);
-    adminClient = new HttpSolrClient(url3);
-    
-    createCmd = new Create();
-    createCmd.setCoreName("unloadcollection3");
-    createCmd.setCollection("unloadcollection");
-    String core3dataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection" + "_3n";
-    createCmd.setDataDir(getDataDir(core3dataDir));
-    adminClient.request(createCmd);
-    adminClient.shutdown();
-    adminClient = null;
-    
+    try (HttpSolrClient adminClient = new HttpSolrClient(url3)) {
+      Create createCmd = new Create();
+      createCmd.setCoreName("unloadcollection3");
+      createCmd.setCollection("unloadcollection");
+      String core3dataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection" + "_3n";
+      createCmd.setDataDir(getDataDir(core3dataDir));
+      adminClient.request(createCmd);
+    }
     
     waitForRecoveriesToFinish("unloadcollection", zkStateReader, false);
     
     // so that we start with some versions when we reload...
     DirectUpdateHandler2.commitOnClose = false;
     
-    HttpSolrClient addClient = new HttpSolrClient(url3 + "/unloadcollection3");
-    addClient.setConnectionTimeout(30000);
+    try (HttpSolrClient addClient = new HttpSolrClient(url3 + "/unloadcollection3")) {
+      addClient.setConnectionTimeout(30000);
 
-    // add a few docs
-    for (int x = 20; x < 100; x++) {
-      SolrInputDocument doc1 = getDoc(id, x, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall");
-      addClient.add(doc1);
+      // add a few docs
+      for (int x = 20; x < 100; x++) {
+        SolrInputDocument doc1 = getDoc(id, x, i1, -600, tlong, 600, t1,
+            "humpty dumpy sat on a wall");
+        addClient.add(doc1);
+      }
     }
-    addClient.shutdown();
-    addClient = null;
-
     // don't commit so they remain in the tran log
     //collectionClient.commit();
     
     // unload the leader
-    collectionClient = new HttpSolrClient(leaderProps.getBaseUrl());
-    collectionClient.setConnectionTimeout(15000);
-    collectionClient.setSoTimeout(30000);
-    
-    Unload unloadCmd = new Unload(false);
-    unloadCmd.setCoreName(leaderProps.getCoreName());
-    ModifiableSolrParams p = (ModifiableSolrParams) unloadCmd.getParams();
+    try (HttpSolrClient collectionClient = new HttpSolrClient(leaderProps.getBaseUrl())) {
+      collectionClient.setConnectionTimeout(15000);
+      collectionClient.setSoTimeout(30000);
 
-    collectionClient.request(unloadCmd);
-    collectionClient.shutdown();
-    collectionClient = null;
+      Unload unloadCmd = new Unload(false);
+      unloadCmd.setCoreName(leaderProps.getCoreName());
+      ModifiableSolrParams p = (ModifiableSolrParams) unloadCmd.getParams();
 
+      collectionClient.request(unloadCmd);
+    }
 //    Thread.currentThread().sleep(500);
 //    printLayout();
     
@@ -283,51 +260,45 @@
     // ensure there is a leader
     zkStateReader.getLeaderRetry("unloadcollection", "shard1", 15000);
     
-    addClient = new HttpSolrClient(url2 + "/unloadcollection2");
-    addClient.setConnectionTimeout(30000);
-    addClient.setSoTimeout(90000);
-    
-    // add a few docs while the leader is down
-    for (int x = 101; x < 200; x++) {
-      SolrInputDocument doc1 = getDoc(id, x, i1, -600, tlong, 600, t1,
-          "humpty dumpy sat on a wall");
-      addClient.add(doc1);
+    try (HttpSolrClient addClient = new HttpSolrClient(url2 + "/unloadcollection2")) {
+      addClient.setConnectionTimeout(30000);
+      addClient.setSoTimeout(90000);
+
+      // add a few docs while the leader is down
+      for (int x = 101; x < 200; x++) {
+        SolrInputDocument doc1 = getDoc(id, x, i1, -600, tlong, 600, t1,
+            "humpty dumpy sat on a wall");
+        addClient.add(doc1);
+      }
     }
-    addClient.shutdown();
-    addClient = null;
-    
     
     // create another replica for our collection
     client = clients.get(3);
     String url4 = getBaseUrl(client);
-    adminClient = new HttpSolrClient(url4);
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(30000);
-    
-    createCmd = new Create();
-    createCmd.setCoreName("unloadcollection4");
-    createCmd.setCollection("unloadcollection");
-    String core4dataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection" + "_4n";
-    createCmd.setDataDir(getDataDir(core4dataDir));
-    adminClient.request(createCmd);
-    adminClient.shutdown();
-    adminClient = null;
-    
+    try (HttpSolrClient adminClient = new HttpSolrClient(url4)) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(30000);
+
+      Create createCmd = new Create();
+      createCmd.setCoreName("unloadcollection4");
+      createCmd.setCollection("unloadcollection");
+      String core4dataDir = tmpDir.getAbsolutePath() + File.separator + System.currentTimeMillis() + "unloadcollection" + "_4n";
+      createCmd.setDataDir(getDataDir(core4dataDir));
+      adminClient.request(createCmd);
+    }
     waitForRecoveriesToFinish("unloadcollection", zkStateReader, false);
     
     // unload the leader again
     leaderProps = getLeaderUrlFromZk("unloadcollection", "shard1");
-    collectionClient = new HttpSolrClient(leaderProps.getBaseUrl());
-    collectionClient.setConnectionTimeout(15000);
-    collectionClient.setSoTimeout(30000);
-    
-    unloadCmd = new Unload(false);
-    unloadCmd.setCoreName(leaderProps.getCoreName());
-    p = (ModifiableSolrParams) unloadCmd.getParams();
-    collectionClient.request(unloadCmd);
-    collectionClient.shutdown();
-    collectionClient = null;
-    
+    try (HttpSolrClient collectionClient = new HttpSolrClient(leaderProps.getBaseUrl())) {
+      collectionClient.setConnectionTimeout(15000);
+      collectionClient.setSoTimeout(30000);
+
+      Unload unloadCmd = new Unload(false);
+      unloadCmd.setCoreName(leaderProps.getCoreName());
+      SolrParams p = (ModifiableSolrParams) unloadCmd.getParams();
+      collectionClient.request(unloadCmd);
+    }
     tries = 50;
     while (leaderProps.getCoreUrl().equals(zkStateReader.getLeaderUrl("unloadcollection", "shard1", 15000))) {
       Thread.sleep(100);
@@ -343,99 +314,90 @@
     DirectUpdateHandler2.commitOnClose = true;
     
     // bring the downed leader back as replica
-    adminClient = new HttpSolrClient(leaderProps.getBaseUrl());
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(30000);
-    
-    createCmd = new Create();
-    createCmd.setCoreName(leaderProps.getCoreName());
-    createCmd.setCollection("unloadcollection");
-    createCmd.setDataDir(getDataDir(core1DataDir));
-    adminClient.request(createCmd);
-    adminClient.shutdown();
-    adminClient = null;
+    try (HttpSolrClient adminClient = new HttpSolrClient(leaderProps.getBaseUrl())) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(30000);
 
+      Create createCmd = new Create();
+      createCmd.setCoreName(leaderProps.getCoreName());
+      createCmd.setCollection("unloadcollection");
+      createCmd.setDataDir(getDataDir(core1DataDir));
+      adminClient.request(createCmd);
+    }
     waitForRecoveriesToFinish("unloadcollection", zkStateReader, false);
+
+    long found1, found3;
     
-    adminClient = new HttpSolrClient(url2 + "/unloadcollection");
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(30000);
-    adminClient.commit();
-    SolrQuery q = new SolrQuery("*:*");
-    q.set("distrib", false);
-    long found1 = adminClient.query(q).getResults().getNumFound();
-    adminClient.shutdown();
-    adminClient = new HttpSolrClient(url3 + "/unloadcollection");
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(30000);
-    adminClient.commit();
-    q = new SolrQuery("*:*");
-    q.set("distrib", false);
-    long found3 = adminClient.query(q).getResults().getNumFound();
-    adminClient.shutdown();
-    adminClient = new HttpSolrClient(url4 + "/unloadcollection");
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(30000);
-    adminClient.commit();
-    q = new SolrQuery("*:*");
-    q.set("distrib", false);
-    long found4 = adminClient.query(q).getResults().getNumFound();
-    
-    // all 3 shards should now have the same number of docs
-    assertEquals(found1, found3);
-    assertEquals(found3, found4);
-    adminClient.shutdown();
+    try (HttpSolrClient adminClient = new HttpSolrClient(url2 + "/unloadcollection")) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(30000);
+      adminClient.commit();
+      SolrQuery q = new SolrQuery("*:*");
+      q.set("distrib", false);
+      found1 = adminClient.query(q).getResults().getNumFound();
+    }
+    try (HttpSolrClient adminClient = new HttpSolrClient(url3 + "/unloadcollection")) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(30000);
+      adminClient.commit();
+      SolrQuery q = new SolrQuery("*:*");
+      q.set("distrib", false);
+      found3 = adminClient.query(q).getResults().getNumFound();
+    }
+
+    try (HttpSolrClient adminClient = new HttpSolrClient(url4 + "/unloadcollection")) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(30000);
+      adminClient.commit();
+      SolrQuery q = new SolrQuery("*:*");
+      q.set("distrib", false);
+      long found4 = adminClient.query(q).getResults().getNumFound();
+
+      // all 3 shards should now have the same number of docs
+      assertEquals(found1, found3);
+      assertEquals(found3, found4);
+    }
     
   }
   
   private void testUnloadLotsOfCores() throws Exception {
     SolrClient client = clients.get(2);
     String url3 = getBaseUrl(client);
-    final HttpSolrClient adminClient = new HttpSolrClient(url3);
-    adminClient.setConnectionTimeout(15000);
-    adminClient.setSoTimeout(60000);
-    ThreadPoolExecutor executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE,
-        5, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
-        new DefaultSolrThreadFactory("testExecutor"));
-    int cnt = atLeast(3);
-    
-    // create the cores
-    createCores(adminClient, executor, "multiunload", 2, cnt);
-    
-    executor.shutdown();
-    executor.awaitTermination(120, TimeUnit.SECONDS);
-    executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 5,
-        TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
-        new DefaultSolrThreadFactory("testExecutor"));
-    for (int j = 0; j < cnt; j++) {
-      final int freezeJ = j;
-      executor.execute(new Runnable() {
-        @Override
-        public void run() {
-          Unload unloadCmd = new Unload(true);
-          unloadCmd.setCoreName("multiunload" + freezeJ);
-          try {
-            adminClient.request(unloadCmd);
-          } catch (SolrServerException e) {
-            throw new RuntimeException(e);
-          } catch (IOException e) {
-            throw new RuntimeException(e);
+    try (final HttpSolrClient adminClient = new HttpSolrClient(url3)) {
+      adminClient.setConnectionTimeout(15000);
+      adminClient.setSoTimeout(60000);
+      ThreadPoolExecutor executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE,
+          5, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
+          new DefaultSolrThreadFactory("testExecutor"));
+      int cnt = atLeast(3);
+
+      // create the cores
+      createCores(adminClient, executor, "multiunload", 2, cnt);
+
+      executor.shutdown();
+      executor.awaitTermination(120, TimeUnit.SECONDS);
+      executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 5,
+          TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
+          new DefaultSolrThreadFactory("testExecutor"));
+      for (int j = 0; j < cnt; j++) {
+        final int freezeJ = j;
+        executor.execute(new Runnable() {
+          @Override
+          public void run() {
+            Unload unloadCmd = new Unload(true);
+            unloadCmd.setCoreName("multiunload" + freezeJ);
+            try {
+              adminClient.request(unloadCmd);
+            } catch (SolrServerException | IOException e) {
+              throw new RuntimeException(e);
+            }
           }
-        }
-      });
-      Thread.sleep(random().nextInt(50));
+        });
+        Thread.sleep(random().nextInt(50));
+      }
+      executor.shutdown();
+      executor.awaitTermination(120, TimeUnit.SECONDS);
     }
-    executor.shutdown();
-    executor.awaitTermination(120, TimeUnit.SECONDS);
-    adminClient.shutdown();
   }
 
-
-
-
-  
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
 }
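
One wrinkle of the try-with-resources conversion shows up in testCoreUnloadAndLeaders() above: values computed inside a block (found1, found3) are still needed after the client closes, so their declarations are hoisted out of it. A sketch of the idiom, with url2 standing in for any of the base URLs used above:

    long found1;
    try (HttpSolrClient adminClient = new HttpSolrClient(url2 + "/unloadcollection")) {
      adminClient.commit();
      SolrQuery q = new SolrQuery("*:*");
      q.set("distrib", false);    // count only this core's documents
      found1 = adminClient.query(q).getResults().getNumFound();
    }
    // found1 remains in scope here, after the client has been closed.
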
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java
index c873287..2c95ce9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkCLITest.java
@@ -17,14 +17,6 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.Collection;
-import java.util.List;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.filefilter.RegexFileFilter;
@@ -44,6 +36,14 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.Collection;
+import java.util.List;
+
 // TODO: This test would be a lot faster if it used a solrhome with fewer config
 // files - there are a lot of them to upload
 public class ZkCLITest extends SolrTestCaseJ4 {
@@ -78,19 +78,10 @@
     log.info("####SETUP_START " + getTestName());
 
     String exampleHome = SolrJettyTestBase.legacyExampleCollection1SolrHome();
-    
-    boolean useNewSolrXml = random().nextBoolean();
+
     File tmpDir = createTempDir().toFile();
-    if (useNewSolrXml) {
-      solrHome = exampleHome;
-    } else {
-      File tmpSolrHome = new File(tmpDir, "tmp-solr-home");
-      FileUtils.copyDirectory(new File(exampleHome), tmpSolrHome);
-      FileUtils.copyFile(getFile("old-solr-example/solr.xml"), new File(tmpSolrHome, "solr.xml"));
-      solrHome = tmpSolrHome.getAbsolutePath();
-    }
-    
-    
+    solrHome = exampleHome;
+
     zkDir = tmpDir.getAbsolutePath() + File.separator
         + "zookeeper/server1/data";
     log.info("ZooKeeper dataDir:" + zkDir);
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
index bb63b1c..a9f1bce 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
@@ -17,12 +17,6 @@
  * the License.
  */
 
-import java.io.File;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.SolrTestCaseJ4;
@@ -33,16 +27,21 @@
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.CoresLocator;
+import org.apache.solr.core.PluginInfo;
 import org.apache.solr.handler.admin.CoreAdminHandler;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
-import org.apache.solr.handler.component.ShardHandlerFactory;
 import org.apache.solr.update.UpdateShardHandler;
-import org.apache.solr.util.ExternalPaths;
 import org.apache.zookeeper.CreateMode;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.io.File;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 @Slow
 public class ZkControllerTest extends SolrTestCaseJ4 {
 
@@ -323,7 +322,7 @@
     
     @Override
     public ConfigSolr getConfig() {
-      return new ConfigSolr() {
+      return new ConfigSolr(null, null) {
 
         @Override
         public CoresLocator getCoresLocator() {
@@ -331,25 +330,22 @@
         }
 
         @Override
-        protected String getShardHandlerFactoryConfigPath() {
-          throw new UnsupportedOperationException();
+        public PluginInfo getShardHandlerFactoryPluginInfo() {
+          return null;
         }
 
         @Override
-        public boolean isPersistent() {
-          throw new UnsupportedOperationException();
-        }};
+        protected String getProperty(CfgProp key) {
+          return null;
+        }
+
+      };
     }
     
     @Override
     public UpdateShardHandler getUpdateShardHandler() {
       return new UpdateShardHandler(null);
     }
-    
-    @Override
-    public String getAdminPath() {
-      return "/admin/cores";
-    }
 
   }
 }
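
The ZkControllerTest hunk tracks a refactored ConfigSolr base class: the no-argument construction gives way to a two-argument constructor (called with nulls here), and the stub overrides the new extension points instead of the removed ones. A sketch of the minimal anonymous stub, with the override bodies assumed from what the test needs:

    ConfigSolr stub = new ConfigSolr(null, null) {
      @Override
      public CoresLocator getCoresLocator() {
        return null;                            // not exercised by this test
      }
      @Override
      public PluginInfo getShardHandlerFactoryPluginInfo() {
        return null;                            // use the default factory
      }
      @Override
      protected String getProperty(CfgProp key) {
        return null;                            // fall back to defaults
      }
    };
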
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java
index 283b7d1..267a682 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkSolrClientTest.java
@@ -176,9 +176,7 @@
         try {
           zkClient.makePath("collections/collection4", true);
           break;
-        } catch (KeeperException.SessionExpiredException e) {
-
-        } catch (KeeperException.ConnectionLossException e) {
+        } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) {
 
         }
         Thread.sleep(1000 * i);
@@ -289,9 +287,7 @@
           try {
             zkClient.getChildren("/collections", this, true);
             latch.countDown();
-          } catch (KeeperException e) {
-            throw new RuntimeException(e);
-          } catch (InterruptedException e) {
+          } catch (KeeperException | InterruptedException e) {
             throw new RuntimeException(e);
           }
         }
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java
index c39de4e..eb4a48b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsChaosMonkeySafeLeaderTest.java
@@ -23,7 +23,6 @@
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.ChaosMonkeySafeLeaderTest;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
 
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
@@ -47,10 +46,9 @@
     dfsCluster = null;
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     
     // super class may hard code directory
     useFactory("org.apache.solr.core.HdfsDirectoryFactory");
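
The HDFS test subclasses track a renamed base-class lifecycle: per-run setup moves from JUnit's setUp() into the framework's distribSetUp() hook, so the @Before annotation disappears. A sketch of the renamed pair (the matching distribTearDown() hook and resetFactory() helper are assumed to exist on this branch):

    @Override
    public void distribSetUp() throws Exception {
      super.distribSetUp();
      useFactory("org.apache.solr.core.HdfsDirectoryFactory");
    }

    @Override
    public void distribTearDown() throws Exception {
      resetFactory();           // undo the forced directory factory
      super.distribTearDown();
    }
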
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
index c777e06..07945db 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.CollectionsAPIDistributedZkTest;
+import org.apache.solr.update.HdfsUpdateLog;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -46,6 +47,7 @@
   
   @AfterClass
   public static void teardownClass() throws Exception {
+    assertEquals(0, HdfsUpdateLog.INIT_FAILED_LOGS_COUNT.get());
     HdfsTestUtil.teardownClass(dfsCluster);
     System.clearProperty("solr.hdfs.home");
     System.clearProperty("solr.hdfs.blockcache.enabled");
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsSyncSliceTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsSyncSliceTest.java
index 8f51c1a..13d9cff 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsSyncSliceTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsSyncSliceTest.java
@@ -23,7 +23,6 @@
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.cloud.SyncSliceTest;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
 
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
@@ -46,14 +45,7 @@
     HdfsTestUtil.teardownClass(dfsCluster);
     dfsCluster = null;
   }
-  
-  @Before
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-  }
 
-  
   @Override
   protected String getDataDir(String dataDir) throws IOException {
     return HdfsTestUtil.getDataDir(dfsCluster, dataDir);
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
index 4173206..2443e60 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
@@ -10,10 +10,14 @@
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.util.IOUtils;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -38,7 +42,13 @@
   
   private static Map<MiniDFSCluster,Timer> timers = new ConcurrentHashMap<>();
 
+  private static FSDataOutputStream badTlogOutStream;
+
   public static MiniDFSCluster setupClass(String dir) throws Exception {
+    return setupClass(dir, true);
+  }
+  
+  public static MiniDFSCluster setupClass(String dir, boolean safeModeTesting) throws Exception {
     LuceneTestCase.assumeFalse("HDFS tests were disabled by -Dtests.disableHdfs",
       Boolean.parseBoolean(System.getProperty("tests.disableHdfs", "false")));
 
@@ -65,23 +75,32 @@
     
     final MiniDFSCluster dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
     dfsCluster.waitActive();
-
+    
     System.setProperty("solr.hdfs.home", getDataDir(dfsCluster, "solr_hdfs_home"));
     
-    
-    NameNodeAdapter.enterSafeMode(dfsCluster.getNameNode(), false);
-    
-    int rnd = LuceneTestCase.random().nextInt(10000);
-    Timer timer = new Timer();
-    timer.schedule(new TimerTask() {
+    int rndMode = LuceneTestCase.random().nextInt(10);
+    if (safeModeTesting && rndMode > 4) {
+      NameNodeAdapter.enterSafeMode(dfsCluster.getNameNode(), false);
       
-      @Override
-      public void run() {
-        NameNodeAdapter.leaveSafeMode(dfsCluster.getNameNode());
-      }
-    }, rnd);
-    
-    timers.put(dfsCluster, timer);
+      int rnd = LuceneTestCase.random().nextInt(10000);
+      Timer timer = new Timer();
+      timer.schedule(new TimerTask() {
+        
+        @Override
+        public void run() {
+          NameNodeAdapter.leaveSafeMode(dfsCluster.getNameNode());
+        }
+      }, rnd);
+      
+      timers.put(dfsCluster, timer);
+    } else {
+      // force a lease recovery by creating a tlog file and not closing it
+      URI uri = dfsCluster.getURI();
+      Path hdfsDirPath = new Path(uri.toString() + "/solr/collection1/core_node1/data/tlog/tlog.0000000000000000000");
+      // simulate a transaction log that is already being created, to exercise lease recovery
+      FileSystem fs = FileSystem.newInstance(hdfsDirPath.toUri(), conf);
+      badTlogOutStream = fs.create(hdfsDirPath);
+    }
     
     SolrTestCaseJ4.useFactory("org.apache.solr.core.HdfsDirectoryFactory");
     
@@ -100,6 +119,10 @@
       dfsCluster.shutdown();
     }
     
+    if (badTlogOutStream != null) {
+      IOUtils.closeQuietly(badTlogOutStream);
+    }
+    
     // TODO: we HACK around HADOOP-9643
     if (savedLocale != null) {
       Locale.setDefault(savedLocale);
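
Net effect of the HdfsTestUtil change: setupClass(dir) no longer always puts the namenode into safe mode. Roughly half the runs still do (leaving safe mode again on a timer after a random delay), while the rest leave a tlog file open so that a core starting against it must perform HDFS lease recovery. In outline:

    if (safeModeTesting && LuceneTestCase.random().nextInt(10) > 4) {
      NameNodeAdapter.enterSafeMode(dfsCluster.getNameNode(), false);
      // a Timer calls leaveSafeMode() after a random delay under 10s
    } else {
      // an open, never-closed tlog forces lease recovery; the stream is
      // closed quietly in teardownClass()
      Path tlog = new Path(dfsCluster.getURI()
          + "/solr/collection1/core_node1/data/tlog/tlog.0000000000000000000");
      badTlogOutStream = FileSystem.newInstance(tlog.toUri(), conf).create(tlog);
    }
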
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsWriteToMultipleCollectionsTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsWriteToMultipleCollectionsTest.java
index 0d9ae42..0aedc11 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsWriteToMultipleCollectionsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsWriteToMultipleCollectionsTest.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.NRTCachingDirectory;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase.Nightly;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -40,6 +41,7 @@
 import org.apache.solr.util.RefCounted;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -77,15 +79,15 @@
   public HdfsWriteToMultipleCollectionsTest() {
     super();
     sliceCount = 1;
-    shardCount = 3;
+    fixShardCount(3);
   }
   
   protected String getSolrXml() {
     return "solr-no-core.xml";
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     int docCount = random().nextInt(1313) + 1;
     int cnt = random().nextInt(4) + 1;
     for (int i = 0; i < cnt; i++) {
@@ -116,10 +118,8 @@
       client.commit();
       collectionsCount += client.query(new SolrQuery("*:*")).getResults().getNumFound();
     }
-    
-    for (CloudSolrClient client : cloudClients) {
-      client.shutdown();
-    }
+
+    IOUtils.close(cloudClients);
 
     assertEquals(addCnt, collectionsCount);
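
The per-client shutdown loop is replaced with Lucene's org.apache.lucene.util.IOUtils.close(Iterable), which attempts to close every element and rethrows the first failure only after trying the rest:

    // Before: a failure closing one client skipped all the others.
    for (CloudSolrClient client : cloudClients) {
      client.shutdown();
    }

    // After: every client is closed; the first exception, if any, is
    // rethrown once all close() calls have been attempted.
    IOUtils.close(cloudClients);
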
     
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java
index cdbd348..59dc8f7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java
@@ -17,14 +17,8 @@
 
 package org.apache.solr.cloud.hdfs;
 
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Timer;
-import java.util.TimerTask;
-
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -32,8 +26,8 @@
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.QueryRequest;
@@ -45,9 +39,15 @@
 import org.apache.zookeeper.KeeperException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Timer;
+import java.util.TimerTask;
 
 @Slow
 @ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
@@ -80,16 +80,16 @@
   public StressHdfsTest() {
     super();
     sliceCount = 1;
-    shardCount = TEST_NIGHTLY ? 7 : random().nextInt(2) + 1;
+    fixShardCount(TEST_NIGHTLY ? 7 : random().nextInt(2) + 1);
     testRestartIntoSafeMode = random().nextBoolean();
   }
   
   protected String getSolrXml() {
     return "solr-no-core.xml";
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     randomlyEnableAutoSoftCommit();
     
     int cnt = random().nextInt(2) + 1;
@@ -138,11 +138,11 @@
     int nShards;
     int maxReplicasPerNode;
     if (overshard) {
-      nShards = shardCount * 2;
+      nShards = getShardCount() * 2;
       maxReplicasPerNode = 8;
       rep = 1;
     } else {
-      nShards = shardCount / 2;
+      nShards = getShardCount() / 2;
       maxReplicasPerNode = 1;
       rep = 2;
       if (nShards == 0) nShards = 1;
@@ -163,8 +163,7 @@
     
     int i = 0;
     for (SolrClient client : clients) {
-      HttpSolrClient c = new HttpSolrClient(getBaseUrl(client) + "/" + DELETE_DATA_DIR_COLLECTION);
-      try {
+      try (HttpSolrClient c = new HttpSolrClient(getBaseUrl(client) + "/" + DELETE_DATA_DIR_COLLECTION)) {
         int docCnt = random().nextInt(1000) + 1;
         for (int j = 0; j < docCnt; j++) {
           c.add(getDoc("id", i++, "txt_t", "just some random text for a doc"));
@@ -182,8 +181,6 @@
         NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
         String dataDir = (String) ((NamedList<Object>) coreInfo.get("directory")).get("data");
         dataDirs.add(dataDir);
-      } finally {
-        c.shutdown();
       }
     }
     
diff --git a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java
index bb2bb36..70fc109 100644
--- a/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/overseer/ZkStateWriterTest.java
@@ -18,6 +18,7 @@
  */
 
 import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.SolrTestCaseJ4;
@@ -28,6 +29,7 @@
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.DocRouter;
+import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkStateReader;
 
@@ -116,4 +118,47 @@
     }
   }
 
+  public void testSingleExternalCollection() throws Exception {
+    String zkDir = createTempDir("testSingleExternalCollection").toFile().getAbsolutePath();
+
+    ZkTestServer server = new ZkTestServer(zkDir);
+
+    SolrZkClient zkClient = null;
+
+    try {
+      server.run();
+      AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
+      AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
+
+      zkClient = new SolrZkClient(server.getZkAddress(), OverseerTest.DEFAULT_CONNECTION_TIMEOUT);
+      zkClient.makePath(ZkStateReader.LIVE_NODES_ZKNODE, true);
+
+      ZkStateReader reader = new ZkStateReader(zkClient);
+      reader.createClusterStateWatchersAndUpdate();
+
+      ZkStateWriter writer = new ZkStateWriter(reader, new Overseer.Stats());
+
+      zkClient.makePath(ZkStateReader.COLLECTIONS_ZKNODE + "/c1", true);
+
+      // create new collection with stateFormat = 2
+      ZkWriteCommand c1 = new ZkWriteCommand("c1",
+          new DocCollection("c1", new HashMap<String, Slice>(), new HashMap<String, Object>(), DocRouter.DEFAULT, 0, ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json"));
+
+      ClusterState clusterState = writer.enqueueUpdate(reader.getClusterState(), c1, null);
+      writer.writePendingUpdates();
+
+      Map map = (Map) ZkStateReader.fromJSON(zkClient.getData("/clusterstate.json", null, null, true));
+      assertNull(map.get("c1"));
+      map = (Map) ZkStateReader.fromJSON(zkClient.getData(ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", null, null, true));
+      assertNotNull(map.get("c1"));
+
+    } finally {
+      IOUtils.close(zkClient);
+      server.shutdown();
+
+    }
+
+
+  }
+
 }
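
The new testSingleExternalCollection() pins down the stateFormat=2 contract: a DocCollection constructed with its own znode path must be written to collections/<name>/state.json and must not leak into the shared /clusterstate.json. The assertion pair at its heart:

    Map<?, ?> shared = (Map<?, ?>) ZkStateReader.fromJSON(
        zkClient.getData("/clusterstate.json", null, null, true));
    assertNull(shared.get("c1"));               // absent from the shared state

    Map<?, ?> own = (Map<?, ?>) ZkStateReader.fromJSON(zkClient.getData(
        ZkStateReader.COLLECTIONS_ZKNODE + "/c1/state.json", null, null, true));
    assertNotNull(own.get("c1"));               // present in its own state.json
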
diff --git a/solr/core/src/test/org/apache/solr/core/CoreContainerCoreInitFailuresTest.java b/solr/core/src/test/org/apache/solr/core/CoreContainerCoreInitFailuresTest.java
deleted file mode 100644
index af374e19..0000000
--- a/solr/core/src/test/org/apache/solr/core/CoreContainerCoreInitFailuresTest.java
+++ /dev/null
@@ -1,339 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.lucene.util.IOUtils;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.SolrException;
-import org.junit.After;
-import org.xml.sax.SAXParseException;
-
-import java.io.File;
-import java.util.Collection;
-import java.util.Map;
-import java.util.regex.Pattern;
-
-public class CoreContainerCoreInitFailuresTest extends SolrTestCaseJ4 {
-  
-  File solrHome = null;
-  CoreContainer cc = null;
-
-  private void init(final String dirSuffix) {
-    solrHome = createTempDir(dirSuffix).toFile();
-  }
-
-  @After
-  public void cleanUp() throws Exception {
-    if (cc != null) {
-      cc.shutdown();
-      cc = null;
-    }
-
-    solrHome = null;
-  }
-
-  public void testFlowWithEmpty() throws Exception {
-    // reused state
-    Map<String,CoreContainer.CoreLoadFailure> failures = null;
-    Collection<String> cores = null;
-    Exception fail = null;
-
-    init("empty_flow");
-
-    // solr.xml
-    File solrXml = new File(solrHome, "solr.xml");
-    FileUtils.write(solrXml, EMPTY_SOLR_XML, IOUtils.UTF_8);
-
-    // ----
-    // init the CoreContainer
-    cc = new CoreContainer(solrHome.getAbsolutePath());
-    cc.load();
-
-    // check that we have the cores we expect
-    cores = cc.getCoreNames();
-    assertNotNull("core names is null", cores);
-    assertEquals("wrong number of cores", 0, cores.size());
-    
-    // check that we have the failures we expect
-    failures = cc.getCoreInitFailures();
-    assertNotNull("core failures is a null map", failures);
-    assertEquals("wrong number of core failures", 0, failures.size());
-
-    // -----
-    // try to add a collection with a path that doesn't exist
-    final CoreDescriptor bogus = new CoreDescriptor(cc, "bogus", "bogus_path");
-    try {
-      ignoreException(Pattern.quote("bogus_path"));
-      cc.create(bogus);
-      fail("bogus inst dir failed to trigger exception from create");
-    } catch (SolrException e) {
-      assertTrue("init exception doesn't mention bogus dir: " + e.getCause().getCause().getMessage(),
-                 0 < e.getCause().getCause().getMessage().indexOf("bogus_path"));
-      
-    }
-    
-    // check that we have the cores we expect
-    cores = cc.getCoreNames();
-    assertNotNull("core names is null", cores);
-    assertEquals("wrong number of cores", 0, cores.size());
-    
-    // check that we have the failures we expect
-    failures = cc.getCoreInitFailures();
-    assertNotNull("core failures is a null map", failures);
-    assertEquals("wrong number of core failures", 1, failures.size());
-    fail = failures.get("bogus").exception;
-    assertNotNull("null failure for test core", fail);
-    assertTrue("init failure doesn't mention problem: " + fail.getCause().getMessage(),
-               0 < fail.getCause().getMessage().indexOf("bogus_path"));
-
-    // check that we get null accessing a non-existent core
-    assertNull(cc.getCore("does_not_exist"));
-    // check that we get a 500 accessing the core with an init failure
-    try {
-      SolrCore c = cc.getCore("bogus");
-      fail("Failed to get Exception on accessing core with init failure");
-    } catch (SolrException ex) {
-      assertEquals(500, ex.code());
-      // double wrapped
-      String cause = ex.getCause().getCause().getMessage();
-      assertTrue("getCore() ex cause doesn't mention init fail: " + cause,
-                 0 < cause.indexOf("bogus_path"));
-      
-    }
-
-    // let the test end here, with some recorded failures, and let cleanUp()
-    // verify that there is no problem shuting down CoreContainer with known 
-    // SolrCore failures
-  }
-  
-  public void testFlowBadFromStart() throws Exception {
-
-    // reused state
-    Map<String,CoreContainer.CoreLoadFailure> failures = null;
-    Collection<String> cores = null;
-    Exception fail = null;
-
-    init("bad_flow");
-
-    // start with two collections: one valid, and one broken
-    File solrXml = new File(solrHome, "solr.xml");
-    FileUtils.write(solrXml, BAD_SOLR_XML, IOUtils.UTF_8);
-
-    // our "ok" collection
-    FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"),
-                       FileUtils.getFile(solrHome, "col_ok", "conf", "solrconfig.xml"));
-    FileUtils.copyFile(getFile("solr/collection1/conf/schema-minimal.xml"),
-                       FileUtils.getFile(solrHome, "col_ok", "conf", "schema.xml"));
-
-    // our "bad" collection
-    ignoreException(Pattern.quote("DummyMergePolicy"));
-    FileUtils.copyFile(getFile("solr/collection1/conf/bad-mp-solrconfig.xml"),
-                       FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"));
-    FileUtils.copyFile(getFile("solr/collection1/conf/schema-minimal.xml"),
-                       FileUtils.getFile(solrHome, "col_bad", "conf", "schema.xml"));
-
-
-    // -----
-    // init the  CoreContainer with the mix of ok/bad cores
-    cc = new CoreContainer(solrHome.getAbsolutePath());
-    cc.load();
-    
-    // check that we have the cores we expect
-    cores = cc.getCoreNames();
-    assertNotNull("core names is null", cores);
-    assertEquals("wrong number of cores", 1, cores.size());
-    assertTrue("col_ok not found", cores.contains("col_ok"));
-    
-    // check that we have the failures we expect
-    failures = cc.getCoreInitFailures();
-    assertNotNull("core failures is a null map", failures);
-    assertEquals("wrong number of core failures", 1, failures.size());
-    fail = failures.get("col_bad").exception;
-    assertNotNull("null failure for test core", fail);
-    assertTrue("init failure doesn't mention problem: " + fail.getMessage(),
-               0 < fail.getMessage().indexOf("DummyMergePolicy"));
-
-    // check that we get null accessing a non-existent core
-    assertNull(cc.getCore("does_not_exist"));
-    // check that we get a 500 accessing the core with an init failure
-    try {
-      SolrCore c = cc.getCore("col_bad");
-      fail("Failed to get Exception on accessing core with init failure");
-    } catch (SolrException ex) {
-      assertEquals(500, ex.code());
-      // double wrapped
-      String cause = ex.getCause().getCause().getMessage();
-      assertTrue("getCore() ex cause doesn't mention init fail: " + cause,
-                 0 < cause.indexOf("DummyMergePolicy"));
-    }
-
-    // -----
-    // "fix" the bad collection
-    FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"),
-                       FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"));
-    final CoreDescriptor fixed = new CoreDescriptor(cc, "col_bad", "col_bad");
-    cc.create(fixed);
-    
-    // check that we have the cores we expect
-    cores = cc.getCoreNames();
-    assertNotNull("core names is null", cores);
-    assertEquals("wrong number of cores", 2, cores.size());
-    assertTrue("col_ok not found", cores.contains("col_ok"));
-    assertTrue("col_bad not found", cores.contains("col_bad"));
-    
-    // check that we have the failures we expect
-    failures = cc.getCoreInitFailures();
-    assertNotNull("core failures is a null map", failures);
-    assertEquals("wrong number of core failures", 0, failures.size());
-
-
-    // -----
-    // try to add a collection with a path that doesn't exist
-    final CoreDescriptor bogus = new CoreDescriptor(cc, "bogus", "bogus_path");
-    try {
-      ignoreException(Pattern.quote("bogus_path"));
-      cc.create(bogus);
-      fail("bogus inst dir failed to trigger exception from create");
-    } catch (SolrException e) {
-      assertTrue("init exception doesn't mention bogus dir: " + e.getCause().getCause().getMessage(),
-                 0 < e.getCause().getCause().getMessage().indexOf("bogus_path"));
-      
-    }
-    
-    // check that we have the cores we expect
-    cores = cc.getCoreNames();
-    assertNotNull("core names is null", cores);
-    assertEquals("wrong number of cores", 2, cores.size());
-    assertTrue("col_ok not found", cores.contains("col_ok"));
-    assertTrue("col_bad not found", cores.contains("col_bad"));
-    
-    // check that we have the failures we expect
-    failures = cc.getCoreInitFailures();
-    assertNotNull("core failures is a null map", failures);
-    assertEquals("wrong number of core failures", 1, failures.size());
-    fail = failures.get("bogus").exception;
-    assertNotNull("null failure for test core", fail);
-    assertTrue("init failure doesn't mention problem: " + fail.getCause().getMessage(),
-               0 < fail.getCause().getMessage().indexOf("bogus_path"));
-
-    // check that we get null accessing a non-existent core
-    assertNull(cc.getCore("does_not_exist"));
-    // check that we get a 500 accessing the core with an init failure
-    try {
-      SolrCore c = cc.getCore("bogus");
-      fail("Failed to get Exception on accessing core with init failure");
-    } catch (SolrException ex) {
-      assertEquals(500, ex.code());
-      // double wrapped
-      String cause = ex.getCause().getCause().getMessage();
-      assertTrue("getCore() ex cause doesn't mention init fail: " + cause,
-                 0 < cause.indexOf("bogus_path"));
-    }
-
-    // -----
-    // break col_bad's config and try to RELOAD to add failure
-
-    final long col_bad_old_start = getCoreStartTime(cc, "col_bad");
-
-    FileUtils.write
-      (FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"),
-       "This is gibberish, not valid XML <", 
-       IOUtils.UTF_8);
-
-    try {
-      ignoreException(Pattern.quote("SAX"));
-      cc.reload("col_bad");
-      fail("corrupt solrconfig.xml failed to trigger exception from reload");
-    } catch (SolrException e) {
-      Throwable rootException = getWrappedException(e);
-      assertTrue("We're supposed to have a wrapped SAXParseException here, but we don't",
-          rootException instanceof SAXParseException);
-      SAXParseException se = (SAXParseException) rootException;
-      assertTrue("reload exception doesn't refer to solrconfig.xml " + se.getSystemId(),
-          0 < se.getSystemId().indexOf("solrconfig.xml"));
-
-    }
-
-    assertEquals("Failed core reload should not have changed start time",
-                 col_bad_old_start, getCoreStartTime(cc, "col_bad"));
-
-    // check that we have the cores we expect
-    cores = cc.getCoreNames();
-    assertNotNull("core names is null", cores);
-    assertEquals("wrong number of cores", 2, cores.size());
-    assertTrue("col_ok not found", cores.contains("col_ok"));
-    assertTrue("col_bad not found", cores.contains("col_bad"));
-
-    // check that we have the failures we expect
-    failures = cc.getCoreInitFailures();
-    assertNotNull("core failures is a null map", failures);
-    assertEquals("wrong number of core failures", 2, failures.size());
-    Throwable ex = getWrappedException(failures.get("col_bad").exception);
-    assertNotNull("null failure for test core", ex);
-    assertTrue("init failure isn't SAXParseException",
-               ex instanceof SAXParseException);
-    SAXParseException saxEx = (SAXParseException) ex;
-    assertTrue("init failure doesn't mention problem: " + saxEx.toString(), saxEx.getSystemId().contains("solrconfig.xml"));
-
-    // ----
-    // fix col_bad's config (again) and RELOAD to fix failure
-    FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"),
-                       FileUtils.getFile(solrHome, "col_bad", "conf", "solrconfig.xml"));
-    cc.reload("col_bad");
-    
-    assertTrue("Core reload should have changed start time",
-               col_bad_old_start < getCoreStartTime(cc, "col_bad"));
-    
-
-    // check that we have the cores we expect
-    cores = cc.getCoreNames();
-    assertNotNull("core names is null", cores);
-    assertEquals("wrong number of cores", 2, cores.size());
-    assertTrue("col_ok not found", cores.contains("col_ok"));
-    assertTrue("col_bad not found", cores.contains("col_bad"));
-
-    // check that we have the failures we expect
-    failures = cc.getCoreInitFailures();
-    assertNotNull("core failures is a null map", failures);
-    assertEquals("wrong number of core failures", 1, failures.size());
-
-  }
-
-  private long getCoreStartTime(final CoreContainer cc, final String name) {
-    try (SolrCore tmp = cc.getCore(name)) {
-      return tmp.getStartTime();
-    }
-  }
-
-  private static final String EMPTY_SOLR_XML ="<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" +
-      "<solr persistent=\"false\">\n" +
-      "  <cores adminPath=\"/admin/cores\">\n" +
-      "  </cores>\n" +
-      "</solr>";
-
-  private static final String BAD_SOLR_XML =
-    "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" +
-    "<solr persistent=\"false\">\n" +
-    "  <cores adminPath=\"/admin/cores\">\n" +
-    "    <core name=\"col_ok\" instanceDir=\"col_ok\" />\n" + 
-    "    <core name=\"col_bad\" instanceDir=\"col_bad\" />\n" + 
-    "  </cores>\n" +
-    "</solr>";
-  
-}
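
(Aside, not part of the patch: the assertions in the deleted test above, and in its replacement later in this commit, depend on init failures being "double wrapped", so the interesting message sits two getCause() calls deep. A minimal sketch, in plain Java, of walking an exception chain to its root:

    static Throwable rootCause(Throwable t) {
      // follow getCause() until it bottoms out or cycles back on itself
      while (t.getCause() != null && t.getCause() != t) {
        t = t.getCause();
      }
      return t;
    }

A helper like this would make the e.getCause().getCause() chains less brittle if the wrapping depth ever changes.)
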
diff --git a/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
new file mode 100644
index 0000000..26fbdf7
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/core/HdfsDirectoryFactoryTest.java
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+import java.nio.file.Path;
+
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.NoLockFactory;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.cloud.hdfs.HdfsTestUtil;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.DirectoryFactory.DirContext;
+import org.apache.solr.util.MockCoreContainer.MockCoreDescriptor;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope;
+
+@ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
+public class HdfsDirectoryFactoryTest extends SolrTestCaseJ4 {
+  
+  private static MiniDFSCluster dfsCluster;
+  
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath(), false);
+  }
+  
+  @AfterClass
+  public static void teardownClass() throws Exception {
+    HdfsTestUtil.teardownClass(dfsCluster);
+    System.clearProperty("solr.hdfs.home");
+    System.clearProperty(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB);
+    dfsCluster = null;
+  }
+  
+  @Test
+  public void testInitArgsOrSysPropConfig() throws Exception {
+    
+    HdfsDirectoryFactory hdfsFactory = new HdfsDirectoryFactory();
+    
+    // test sys prop config
+    
+    System.setProperty("solr.hdfs.home", dfsCluster.getURI().toString() + "/solr1");
+    hdfsFactory.init(new NamedList<>());
+    String dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor());
+
+    assertTrue(dataHome.endsWith("/solr1/mock/data"));
+    
+    System.clearProperty("solr.hdfs.home");
+    
+    // test init args config
+    
+    NamedList<Object> nl = new NamedList<>();
+    nl.add("solr.hdfs.home", dfsCluster.getURI().toString() + "/solr2");
+    hdfsFactory.init(nl);
+    dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor());
+
+    assertTrue(dataHome.endsWith("/solr2/mock/data"));
+    
+    // test sys prop and init args config - init args wins
+    
+    System.setProperty("solr.hdfs.home", dfsCluster.getURI().toString() + "/solr1");
+    hdfsFactory.init(nl);
+    dataHome = hdfsFactory.getDataHome(new MockCoreDescriptor());
+
+    assertTrue(dataHome.endsWith("/solr2/mock/data"));
+    
+    System.clearProperty("solr.hdfs.home");
+    
+    
+    // set conf dir by sys prop
+    
+    Path confDir = createTempDir();
+    
+    System.setProperty(HdfsDirectoryFactory.CONFIG_DIRECTORY, confDir.toString());
+    
+    Directory dir = hdfsFactory.create(dfsCluster.getURI().toString() + "/solr", NoLockFactory.INSTANCE, DirContext.DEFAULT);
+    try {
+      assertEquals(confDir.toString(), hdfsFactory.getConfDir());
+    } finally {
+      dir.close();
+    }
+    
+    // check bool and int getConf impls
+    nl = new NamedList<>();
+    nl.add(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 4);
+    System.setProperty(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, "3");
+    nl.add(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, true);
+    System.setProperty(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, "false");
+    
+    hdfsFactory.init(nl);
+    
+    assertEquals(4, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0));
+    assertEquals(true, hdfsFactory.getConfig(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, false));
+    
+    nl = new NamedList<>();
+    hdfsFactory.init(nl);
+    System.setProperty(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, "true");
+    
+    assertEquals(3, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0));
+    assertEquals(true, hdfsFactory.getConfig(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, false));
+    
+    System.clearProperty(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB);
+    System.clearProperty(HdfsDirectoryFactory.BLOCKCACHE_ENABLED);
+    
+    assertEquals(0, hdfsFactory.getConfig(HdfsDirectoryFactory.NRTCACHINGDIRECTORY_MAXMERGESIZEMB, 0));
+    assertEquals(false, hdfsFactory.getConfig(HdfsDirectoryFactory.BLOCKCACHE_ENABLED, false));
+    
+    hdfsFactory.close();
+  }
+
+}
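
(Aside, not part of the patch: testInitArgsOrSysPropConfig pins down a precedence rule for HdfsDirectoryFactory settings: values passed to init() in the NamedList win over JVM system properties, which in turn win over the caller's default. A minimal sketch of that lookup order, using a hypothetical helper rather than the factory's real getConfig():

    // illustrative only: init args, then system property, then default
    static int resolveInt(NamedList<Object> args, String name, int def) {
      Object v = args.get(name);                 // 1) explicit init args win
      if (v != null) return Integer.parseInt(v.toString());
      String sys = System.getProperty(name);     // 2) then system properties
      if (sys != null) return Integer.parseInt(sys);
      return def;                                // 3) otherwise the default
    }
)
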
diff --git a/solr/core/src/test/org/apache/solr/core/OpenCloseCoreStressTest.java b/solr/core/src/test/org/apache/solr/core/OpenCloseCoreStressTest.java
index 5dc1687..1d748e5 100644
--- a/solr/core/src/test/org/apache/solr/core/OpenCloseCoreStressTest.java
+++ b/solr/core/src/test/org/apache/solr/core/OpenCloseCoreStressTest.java
@@ -18,8 +18,8 @@
 package org.apache.solr.core;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.lucene.util.IOUtils;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -93,59 +93,49 @@
   @After
   public void tearDownServer() throws Exception {
     if (jetty != null) jetty.stop();
-    for(SolrClient client: indexingClients) {
-      client.shutdown();
-    }
-    for(SolrClient client: queryingClients) {
-      client.shutdown();
-    }
+    IOUtils.close(indexingClients);
+    IOUtils.close(queryingClients);
     indexingClients.clear();
     queryingClients.clear();
   }
 
   @Test
   public void test5Seconds() throws Exception {
-    doStress(5, random().nextBoolean());
+    doStress(5);
   }
   
   @Test
   @Nightly
   public void test15SecondsOld() throws Exception {
-    doStress(15, true);
+    doStress(15);
   }
 
   @Test
   @Nightly
   public void test15SecondsNew() throws Exception {
-    doStress(15, false);
+    doStress(15);
   }
 
   @Test
   @Nightly
   public void test10MinutesOld() throws Exception {
-    doStress(300, true);
+    doStress(300);
   }
 
   @Test
   @Nightly
   public void test10MinutesNew() throws Exception {
-    doStress(300, false);
+    doStress(300);
   }
 
   @Test
   @Weekly
-  public void test1HourOld() throws Exception {
-    doStress(1800, true);
+  public void test1Hour() throws Exception {
+    doStress(1800);
   }
-
-  @Test
-  @Weekly
-  public void test1HourNew() throws Exception {
-    doStress(1800, false);
-  }
-
-
+  
   private void buildClients() throws Exception {
+
     jetty.start();
     url = buildUrl(jetty.getLocalPort(), "/solr/");
 
@@ -169,8 +159,8 @@
   }
 
   // Unless things go _really_ well, stop after you have the directories set up.
-  private void doStress(int secondsToRun, boolean oldStyle) throws Exception {
-    makeCores(solrHomeDirectory, oldStyle);
+  private void doStress(int secondsToRun) throws Exception {
+    makeCores(solrHomeDirectory);
 
     //MUST start the server after the cores are made.
     buildClients();
@@ -213,25 +203,22 @@
     }
   }
 
-  private void makeCores(File home, boolean oldStyle) throws Exception {
+  private void makeCores(File home) throws Exception {
     File testSrcRoot = new File(SolrTestCaseJ4.TEST_HOME());
     String srcSolrXml = "solr-stress-new.xml";
 
-    if (oldStyle) {
-      srcSolrXml = "solr-stress-old.xml";
-    }
     FileUtils.copyFile(new File(testSrcRoot, srcSolrXml), new File(home, "solr.xml"));
 
     // create directories in groups of 100 until you have enough.
     for (int idx = 0; idx < numCores; ++idx) {
       String coreName = String.format(Locale.ROOT, "%05d_core", idx);
-      makeCore(new File(home, coreName), testSrcRoot, oldStyle);
+      makeCore(new File(home, coreName), testSrcRoot);
       coreCounts.put(coreName, 0L);
       coreNames.add(coreName);
     }
   }
 
-  private void makeCore(File coreDir, File testSrcRoot, boolean oldStyle) throws IOException {
+  private void makeCore(File coreDir, File testSrcRoot) throws IOException {
     File conf = new File(coreDir, "conf");
 
     if (!conf.mkdirs()) log.warn("mkdirs returned false in makeCore... ignoring");
@@ -244,9 +231,7 @@
     FileUtils.copyFile(new File(testConf, "solrconfig.snippet.randomindexconfig.xml"),
         new File(conf, "solrconfig.snippet.randomindexconfig.xml"));
 
-    if (!oldStyle) {
-      FileUtils.copyFile(new File(testSrcRoot, "conf/core.properties"), new File(coreDir, "core.properties"));
-    }
+    FileUtils.copyFile(new File(testSrcRoot, "conf/core.properties"), new File(coreDir, "core.properties"));
 
   }
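
(Aside, not part of the patch: the tearDown change above replaces per-client shutdown loops with Lucene's IOUtils.close(), which attempts to close every element of an Iterable of Closeables and rethrows a single IOException if any close fails. A minimal sketch of the idiom, with an illustrative client list:

    List<Closeable> clients = new ArrayList<>();
    // ... clients are opened and added during the test ...
    IOUtils.close(clients);  // closes all of them, even if one close() throws
    clients.clear();
)
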
 
diff --git a/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java b/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
index 42b4fef..b78b8d7 100644
--- a/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
+++ b/solr/core/src/test/org/apache/solr/core/SolrCoreTest.java
@@ -51,35 +51,6 @@
     deleteCore();
     super.tearDown();
   }
-  
-  @Test
-  public void testRemoveThenAddDefaultCore() throws Exception {
-    final CoreContainer cores = h.getCoreContainer();
-    SolrCore core = cores.getCore("");
-
-    assertEquals(COLLECTION1, cores.getDefaultCoreName());
-    
-    cores.unload("");
-    core.close();
-
-    CoreDescriptor cd = new CoreDescriptor(cores, COLLECTION1, "collection1",
-                                            CoreDescriptor.CORE_DATADIR, createTempDir("dataDir2").toFile().getAbsolutePath());
-    
-    cores.create(cd);
-    
-    assertEquals(COLLECTION1, cores.getDefaultCoreName());
-    
-    // so we should be able to get a core with collection1
-    core = cores.getCore(COLLECTION1);
-    assertNotNull(core);
-    core.close();
-    
-    // and with ""
-    core = cores.getCore("");
-    assertNotNull(core);
-    
-    core.close();
-  }
 
   @Test
   public void testRequestHandlerRegistry() {
@@ -100,7 +71,7 @@
   @Test
   public void testClose() throws Exception {
     final CoreContainer cores = h.getCoreContainer();
-    SolrCore core = cores.getCore("");
+    SolrCore core = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME);
 
     ClosingRequestHandler handler1 = new ClosingRequestHandler();
     handler1.inform( core );
@@ -120,7 +91,7 @@
     assertTrue("Refcount != 1", core.getOpenCount() == 1);
     
     final CoreContainer cores = h.getCoreContainer();
-    SolrCore c1 = cores.getCore("");
+    SolrCore c1 = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME);
     assertTrue("Refcount != 2", core.getOpenCount() == 2);
 
     ClosingRequestHandler handler1 = new ClosingRequestHandler();
@@ -131,12 +102,12 @@
     assertNull( old ); // should not be anything...
     assertEquals( core.getRequestHandlers().get( path ), handler1 );
    
-    SolrCore c2 = cores.getCore("");
+    SolrCore c2 = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME);
     c1.close();
     assertTrue("Refcount < 1", core.getOpenCount() >= 1);
     assertTrue("Handler is closed", handler1.closed == false);
     
-    c1 = cores.getCore("");
+    c1 = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME);
     assertTrue("Refcount < 2", core.getOpenCount() >= 2);
     assertTrue("Handler is closed", handler1.closed == false);
     
@@ -184,7 +155,7 @@
           try {
             for (int l = 0; l < LOOP; ++l) {
               r += 1;
-              core = cores.getCore("");
+              core = cores.getCore(SolrTestCaseJ4.DEFAULT_TEST_CORENAME);
               // sprinkle concurrency hinting...
               yield(l);
               assertTrue("Refcount < 1", core.getOpenCount() >= 1);              
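
(Aside, not part of the patch: the getOpenCount() assertions above encode SolrCore's reference-counting contract: every successful CoreContainer.getCore() bumps the count and must be paired with a close(). Since SolrCore is used in try-with-resources elsewhere in this commit, a minimal sketch of the balanced form, with an illustrative core name:

    try (SolrCore core = cores.getCore("core1")) {
      // the refcount stays at least 1 inside this block; close() on exit
      // releases this reference without shutting the core down
    }
)
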
diff --git a/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java b/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java
index d78d28a..2ffb235 100644
--- a/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java
+++ b/solr/core/src/test/org/apache/solr/core/TestArbitraryIndexDir.java
@@ -54,14 +54,12 @@
   @BeforeClass
   public static void beforeClass() {
     // this test wants to start solr, and then open a separate indexwriter of its own on the same dir.
-    System.setProperty("solr.tests.nrtMode", "false");
     System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_
     savedFactory = System.getProperty("solr.DirectoryFactory");
     System.setProperty("solr.directoryFactory", "org.apache.solr.core.MockFSDirectoryFactory");
   }
   @AfterClass
   public static void afterClass() {
-    System.clearProperty("solr.tests.nrtMode");
     if (savedFactory == null) {
       System.clearProperty("solr.directoryFactory");
     } else {
@@ -125,7 +123,7 @@
     iw.close();
 
     //commit will cause searcher to open with the new index dir
-    assertU(commit());
+    assertU(commit());
+    h.getCoreContainer().reload(h.getCore().getName());
     //new index dir contains just 1 doc.
     assertQ("return doc with id 2",
         req("id:2"),
diff --git a/solr/core/src/test/org/apache/solr/core/TestBadConfig.java b/solr/core/src/test/org/apache/solr/core/TestBadConfig.java
index 42f7d62..07d2e69 100644
--- a/solr/core/src/test/org/apache/solr/core/TestBadConfig.java
+++ b/solr/core/src/test/org/apache/solr/core/TestBadConfig.java
@@ -27,10 +27,10 @@
     assertConfigs("bad_solrconfig.xml","schema.xml","unset.sys.property");
   }
 
-  public void testSegmentMergerWithoutReopen() throws Exception {
-      assertConfigs("bad-solrconfig-warmer-no-reopen.xml", "schema12.xml",
-                    "mergedSegmentWarmer");
+  public void testNRTModeProperty() throws Exception {
+    assertConfigs("bad-solrconfig-nrtmode.xml","schema.xml", "nrtMode");
   }
+
   public void testMultipleDirectoryFactories() throws Exception {
       assertConfigs("bad-solrconfig-multiple-dirfactory.xml", "schema12.xml",
                     "directoryFactory");
diff --git a/solr/core/src/test/org/apache/solr/core/TestConfig.java b/solr/core/src/test/org/apache/solr/core/TestConfig.java
index 023ab5a..4a3bb7b 100644
--- a/solr/core/src/test/org/apache/solr/core/TestConfig.java
+++ b/solr/core/src/test/org/apache/solr/core/TestConfig.java
@@ -17,17 +17,14 @@
 
 package org.apache.solr.core;
 
-import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.TieredMergePolicy;
-import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.handler.admin.ShowFileRequestHandler;
-import org.apache.solr.update.DirectUpdateHandler2;
-import org.apache.solr.update.SolrIndexConfig;
-import org.apache.solr.util.RefCounted;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.IndexSchemaFactory;
+import org.apache.solr.update.SolrIndexConfig;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.w3c.dom.Node;
@@ -36,7 +33,6 @@
 import javax.xml.xpath.XPathConstants;
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.Locale;
 
 public class TestConfig extends SolrTestCaseJ4 {
 
@@ -117,10 +113,10 @@
     SolrIndexConfig sic = sc.indexConfig;
     assertEquals("default ramBufferSizeMB", 100.0D, sic.ramBufferSizeMB, 0.0D);
     assertEquals("default LockType", SolrIndexConfig.LOCK_TYPE_NATIVE, sic.lockType);
-    assertEquals("default useCompoundFile", false, sic.useCompoundFile);
+    assertEquals("default useCompoundFile", false, sic.getUseCompoundFile());
 
     IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig);
-    IndexWriterConfig iwc = sic.toIndexWriterConfig(indexSchema);
+    IndexWriterConfig iwc = sic.toIndexWriterConfig(h.getCore());
 
     assertNotNull("null mp", iwc.getMergePolicy());
     assertTrue("mp is not TMP", iwc.getMergePolicy() instanceof TieredMergePolicy);
@@ -140,7 +136,7 @@
                  Double.parseDouble(System.getProperty("solr.tests.ramBufferSizeMB")), 
                                     sic.ramBufferSizeMB, 0.0D);
     assertEquals("useCompoundFile sysprop", 
-                 Boolean.parseBoolean(System.getProperty("useCompoundFile")), sic.useCompoundFile);
+                 Boolean.parseBoolean(System.getProperty("useCompoundFile")), sic.getUseCompoundFile());
   }
 
 }
diff --git a/solr/core/src/test/org/apache/solr/core/TestConfigSets.java b/solr/core/src/test/org/apache/solr/core/TestConfigSets.java
index 2e23867..7193b25 100644
--- a/solr/core/src/test/org/apache/solr/core/TestConfigSets.java
+++ b/solr/core/src/test/org/apache/solr/core/TestConfigSets.java
@@ -46,7 +46,7 @@
     System.setProperty("configsets", configSetsBaseDir);
 
     SolrResourceLoader loader = new SolrResourceLoader(testDirectory.getAbsolutePath());
-    CoreContainer container = new CoreContainer(loader, ConfigSolr.fromString(loader, solrxml));
+    CoreContainer container = new CoreContainer(ConfigSolr.fromString(loader, solrxml));
     container.load();
 
     return container;
@@ -115,12 +115,12 @@
     System.setProperty("configsets", csd);
 
     SolrResourceLoader loader = new SolrResourceLoader(testDirectory.getAbsolutePath());
-    CoreContainer container = new CoreContainer(loader, ConfigSolr.fromString(loader, solrxml));
+    CoreContainer container = new CoreContainer(ConfigSolr.fromString(loader, solrxml));
     container.load();
 
     // We initially don't have a /get handler defined
     SolrCore core = container.create(new CoreDescriptor(container, "core1", testDirectory + "/core", "configSet", "configset-2"));
-    assertThat("No /get handler should be defined in the initial configuration",
+    assertThat("No /dump handler should be defined in the initial configuration",
         core.getRequestHandler("/dump"), is(nullValue()));
 
     // Now copy in a config with a /get handler and reload
@@ -129,7 +129,7 @@
     container.reload("core1");
 
     core = container.getCore("core1");
-    assertThat("A /get handler should be defined in the reloaded configuration",
+    assertThat("A /dump handler should be defined in the reloaded configuration",
         core.getRequestHandler("/dump"), is(notNullValue()));
     core.close();
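
(Aside, not part of the patch: the reload flow tested above only becomes visible on cores fetched after the reload, since the container registers a fresh SolrCore instance. A minimal sketch of the copy, reload, re-fetch sequence, with illustrative file names:

    FileUtils.copyFile(newSolrconfig, new File(coreConfDir, "solrconfig.xml"));
    container.reload("core1");
    try (SolrCore core = container.getCore("core1")) {  // re-fetch after reload
      assertNotNull(core.getRequestHandler("/dump"));
    }
)
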
 
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
index 2b97664..a577ff8 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
@@ -18,6 +18,7 @@
 package org.apache.solr.core;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.lucene.util.IOUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.handler.admin.CollectionsHandler;
@@ -26,19 +27,18 @@
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.xml.sax.SAXException;
+import org.xml.sax.SAXParseException;
 
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
+import java.util.Map;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
+import java.util.regex.Pattern;
 
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.CoreMatchers.nullValue;
@@ -55,7 +55,7 @@
   @BeforeClass
   public static void beforeClass() throws Exception {
     oldSolrHome = System.getProperty(SOLR_HOME_PROP);
-    initCore("solrconfig.xml", "schema.xml");
+    System.setProperty("configsets", getFile("solr/configsets").getAbsolutePath());
   }
 
   @AfterClass
@@ -67,16 +67,14 @@
     }
   }
 
-  private File solrHomeDirectory;
+  private CoreContainer init(String xml) throws Exception {
+    Path solrHomeDirectory = createTempDir();
+    return init(solrHomeDirectory, xml);
+  }
 
-  private CoreContainer init(String dirName) throws Exception {
-
-    solrHomeDirectory = createTempDir(dirName).toFile();
-
-    FileUtils.copyDirectory(new File(SolrTestCaseJ4.TEST_HOME()), solrHomeDirectory);
-    System.out.println("Using solrconfig from " + new File(SolrTestCaseJ4.TEST_HOME()).getAbsolutePath());
-
-    CoreContainer ret = new CoreContainer(solrHomeDirectory.getAbsolutePath());
+  private CoreContainer init(Path homeDirectory, String xml) throws Exception {
+    SolrResourceLoader loader = new SolrResourceLoader(homeDirectory.toString());
+    CoreContainer ret = new CoreContainer(ConfigSolr.fromString(loader, xml));
     ret.load();
     return ret;
   }
@@ -84,12 +82,14 @@
   @Test
   public void testShareSchema() throws Exception {
     System.setProperty("shareSchema", "true");
-    final CoreContainer cores = init("_shareSchema");
+
+    CoreContainer cores = init(CONFIGSETS_SOLR_XML);
+
     try {
-      CoreDescriptor descriptor1 = new CoreDescriptor(cores, "core1", "./collection1");
+      CoreDescriptor descriptor1 = new CoreDescriptor(cores, "core1", "./collection1", "configSet", "minimal");
       SolrCore core1 = cores.create(descriptor1);
       
-      CoreDescriptor descriptor2 = new CoreDescriptor(cores, "core2", "./collection1");
+      CoreDescriptor descriptor2 = new CoreDescriptor(cores, "core2", "./collection1", "configSet", "minimal");
       SolrCore core2 = cores.create(descriptor2);
       
       assertSame(core1.getLatestSchema(), core2.getLatestSchema());
@@ -102,12 +102,14 @@
 
   @Test
   public void testReloadSequential() throws Exception {
-    final CoreContainer cc = init("_reloadSequential");
+    final CoreContainer cc = init(CONFIGSETS_SOLR_XML);
+    CoreDescriptor descriptor1 = new CoreDescriptor(cc, "core1", "./collection1", "configSet", "minimal");
+    cc.create(descriptor1);
     try {
-      cc.reload("collection1");
-      cc.reload("collection1");
-      cc.reload("collection1");
-      cc.reload("collection1");
+      cc.reload("core1");
+      cc.reload("core1");
+      cc.reload("core1");
+      cc.reload("core1");
 
     } finally {
       cc.shutdown();
@@ -116,63 +118,52 @@
 
   @Test
   public void testReloadThreaded() throws Exception {
-    final CoreContainer cc = init("_reloadThreaded");
+    final CoreContainer cc = init(CONFIGSETS_SOLR_XML);
+    CoreDescriptor descriptor1 = new CoreDescriptor(cc, "core1", "./collection1", "configSet", "minimal");
+    cc.create(descriptor1);
 
-      class TestThread extends Thread {
-        @Override
-        public void run() {
-          cc.reload("collection1");
-        }
+    class TestThread extends Thread {
+      @Override
+      public void run() {
+        cc.reload("core1");
       }
+    }
 
-      List<Thread> threads = new ArrayList<>();
-      int numThreads = 4;
-      for (int i = 0; i < numThreads; i++) {
-        threads.add(new TestThread());
-      }
+    List<Thread> threads = new ArrayList<>();
+    int numThreads = 4;
+    for (int i = 0; i < numThreads; i++) {
+      threads.add(new TestThread());
+    }
 
-      for (Thread thread : threads) {
-        thread.start();
-      }
+    for (Thread thread : threads) {
+      thread.start();
+    }
 
-      for (Thread thread : threads) {
-        thread.join();
+    for (Thread thread : threads) {
+      thread.join();
     }
 
     cc.shutdown();
 
   }
 
-
-
   @Test
-  public void testNoCores() throws IOException, ParserConfigurationException, SAXException {
-    //create solrHome
-    File solrHomeDirectory = createTempDir().toFile();
-    
-    boolean oldSolrXml = random().nextBoolean();
-    
-    SetUpHome(solrHomeDirectory, oldSolrXml ? EMPTY_SOLR_XML : EMPTY_SOLR_XML2);
-    CoreContainer cores = new CoreContainer(solrHomeDirectory.getAbsolutePath());
-    cores.load();
+  public void testNoCores() throws Exception {
+
+    CoreContainer cores = init(CONFIGSETS_SOLR_XML);
+
     try {
       //assert zero cores
       assertEquals("There should not be cores", 0, cores.getCores().size());
       
-      FileUtils.copyDirectory(new File(SolrTestCaseJ4.TEST_HOME(), "collection1"), solrHomeDirectory);
       //add a new core
-      CoreDescriptor coreDescriptor = new CoreDescriptor(cores, "core1", solrHomeDirectory.getAbsolutePath());
+      CoreDescriptor coreDescriptor = new CoreDescriptor(cores, "core1", "collection1", CoreDescriptor.CORE_CONFIGSET, "minimal");
       SolrCore newCore = cores.create(coreDescriptor);
 
       //assert one registered core
 
       assertEquals("There should be one core registered", 1, cores.getCores().size());
 
-      if (oldSolrXml) {
-        assertXmlFile(new File(solrHomeDirectory, "solr.xml"),
-            "/solr/cores[@transientCacheSize='32']");
-      }
-
       cores.unload("core1");
       //assert zero cores
       assertEquals("There should not be cores", 0, cores.getCores().size());
@@ -193,27 +184,14 @@
   }
 
   @Test
-  public void testLogWatcherEnabledByDefault() {
-    assertNotNull(h.getCoreContainer().getLogging());
-  }
-  
-  private void SetUpHome(File solrHomeDirectory, String xmlFile) throws IOException {
-    if (solrHomeDirectory.exists()) {
-      FileUtils.deleteDirectory(solrHomeDirectory);
-    }
-    assertTrue("Failed to mkdirs workDir", solrHomeDirectory.mkdirs());
+  public void testLogWatcherEnabledByDefault() throws Exception {
+    CoreContainer cc = init("<solr></solr>");
     try {
-      File solrXmlFile = new File(solrHomeDirectory, "solr.xml");
-      BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(solrXmlFile), StandardCharsets.UTF_8));
-      out.write(xmlFile);
-      out.close();
-    } catch (IOException e) {
-      FileUtils.deleteDirectory(solrHomeDirectory);
-      throw e;
+      assertNotNull(cc.getLogging());
     }
-
-    //init
-    System.setProperty(SOLR_HOME_PROP, solrHomeDirectory.getAbsolutePath());
+    finally {
+      cc.shutdown();
+    }
   }
 
   @Test
@@ -221,13 +199,12 @@
 
     MockCoresLocator cl = new MockCoresLocator();
 
-    solrHomeDirectory = createTempDir("_deleteBadCores").toFile();
-    SolrResourceLoader resourceLoader = new SolrResourceLoader(solrHomeDirectory.getAbsolutePath());
-    File instanceDir = new File(solrHomeDirectory, "_deleteBadCores");
+    SolrResourceLoader resourceLoader = new SolrResourceLoader(createTempDir().toString());
+
     System.setProperty("configsets", getFile("solr/configsets").getAbsolutePath());
 
-    final CoreContainer cc = new CoreContainer(resourceLoader, ConfigSolr.fromString(resourceLoader, EMPTY_SOLR_XML2), cl);
-    CoreDescriptor badcore = new CoreDescriptor(cc, "badcore", instanceDir.getAbsolutePath(), "configSet", "nosuchconfigset");
+    final CoreContainer cc = new CoreContainer(ConfigSolr.fromString(resourceLoader, CONFIGSETS_SOLR_XML), cl);
+    CoreDescriptor badcore = new CoreDescriptor(cc, "badcore", "badcore", "configSet", "nosuchconfigset");
     cl.add(badcore);
 
     try {
@@ -238,7 +215,7 @@
       assertThat(cc.getCoreInitFailures().size(), is(0));
 
       // can we create the core now with a good config?
-      SolrCore core = cc.create(new CoreDescriptor(cc, "badcore", instanceDir.getAbsolutePath(), "configSet", "minimal"));
+      SolrCore core = cc.create(new CoreDescriptor(cc, "badcore", "badcore", "configSet", "minimal"));
       assertThat(core, not(nullValue()));
 
     }
@@ -249,13 +226,13 @@
 
   @Test
   public void testClassLoaderHierarchy() throws Exception {
-    final CoreContainer cc = init("_classLoaderHierarchy");
+    final CoreContainer cc = init(CONFIGSETS_SOLR_XML);
     try {
       ClassLoader sharedLoader = cc.loader.getClassLoader();
       ClassLoader contextLoader = Thread.currentThread().getContextClassLoader();
       assertSame(contextLoader, sharedLoader.getParent());
 
-      CoreDescriptor descriptor1 = new CoreDescriptor(cc, "core1", "./collection1");
+      CoreDescriptor descriptor1 = new CoreDescriptor(cc, "core1", "./collection1", "configSet", "minimal");
       SolrCore core1 = cc.create(descriptor1);
       ClassLoader coreLoader = core1.getResourceLoader().getClassLoader();
       assertSame(sharedLoader, coreLoader.getParent());
@@ -267,9 +244,9 @@
 
   @Test
   public void testSharedLib() throws Exception {
-    File tmpRoot = createTempDir("testSharedLib").toFile();
+    Path tmpRoot = createTempDir("testSharedLib");
 
-    File lib = new File(tmpRoot, "lib");
+    File lib = new File(tmpRoot.toFile(), "lib");
     lib.mkdirs();
 
     JarOutputStream jar1 = new JarOutputStream(new FileOutputStream(new File(lib, "jar1.jar")));
@@ -277,7 +254,7 @@
     jar1.closeEntry();
     jar1.close();
 
-    File customLib = new File(tmpRoot, "customLib");
+    File customLib = new File(tmpRoot.toFile(), "customLib");
     customLib.mkdirs();
 
     JarOutputStream jar2 = new JarOutputStream(new FileOutputStream(new File(customLib, "jar2.jar")));
@@ -285,41 +262,32 @@
     jar2.closeEntry();
     jar2.close();
 
-    FileUtils.writeStringToFile(new File(tmpRoot, "default-lib-solr.xml"), "<solr><cores/></solr>", "UTF-8");
-    FileUtils.writeStringToFile(new File(tmpRoot, "explicit-lib-solr.xml"), "<solr sharedLib=\"lib\"><cores/></solr>", "UTF-8");
-    FileUtils.writeStringToFile(new File(tmpRoot, "custom-lib-solr.xml"), "<solr sharedLib=\"customLib\"><cores/></solr>", "UTF-8");
-
-    final CoreContainer cc1 = CoreContainer.createAndLoad(tmpRoot.getAbsolutePath(), new File(tmpRoot, "default-lib-solr.xml"));
+    final CoreContainer cc1 = init(tmpRoot, "<solr></solr>");
     try {
       cc1.loader.openResource("defaultSharedLibFile").close();
     } finally {
       cc1.shutdown();
     }
 
-    final CoreContainer cc2 = CoreContainer.createAndLoad(tmpRoot.getAbsolutePath(), new File(tmpRoot, "explicit-lib-solr.xml"));
+    final CoreContainer cc2 = init(tmpRoot, "<solr><str name=\"sharedLib\">lib</str></solr>");
     try {
       cc2.loader.openResource("defaultSharedLibFile").close();
     } finally {
       cc2.shutdown();
     }
 
-    final CoreContainer cc3 = CoreContainer.createAndLoad(tmpRoot.getAbsolutePath(), new File(tmpRoot, "custom-lib-solr.xml"));
+    final CoreContainer cc3 = init(tmpRoot, "<solr><str name=\"sharedLib\">customLib</str></solr>");
     try {
       cc3.loader.openResource("customSharedLibFile").close();
     } finally {
       cc3.shutdown();
     }
   }
-  
-  private static final String EMPTY_SOLR_XML ="<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" +
-      "<solr persistent=\"false\">\n" +
-      "  <cores adminPath=\"/admin/cores\" transientCacheSize=\"32\" >\n" +
-      "  </cores>\n" +
-      "</solr>";
-  
-  private static final String EMPTY_SOLR_XML2 ="<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" +
+
+  private static final String CONFIGSETS_SOLR_XML ="<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" +
       "<solr>\n" +
-      "<str name=\"configSetBaseDir\">${configsets:configsets}</str>" +
+      "<str name=\"configSetBaseDir\">${configsets:configsets}</str>\n" +
+      "<str name=\"shareSchema\">${shareSchema:false}</str>\n" +
       "</solr>";
 
   private static final String CUSTOM_HANDLERS_SOLR_XML = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n" +
@@ -350,12 +318,7 @@
   @Test
   public void testCustomHandlers() throws Exception {
 
-    solrHomeDirectory = createTempDir("_customHandlers").toFile();
-    SolrResourceLoader loader = new SolrResourceLoader(solrHomeDirectory.getAbsolutePath());
-
-    ConfigSolr config = ConfigSolr.fromString(loader, CUSTOM_HANDLERS_SOLR_XML);
-
-    CoreContainer cc = new CoreContainer(loader, config);
+    CoreContainer cc = init(CUSTOM_HANDLERS_SOLR_XML);
     try {
       cc.load();
       assertThat(cc.getCollectionsHandler(), is(instanceOf(CustomCollectionsHandler.class)));
@@ -406,4 +369,262 @@
       return cores;
     }
   }
+
+  @Test
+  public void testCoreInitFailuresFromEmptyContainer() throws Exception {
+    // reused state
+    Map<String,CoreContainer.CoreLoadFailure> failures = null;
+    Collection<String> cores = null;
+    Exception fail = null;
+
+    // ----
+    // init the CoreContainer
+    CoreContainer cc = init(CONFIGSETS_SOLR_XML);
+
+    // check that we have the cores we expect
+    cores = cc.getCoreNames();
+    assertNotNull("core names is null", cores);
+    assertEquals("wrong number of cores", 0, cores.size());
+
+    // check that we have the failures we expect
+    failures = cc.getCoreInitFailures();
+    assertNotNull("core failures is a null map", failures);
+    assertEquals("wrong number of core failures", 0, failures.size());
+
+    // -----
+    // try to add a collection with a path that doesn't exist
+    final CoreDescriptor bogus = new CoreDescriptor(cc, "bogus", "bogus_path");
+    try {
+      ignoreException(Pattern.quote("bogus_path"));
+      cc.create(bogus);
+      fail("bogus inst dir failed to trigger exception from create");
+    } catch (SolrException e) {
+      assertTrue("init exception doesn't mention bogus dir: " + e.getCause().getCause().getMessage(),
+          0 < e.getCause().getCause().getMessage().indexOf("bogus_path"));
+
+    }
+
+    // check that we have the cores we expect
+    cores = cc.getCoreNames();
+    assertNotNull("core names is null", cores);
+    assertEquals("wrong number of cores", 0, cores.size());
+
+    // check that we have the failures we expect
+    failures = cc.getCoreInitFailures();
+    assertNotNull("core failures is a null map", failures);
+    assertEquals("wrong number of core failures", 1, failures.size());
+    fail = failures.get("bogus").exception;
+    assertNotNull("null failure for test core", fail);
+    assertTrue("init failure doesn't mention problem: " + fail.getCause().getMessage(),
+        0 < fail.getCause().getMessage().indexOf("bogus_path"));
+
+    // check that we get null accessing a non-existent core
+    assertNull(cc.getCore("does_not_exist"));
+    // check that we get a 500 accessing the core with an init failure
+    try {
+      SolrCore c = cc.getCore("bogus");
+      fail("Failed to get Exception on accessing core with init failure");
+    } catch (SolrException ex) {
+      assertEquals(500, ex.code());
+      // double wrapped
+      String cause = ex.getCause().getCause().getMessage();
+      assertTrue("getCore() ex cause doesn't mention init fail: " + cause,
+          0 < cause.indexOf("bogus_path"));
+
+    }
+
+    cc.shutdown();
+  }
+
+  @Test
+  public void testCoreInitFailuresOnReload() throws Exception {
+
+    // reused state
+    Map<String,CoreContainer.CoreLoadFailure> failures = null;
+    Collection<String> cores = null;
+    Exception fail = null;
+
+    // -----
+    // init the  CoreContainer with the mix of ok/bad cores
+    MockCoresLocator cl = new MockCoresLocator();
+
+    SolrResourceLoader resourceLoader = new SolrResourceLoader(createTempDir().toString());
+
+    System.setProperty("configsets", getFile("solr/configsets").getAbsolutePath());
+
+    final CoreContainer cc = new CoreContainer(ConfigSolr.fromString(resourceLoader, CONFIGSETS_SOLR_XML), cl);
+    cl.add(new CoreDescriptor(cc, "col_ok", "col_ok", "configSet", "minimal"));
+    cl.add(new CoreDescriptor(cc, "col_bad", "col_bad", "configSet", "bad-mergepolicy"));
+    cc.load();
+
+    // check that we have the cores we expect
+    cores = cc.getCoreNames();
+    assertNotNull("core names is null", cores);
+    assertEquals("wrong number of cores", 1, cores.size());
+    assertTrue("col_ok not found", cores.contains("col_ok"));
+
+    // check that we have the failures we expect
+    failures = cc.getCoreInitFailures();
+    assertNotNull("core failures is a null map", failures);
+    assertEquals("wrong number of core failures", 1, failures.size());
+    fail = failures.get("col_bad").exception;
+    assertNotNull("null failure for test core", fail);
+    assertTrue("init failure doesn't mention problem: " + fail.getMessage(),
+        0 < fail.getMessage().indexOf("DummyMergePolicy"));
+
+    // check that we get null accessing a non-existent core
+    assertNull(cc.getCore("does_not_exist"));
+    // check that we get a 500 accessing the core with an init failure
+    try {
+      SolrCore c = cc.getCore("col_bad");
+      fail("Failed to get Exception on accessing core with init failure");
+    } catch (SolrException ex) {
+      assertEquals(500, ex.code());
+      // double wrapped
+      String cause = ex.getCause().getCause().getMessage();
+      assertTrue("getCore() ex cause doesn't mention init fail: " + cause,
+          0 < cause.indexOf("DummyMergePolicy"));
+    }
+
+    // -----
+    // "fix" the bad collection
+    FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"),
+        FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "solrconfig.xml"));
+    FileUtils.copyFile(getFile("solr/collection1/conf/schema-minimal.xml"),
+        FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "schema.xml"));
+    final CoreDescriptor fixed = new CoreDescriptor(cc, "col_bad", "col_bad");
+    cc.create(fixed);
+
+    // check that we have the cores we expect
+    cores = cc.getCoreNames();
+    assertNotNull("core names is null", cores);
+    assertEquals("wrong number of cores", 2, cores.size());
+    assertTrue("col_ok not found", cores.contains("col_ok"));
+    assertTrue("col_bad not found", cores.contains("col_bad"));
+
+    // check that we have the failures we expect
+    failures = cc.getCoreInitFailures();
+    assertNotNull("core failures is a null map", failures);
+    assertEquals("wrong number of core failures", 0, failures.size());
+
+
+    // -----
+    // try to add a collection with a path that doesn't exist
+    final CoreDescriptor bogus = new CoreDescriptor(cc, "bogus", "bogus_path");
+    try {
+      ignoreException(Pattern.quote("bogus_path"));
+      cc.create(bogus);
+      fail("bogus inst dir failed to trigger exception from create");
+    } catch (SolrException e) {
+      assertTrue("init exception doesn't mention bogus dir: " + e.getCause().getCause().getMessage(),
+          0 < e.getCause().getCause().getMessage().indexOf("bogus_path"));
+
+    }
+
+    // check that we have the cores we expect
+    cores = cc.getCoreNames();
+    assertNotNull("core names is null", cores);
+    assertEquals("wrong number of cores", 2, cores.size());
+    assertTrue("col_ok not found", cores.contains("col_ok"));
+    assertTrue("col_bad not found", cores.contains("col_bad"));
+
+    // check that we have the failures we expect
+    failures = cc.getCoreInitFailures();
+    assertNotNull("core failures is a null map", failures);
+    assertEquals("wrong number of core failures", 1, failures.size());
+    fail = failures.get("bogus").exception;
+    assertNotNull("null failure for test core", fail);
+    assertTrue("init failure doesn't mention problem: " + fail.getCause().getMessage(),
+        0 < fail.getCause().getMessage().indexOf("bogus_path"));
+
+    // check that we get null accessing a non-existent core
+    assertNull(cc.getCore("does_not_exist"));
+    // check that we get a 500 accessing the core with an init failure
+    try {
+      SolrCore c = cc.getCore("bogus");
+      fail("Failed to get Exception on accessing core with init failure");
+    } catch (SolrException ex) {
+      assertEquals(500, ex.code());
+      // double wrapped
+      String cause = ex.getCause().getCause().getMessage();
+      assertTrue("getCore() ex cause doesn't mention init fail: " + cause,
+          0 < cause.indexOf("bogus_path"));
+    }
+
+    // -----
+    // break col_bad's config and try to RELOAD to add failure
+
+    final long col_bad_old_start = getCoreStartTime(cc, "col_bad");
+
+    FileUtils.write
+        (FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "solrconfig.xml"),
+            "This is gibberish, not valid XML <",
+            IOUtils.UTF_8);
+
+    try {
+      ignoreException(Pattern.quote("SAX"));
+      cc.reload("col_bad");
+      fail("corrupt solrconfig.xml failed to trigger exception from reload");
+    } catch (SolrException e) {
+      Throwable rootException = getWrappedException(e);
+      assertTrue("We're supposed to have a wrapped SAXParseException here, but we don't",
+          rootException instanceof SAXParseException);
+      SAXParseException se = (SAXParseException) rootException;
+      assertTrue("reload exception doesn't refer to solrconfig.xml " + se.getSystemId(),
+          0 < se.getSystemId().indexOf("solrconfig.xml"));
+
+    }
+
+    assertEquals("Failed core reload should not have changed start time",
+        col_bad_old_start, getCoreStartTime(cc, "col_bad"));
+
+    // check that we have the cores we expect
+    cores = cc.getCoreNames();
+    assertNotNull("core names is null", cores);
+    assertEquals("wrong number of cores", 2, cores.size());
+    assertTrue("col_ok not found", cores.contains("col_ok"));
+    assertTrue("col_bad not found", cores.contains("col_bad"));
+
+    // check that we have the failures we expect
+    failures = cc.getCoreInitFailures();
+    assertNotNull("core failures is a null map", failures);
+    assertEquals("wrong number of core failures", 2, failures.size());
+    Throwable ex = getWrappedException(failures.get("col_bad").exception);
+    assertNotNull("null failure for test core", ex);
+    assertTrue("init failure isn't SAXParseException",
+        ex instanceof SAXParseException);
+    SAXParseException saxEx = (SAXParseException) ex;
+    assertTrue("init failure doesn't mention problem: " + saxEx.toString(), saxEx.getSystemId().contains("solrconfig.xml"));
+
+    // ----
+    // fix col_bad's config (again) and RELOAD to fix failure
+    FileUtils.copyFile(getFile("solr/collection1/conf/solrconfig-defaults.xml"),
+        FileUtils.getFile(cc.getSolrHome(), "col_bad", "conf", "solrconfig.xml"));
+    cc.reload("col_bad");
+
+    assertTrue("Core reload should have changed start time",
+        col_bad_old_start < getCoreStartTime(cc, "col_bad"));
+
+
+    // check that we have the cores we expect
+    cores = cc.getCoreNames();
+    assertNotNull("core names is null", cores);
+    assertEquals("wrong number of cores", 2, cores.size());
+    assertTrue("col_ok not found", cores.contains("col_ok"));
+    assertTrue("col_bad not found", cores.contains("col_bad"));
+
+    // check that we have the failures we expect
+    failures = cc.getCoreInitFailures();
+    assertNotNull("core failures is a null map", failures);
+    assertEquals("wrong number of core failures", 1, failures.size());
+
+    cc.shutdown();
+
+  }
+
+  private long getCoreStartTime(final CoreContainer cc, final String name) {
+    try (SolrCore tmp = cc.getCore(name)) {
+      return tmp.getStartTime();
+    }
+  }
 }
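
(Aside, not part of the patch: most of the rewritten tests above bootstrap through the same two-step helper: parse an inline solr.xml string with ConfigSolr.fromString() against a SolrResourceLoader rooted at a temp dir, then load() the container. A minimal standalone sketch of the pattern, with an illustrative xml literal:

    SolrResourceLoader loader = new SolrResourceLoader(createTempDir().toString());
    CoreContainer cc = new CoreContainer(ConfigSolr.fromString(loader, "<solr></solr>"));
    cc.load();
    try {
      // create cores against named configsets, run assertions, etc.
    } finally {
      cc.shutdown();  // always shut down, even when the test body throws
    }
)
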
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
index 3006626..96c7f52 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
@@ -110,7 +110,12 @@
 
   private CoreContainer init() throws Exception {
     final CoreContainer cores = new CoreContainer();
-    cores.load();
+    try {
+      cores.load();
+    } catch (Exception e) {
+      cores.shutdown();
+      throw e;
+    }
     return cores;
   }
 
@@ -135,8 +140,6 @@
 
     CoreContainer cc = init();
     try {
-      assertEquals(ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME,
-                   cc.getDefaultCoreName());
 
       TestLazyCores.checkInCores(cc, "core1");
       TestLazyCores.checkNotInCores(cc, "lazy1", "core2", "collection1");
diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
index 9c0c683..d46d474 100644
--- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
+++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
@@ -26,9 +26,11 @@
 import org.apache.solr.util.RESTfulServerProvider;
 import org.apache.solr.util.RestTestHarness;
 import org.apache.solr.util.SimplePostTool;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
@@ -55,19 +57,17 @@
     }
   }
 
-
-
   @Override
-  public void doTest() throws Exception {
-
-   setupHarnesses();
-   dynamicLoading();
-
-
-
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    for (RestTestHarness r : restTestHarnesses) {
+      r.close();
+    }
   }
 
-  private void dynamicLoading() throws Exception {
+  @Test
+  public void testDynamicLoading() throws Exception {
+    setupHarnesses();
     String payload = "{\n" +
         "'create-requesthandler' : { 'name' : '/test1', 'class': 'org.apache.solr.core.BlobStoreTestRequestHandler' , 'lib':'test','version':'1'}\n" +
         "}";
@@ -129,7 +129,7 @@
 
 
     payload = "{\n" +
-        "'update-requesthandler' : { 'name' : '/test1', 'class': 'org.apache.solr.core.BlobStoreTestRequestHandlerV2' , 'lib':'test','version':'2'}\n" +
+        "'update-requesthandler' : { 'name' : '/test1', 'class': 'org.apache.solr.core.BlobStoreTestRequestHandlerV2' , 'lib':'test','version':2}\n" +
         "}";
 
     client = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
@@ -139,10 +139,10 @@
         "/config/overlay?wt=json",
         null,
         Arrays.asList("overlay", "requestHandler", "/test1", "version"),
-        "2",10);
+        2L, 10);
 
     success= false;
-    for(int i=0;i<50;i++) {
+    for(int i=0;i<100;i++) {
       map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
       if(BlobStoreTestRequestHandlerV2.class.getName().equals(map.get("class"))) {
         success = true;
@@ -153,7 +153,7 @@
 
     assertTrue("New version of class is not loaded " + new String(ZkStateReader.toJSON(map), StandardCharsets.UTF_8), success);
 
-    for(int i=0;i<50;i++) {
+    for(int i=0;i<100;i++) {
       map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
       if("X val".equals(map.get("x"))){
          success = true;
@@ -201,6 +201,4 @@
     return bos.getByteBuffer();
   }
 
-
-
 }
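
(Aside, not part of the patch: the loops widened from 50 to 100 iterations above are a poll-until-visible idiom: a newly uploaded handler version shows up asynchronously, so the test retries instead of asserting once. A minimal sketch of the shape, where fetchStatus() is a hypothetical stand-in for TestSolrConfigHandler.getRespMap():

    boolean success = false;
    for (int i = 0; i < 100; i++) {
      Map<String,Object> map = fetchStatus();        // hypothetical helper
      if (expectedClass.equals(map.get("class"))) {  // new version visible yet?
        success = true;
        break;
      }
      Thread.sleep(100);
    }
    assertTrue("handler never reached the expected version", success);
)
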
diff --git a/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java b/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java
index 4836195..b38ec06 100644
--- a/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java
+++ b/solr/core/src/test/org/apache/solr/core/TestImplicitCoreProperties.java
@@ -1,7 +1,6 @@
 package org.apache.solr.core;
 
 import org.apache.solr.SolrTestCaseJ4;
-import java.io.File;
 import org.junit.Test;
 
 /*
@@ -22,18 +21,16 @@
  */
 public class TestImplicitCoreProperties extends SolrTestCaseJ4 {
 
-  public static final String SOLRXML =
-      "<solr><cores><core name=\"collection1\" instanceDir=\"collection1\" config=\"solrconfig-implicitproperties.xml\"/></cores></solr>";
-
   @Test
   public void testImplicitPropertiesAreSubstitutedInSolrConfig() {
 
-    CoreContainer cc = createCoreContainer(TEST_HOME(), SOLRXML);
+    CoreContainer cc
+        = createCoreContainer("collection1", "data", "solrconfig-implicitproperties.xml", "schema.xml");
+    
     try {
-      cc.load();
       assertQ(req("q", "*:*")
               , "//str[@name='dummy1'][.='collection1']"
-              , "//str[@name='dummy2'][.='data"+File.separator+"']"
+              , "//str[@name='dummy2'][.='data']"
               , "//str[@name='dummy3'][.='solrconfig-implicitproperties.xml']"
               , "//str[@name='dummy4'][.='schema.xml']"
               , "//str[@name='dummy5'][.='false']"
diff --git a/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java b/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java
index fa6bc3f..74b3f4b 100644
--- a/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java
+++ b/solr/core/src/test/org/apache/solr/core/TestInfoStreamLogging.java
@@ -31,7 +31,7 @@
   }
   
   public void testIndexConfig() throws Exception {
-    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
 
     assertTrue(iwc.getInfoStream() instanceof LoggingInfoStream);
   }
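
This hunk, and the matching ones in TestMergePolicyConfig and TestSolrIndexConfig below, track one signature change: toIndexWriterConfig now takes the whole SolrCore rather than just its schema. An illustrative sketch of that kind of parameter widening; the interfaces here are stand-ins, not Solr's types:

interface Schema {}

interface Core {
  Schema getLatestSchema();
  String getName();
}

final class IndexConfigSketch {
  // Before: build(Schema schema). Taking the owner keeps the schema
  // reachable while letting the builder consult other core state too.
  void build(Core core) {
    Schema schema = core.getLatestSchema(); // still available via the owner
    // ... the builder may now also use core.getName(), etc.
  }
}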
diff --git a/solr/core/src/test/org/apache/solr/core/TestInitParams.java b/solr/core/src/test/org/apache/solr/core/TestInitParams.java
index 6085a84..6ffd38f 100644
--- a/solr/core/src/test/org/apache/solr/core/TestInitParams.java
+++ b/solr/core/src/test/org/apache/solr/core/TestInitParams.java
@@ -76,18 +76,7 @@
 
   }
 
-  /*@Test
-  public void testComponentWithInitParamAndRequestParam(){
-    for (String s : Arrays.asList("/dump4")) {
-      SolrRequestHandler handler = h.getCore().getRequestHandler(s);
-      SolrQueryResponse rsp = new SolrQueryResponse();
-      handler.handleRequest(req("param", "a","param","b" ,"param","c", "useParam","a"), rsp);
-      NamedList def = (NamedList) rsp.getValues().get("params");
-      assertEquals("A", def.get("a"));
-      assertEquals("B", def.get("b"));
-      assertEquals("C", def.get("c"));
-    }
-  }*/
+
   @Test
   public void testComponentWithConflictingInitParams(){
     SolrRequestHandler handler = h.getCore().getRequestHandler("/dump2");
@@ -119,7 +108,18 @@
 
   }
 
-
-
+  public void testMatchPath(){
+    InitParams initParams = new InitParams(new PluginInfo(InitParams.TYPE, ZkNodeProps.makeMap("path","/update/json/docs")));
+    assertFalse(initParams.matchPath("/update"));
+    assertTrue(initParams.matchPath("/update/json/docs"));
+    initParams = new InitParams(new PluginInfo(InitParams.TYPE, ZkNodeProps.makeMap("path","/update/**")));
+    assertTrue(initParams.matchPath("/update/json/docs"));
+    assertTrue(initParams.matchPath("/update/json"));
+    assertTrue(initParams.matchPath("/update"));
+    initParams = new InitParams(new PluginInfo(InitParams.TYPE, ZkNodeProps.makeMap("path","/update/*")));
+    assertFalse(initParams.matchPath("/update/json/docs"));
+    assertTrue(initParams.matchPath("/update/json"));
+    assertTrue(initParams.matchPath("/update"));
+  }
 
 }
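
The new testMatchPath pins down the wildcard semantics: /update/** matches the base path and anything at any depth below it, while /update/* matches the base path and at most one further segment. A hypothetical matcher with exactly those semantics (not Solr's InitParams implementation):

final class PathMatcher {
  static boolean matches(String pattern, String path) {
    if (pattern.endsWith("/**")) {
      // base plus any number of further segments
      String base = pattern.substring(0, pattern.length() - 3);
      return path.equals(base) || path.startsWith(base + "/");
    }
    if (pattern.endsWith("/*")) {
      // base plus at most one further segment
      String base = pattern.substring(0, pattern.length() - 2);
      if (path.equals(base)) return true;
      if (!path.startsWith(base + "/")) return false;
      return path.indexOf('/', base.length() + 1) < 0;
    }
    return pattern.equals(path); // exact match otherwise
  }
}

Checked against the assertions above: "/update/*" accepts "/update" and "/update/json" but rejects "/update/json/docs", while "/update/**" accepts all three.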
diff --git a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
index befd579..e15dcb6 100644
--- a/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
+++ b/solr/core/src/test/org/apache/solr/core/TestLazyCores.java
@@ -17,9 +17,10 @@
  * limitations under the License.
  */
 
-import org.apache.commons.lang.StringUtils;
+import com.google.common.collect.ImmutableList;
 import org.apache.commons.codec.Charsets;
 import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CoreAdminParams;
@@ -31,9 +32,7 @@
 import org.apache.solr.update.AddUpdateCommand;
 import org.apache.solr.update.CommitUpdateCommand;
 import org.apache.solr.update.UpdateHandler;
-import org.apache.solr.util.TestHarness;
-import org.junit.Before;
-import org.junit.BeforeClass;
+import org.apache.solr.util.ReadOnlyCoresLocator;
 import org.junit.Test;
 
 import java.io.File;
@@ -48,18 +47,7 @@
 
 public class TestLazyCores extends SolrTestCaseJ4 {
 
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    initCore("solrconfig-minimal.xml", "schema-tiny.xml");
-  }
-
   private File solrHomeDirectory;
-  
-  @Before
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-  }
 
   private CoreContainer init() throws Exception {
     solrHomeDirectory = createTempDir().toFile();
@@ -69,17 +57,7 @@
     }
 
     SolrResourceLoader loader = new SolrResourceLoader(solrHomeDirectory.getAbsolutePath());
-
-    File solrXml = new File(solrHomeDirectory, "solr.xml");
-    FileUtils.write(solrXml, LOTS_SOLR_XML, Charsets.UTF_8.toString());
-    ConfigSolrXmlOld config = (ConfigSolrXmlOld) ConfigSolr.fromFile(loader, solrXml);
-
-    CoresLocator locator = new SolrXMLCoresLocator.NonPersistingLocator(LOTS_SOLR_XML, config);
-
-
-    final CoreContainer cores = new CoreContainer(loader, config, locator);
-    cores.load();
-    return cores;
+    return createCoreContainer(new LazyCoreTestConfig(loader));
   }
   
   @Test
@@ -172,14 +150,16 @@
 
       // Now just insure that the normal searching on "collection1" finds _0_ on the same query that found _2_ above.
       // Use of makeReq above and req below is tricky, very tricky.
+      SolrCore collection1 = cc.getCore("collection1");
       assertQ("test raw query",
-          req("q", "{!raw f=v_t}hello", "wt", "xml")
+          makeReq(collection1, "q", "{!raw f=v_t}hello", "wt", "xml")
           , "//result[@numFound='0']"
       );
 
       checkInCores(cc, "collectionLazy4");
 
       core4.close();
+      collection1.close();
     } finally {
       cc.shutdown();
     }
@@ -411,72 +391,6 @@
     }
   }
 
-
-  //Make sure persisting not-loaded lazy cores is done. See SOLR-4347
-
-  @Test
-  public void testPersistence() throws Exception {
-    final CoreContainer cc = init();
-    try {
-      copyMinConf(new File(solrHomeDirectory, "core1"));
-      copyMinConf(new File(solrHomeDirectory, "core2"));
-      copyMinConf(new File(solrHomeDirectory, "core3"));
-      copyMinConf(new File(solrHomeDirectory, "core4"));
-
-      final CoreDescriptor cd1 = buildCoreDescriptor(cc, "core1", "./core1")
-          .isTransient(true).loadOnStartup(true).build();
-      final CoreDescriptor cd2 = buildCoreDescriptor(cc, "core2", "./core2")
-          .isTransient(true).loadOnStartup(false).build();
-      final CoreDescriptor cd3 = buildCoreDescriptor(cc, "core3", "./core3")
-          .isTransient(false).loadOnStartup(true).build();
-      final CoreDescriptor cd4 = buildCoreDescriptor(cc, "core4", "./core4")
-          .isTransient(false).loadOnStartup(false).build();
-
-
-      SolrCore core1 = cc.create(cd1);
-      SolrCore core2 = cc.create(cd2);
-      SolrCore core3 = cc.create(cd3);
-      SolrCore core4 = cc.create(cd4);
-
-      SolrXMLCoresLocator.NonPersistingLocator locator =
-          (SolrXMLCoresLocator.NonPersistingLocator) cc.getCoresLocator();
-
-      TestHarness.validateXPath(locator.xml,
-          "/solr/cores/core[@name='collection1']",
-          "/solr/cores/core[@name='collectionLazy2']",
-          "/solr/cores/core[@name='collectionLazy3']",
-          "/solr/cores/core[@name='collectionLazy4']",
-          "/solr/cores/core[@name='collectionLazy5']",
-          "/solr/cores/core[@name='collectionLazy6']",
-          "/solr/cores/core[@name='collectionLazy7']",
-          "/solr/cores/core[@name='collectionLazy8']",
-          "/solr/cores/core[@name='collectionLazy9']",
-          "/solr/cores/core[@name='core1']",
-          "/solr/cores/core[@name='core2']",
-          "/solr/cores/core[@name='core3']",
-          "/solr/cores/core[@name='core4']",
-          "13=count(/solr/cores/core)");
-
-      removeOne(cc, "collectionLazy2");
-      removeOne(cc, "collectionLazy3");
-      removeOne(cc, "collectionLazy4");
-      removeOne(cc, "collectionLazy5");
-      removeOne(cc, "collectionLazy6");
-      removeOne(cc, "collectionLazy7");
-      removeOne(cc, "core1");
-      removeOne(cc, "core2");
-      removeOne(cc, "core3");
-      removeOne(cc, "core4");
-
-      // now test that unloading a core means the core is not persisted
-      TestHarness.validateXPath(locator.xml, "3=count(/solr/cores/core)");
-
-    } finally {
-      cc.shutdown();
-    }
-  }
-
-
   // Test that transient cores
   // 1> produce errors as appropriate when the config or schema files are foo'd
   // 2> "self heal". That is, if the problem is corrected can the core be reloaded and used?
@@ -627,12 +541,8 @@
     SolrResourceLoader loader = new SolrResourceLoader(solrHomeDirectory.getAbsolutePath());
     ConfigSolrXml config = (ConfigSolrXml) ConfigSolr.fromFile(loader, solrXml);
 
-    CoresLocator locator = new CorePropertiesLocator(solrHomeDirectory.getAbsolutePath());
-
     // OK this should succeed, but at the end we should have recorded a series of errors.
-    final CoreContainer cores = new CoreContainer(loader, config, locator);
-    cores.load();
-    return cores;
+    return createCoreContainer(config);
   }
 
   // We want to see that the core "heals itself" if an un-corrupted file is written to the directory.
@@ -669,9 +579,6 @@
     }
   }
 
-  private void removeOne(CoreContainer cc, String coreName) {
-    cc.unload(coreName);
-  }
   public static void checkNotInCores(CoreContainer cc, String... nameCheck) {
     Collection<String> names = cc.getCoreNames();
     for (String name : nameCheck) {
@@ -712,26 +619,51 @@
     return StringUtils.join(args, File.separator);
   }
 
-  private final static String LOTS_SOLR_XML = " <solr persistent=\"false\"> " +
-      "<cores adminPath=\"/admin/cores\" defaultCoreName=\"collectionLazy2\" transientCacheSize=\"4\">  " +
-      "<core name=\"collection1\" instanceDir=\"collection1\"  /> " +
+  public static class LazyCoreTestConfig extends ConfigSolr {
 
-      "<core name=\"collectionLazy2\" instanceDir=\"collection2\" transient=\"true\" loadOnStartup=\"true\"   /> " +
+    public LazyCoreTestConfig(SolrResourceLoader loader) {
+      super(loader, null);
+    }
 
-      "<core name=\"collectionLazy3\" instanceDir=\"collection3\" transient=\"on\" loadOnStartup=\"false\"    /> " +
+    static CoreDescriptor makeCoreDescriptor(CoreContainer cc, String coreName, String instanceDir, String isTransient, String loadOnStartup) {
+      return new CoreDescriptor(cc, coreName, instanceDir,
+          CoreDescriptor.CORE_TRANSIENT, isTransient,
+          CoreDescriptor.CORE_LOADONSTARTUP, loadOnStartup);
+    }
 
-      "<core name=\"collectionLazy4\" instanceDir=\"collection4\" transient=\"false\" loadOnStartup=\"false\" /> " +
+    @Override
+    public CoresLocator getCoresLocator() {
+      return new ReadOnlyCoresLocator() {
+        @Override
+        public List<CoreDescriptor> discover(CoreContainer cc) {
+          return ImmutableList.of(
+              new CoreDescriptor(cc, "collection1", "collection1"),
+              makeCoreDescriptor(cc, "collectionLazy2", "collection2", "true", "true"),
+              makeCoreDescriptor(cc, "collectionLazy3", "collection3", "on", "false"),
+              makeCoreDescriptor(cc, "collectionLazy4", "collection4", "false", "false"),
+              makeCoreDescriptor(cc, "collectionLazy5", "collection5", "false", "true"),
+              makeCoreDescriptor(cc, "collectionLazy6", "collection6", "true", "false"),
+              makeCoreDescriptor(cc, "collectionLazy7", "collection7", "true", "false"),
+              makeCoreDescriptor(cc, "collectionLazy8", "collection8", "true", "false"),
+              makeCoreDescriptor(cc, "collectionLazy9", "collection9", "true", "false")
+          );
+        }
+      };
+    }
 
-      "<core name=\"collectionLazy5\" instanceDir=\"collection5\" transient=\"false\" loadOnStartup=\"true\" /> " +
+    @Override
+    public PluginInfo getShardHandlerFactoryPluginInfo() {
+      return null;
+    }
 
-      "<core name=\"collectionLazy6\" instanceDir=\"collection6\" transient=\"true\" loadOnStartup=\"false\" /> " +
+    @Override
+    protected String getProperty(CfgProp key) {
+      switch (key) {
+        case SOLR_TRANSIENTCACHESIZE:
+          return "4";
+      }
+      return null;
+    }
+  }
 
-      "<core name=\"collectionLazy7\" instanceDir=\"collection7\" transient=\"true\" loadOnStartup=\"false\" /> " +
-
-      "<core name=\"collectionLazy8\" instanceDir=\"collection8\" transient=\"true\" loadOnStartup=\"false\" /> " +
-
-      "<core name=\"collectionLazy9\" instanceDir=\"collection9\" transient=\"true\" loadOnStartup=\"false\" /> " +
-
-      "</cores> " +
-      "</solr>";
 }
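
The LazyCoreTestConfig above replaces the old inline LOTS_SOLR_XML fixture with a programmatic config whose locator hands back a fixed list of descriptors. A generic sketch of that read-only, fixed-list locator style; the interfaces are stand-ins for Solr's CoresLocator/CoreDescriptor:

import java.util.Arrays;
import java.util.List;

interface Descriptor {
  String name();
}

// Discovery returns a fixed list and nothing is ever persisted, so tests
// cannot accidentally write core metadata to disk.
final class FixedLocator {
  private final List<Descriptor> descriptors;

  FixedLocator(Descriptor... ds) {
    this.descriptors = Arrays.asList(ds);
  }

  List<Descriptor> discover() {
    return descriptors;
  }
}

Compared with parsing an XML string, each core's transient and loadOnStartup flags are now visible at the call site, and the persistence plumbing the deleted testPersistence exercised is no longer needed.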
diff --git a/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java b/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java
index 7e1ebec..1b9c7cb 100644
--- a/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java
+++ b/solr/core/src/test/org/apache/solr/core/TestMergePolicyConfig.java
@@ -45,7 +45,7 @@
 
   public void testDefaultMergePolicyConfig() throws Exception {
     initCore("solrconfig-mergepolicy-defaults.xml","schema-minimal.xml");
-    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
     assertEquals(false, iwc.getUseCompoundFile());
 
     TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class,
@@ -61,7 +61,7 @@
       = Boolean.parseBoolean(System.getProperty("useCompoundFile"));
 
     initCore("solrconfig-mergepolicy-legacy.xml","schema-minimal.xml");
-    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
     assertEquals(expectCFS, iwc.getUseCompoundFile());
 
 
@@ -81,7 +81,7 @@
       = Boolean.parseBoolean(System.getProperty("useCompoundFile"));
 
     initCore("solrconfig-tieredmergepolicy.xml","schema-minimal.xml");
-    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
     assertEquals(expectCFS, iwc.getUseCompoundFile());
 
 
@@ -122,7 +122,7 @@
     System.setProperty("solr.test.log.merge.policy", mpClass.getName());
 
     initCore("solrconfig-logmergepolicy.xml","schema-minimal.xml");
-    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
 
     // verify some props set to -1 get lucene internal defaults
     assertEquals(-1, solrConfig.indexConfig.maxBufferedDocs);
diff --git a/solr/core/src/test/org/apache/solr/core/TestNonNRTOpen.java b/solr/core/src/test/org/apache/solr/core/TestNonNRTOpen.java
deleted file mode 100644
index eca1886..0000000
--- a/solr/core/src/test/org/apache/solr/core/TestNonNRTOpen.java
+++ /dev/null
@@ -1,164 +0,0 @@
-package org.apache.solr.core;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.File;
-import java.util.Collections;
-import java.util.IdentityHashMap;
-import java.util.Set;
-
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.search.SolrIndexSearcher;
-import org.apache.solr.util.RefCounted;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestNonNRTOpen extends SolrTestCaseJ4 {
-  private static final Logger log = LoggerFactory.getLogger(TestNonNRTOpen.class);
-
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    // use a filesystem, because we need to create an index, then "start up solr"
-    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
-    // and dont delete it initially
-    System.setProperty("solr.test.leavedatadir", "true");
-    // turn off nrt
-    System.setProperty("solr.tests.nrtMode", "false");
-    // set these so that merges won't break the test
-    System.setProperty("solr.tests.maxBufferedDocs", "100000");
-    System.setProperty("solr.tests.mergePolicy", "org.apache.lucene.index.LogDocMergePolicy");
-    initCore("solrconfig-basic.xml", "schema-minimal.xml");
-    // add a doc
-    assertU(adoc("foo", "bar"));
-    assertU(commit());
-    File myDir = initCoreDataDir;
-    deleteCore();
-    // boot up again over the same index
-    initCoreDataDir = myDir;
-    initCore("solrconfig-basic.xml", "schema-minimal.xml");
-    // startup
-    assertNotNRT(1);
-  }
-  
-  public void setUp() throws Exception {
-    super.setUp();
-    // delete all, then add initial doc
-    assertU(delQ("*:*"));
-    assertU(adoc("foo", "bar"));
-    assertU(commit());
-  }
-  
-  @AfterClass
-  public static void afterClass() throws Exception {
-    // ensure we clean up after ourselves, this will fire before superclass...
-    System.clearProperty("solr.test.leavedatadir");
-    System.clearProperty("solr.directoryFactory");
-    System.clearProperty("solr.tests.maxBufferedDocs");
-    System.clearProperty("solr.tests.mergePolicy");
-    System.clearProperty("solr.tests.nrtMode");
-  }
-
-  public void testReaderIsNotNRT() {
-    // startup
-    assertNotNRT(1);
-    
-    // core reload
-    String core = h.getCore().getName();
-    log.info("Reloading core: " + h.getCore().toString());
-    h.getCoreContainer().reload(core);
-    assertNotNRT(1);
-    
-    // add a doc and commit
-    assertU(adoc("baz", "doc"));
-    assertU(commit());
-    assertNotNRT(2);
-    
-    // add a doc and core reload
-    assertU(adoc("bazz", "doc2"));
-    log.info("Reloading core: " + h.getCore().toString());
-    h.getCoreContainer().reload(core);
-    assertNotNRT(3);
-  }
-
-  public void testSharedCores() {
-    // clear out any junk
-    assertU(optimize());
-    
-    Set<Object> s1 = getCoreCacheKeys();
-    assertEquals(1, s1.size());
-    
-    // add a doc, will go in a new segment
-    assertU(adoc("baz", "doc"));
-    assertU(commit());
-    
-    Set<Object> s2 = getCoreCacheKeys();
-    assertEquals(2, s2.size());
-    assertTrue(s2.containsAll(s1));
-    
-    // add two docs, will go in a new segment
-    assertU(adoc("foo", "doc"));
-    assertU(adoc("foo2", "doc"));
-    assertU(commit());
-    
-    Set<Object> s3 = getCoreCacheKeys();
-    assertEquals(3, s3.size());
-    assertTrue(s3.containsAll(s2));
-    
-    // delete a doc
-    assertU(delQ("foo2:doc"));
-    assertU(commit());
-    
-    // same cores
-    assertEquals(s3, getCoreCacheKeys());
-  }
-  
-  static void assertNotNRT(int maxDoc) {
-    SolrCore core = h.getCore();
-    log.info("Checking notNRT & maxDoc=" + maxDoc + " of core=" + core.toString());
-    RefCounted<SolrIndexSearcher> searcher = core.getSearcher();
-    try {
-      SolrIndexSearcher s = searcher.get();
-      DirectoryReader ir = s.getRawReader();
-      assertEquals("SOLR-5815? : wrong maxDoc: core=" + core.toString() +" searcher=" + s.toString(),
-                   maxDoc, ir.maxDoc());
-      assertFalse("SOLR-5815? : expected non-NRT reader, got: " + ir, ir.toString().contains(":nrt"));
-    } finally {
-      searcher.decref();
-    }
-  }
-
-  private Set<Object> getCoreCacheKeys() {
-    RefCounted<SolrIndexSearcher> searcher = h.getCore().getSearcher();
-    Set<Object> set = Collections.newSetFromMap(new IdentityHashMap<Object,Boolean>());
-    try {
-      DirectoryReader ir = searcher.get().getRawReader();
-      for (LeafReaderContext context : ir.leaves()) {
-        set.add(context.reader().getCoreCacheKey());
-      }
-    } finally {
-      searcher.decref();
-    }
-    return set;
-  }
-}
-
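
TestNonNRTOpen is deleted above. One idiom worth noting from its getCoreCacheKeys helper: per-segment reader cache keys were collected into an identity-based set, because cache keys are compared by reference, not by equals(). A standalone sketch of that idiom:

import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;

final class IdentitySetDemo {
  public static void main(String[] args) {
    // Identity-based set: membership uses ==, not equals(), which is what
    // you want when distinct-but-equal objects must stay distinct.
    Set<Object> keys =
        Collections.newSetFromMap(new IdentityHashMap<Object, Boolean>());
    String k1 = new String("segment"); // two distinct, equal instances
    String k2 = new String("segment");
    keys.add(k1);
    keys.add(k2);
    System.out.println(keys.size()); // prints 2; a HashSet would hold 1
  }
}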
diff --git a/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java
index 778b23b..8491468 100644
--- a/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java
+++ b/solr/core/src/test/org/apache/solr/core/TestShardHandlerFactory.java
@@ -38,13 +38,4 @@
     cc.shutdown();
   }
 
-  public void testOldXML() throws Exception {
-    CoreContainer cc = CoreContainer.createAndLoad(TEST_HOME(), new File(TEST_HOME(), "solr-shardhandler-old.xml"));
-    ShardHandlerFactory factory = cc.getShardHandlerFactory();
-    assertTrue(factory instanceof MockShardHandlerFactory);
-    NamedList args = ((MockShardHandlerFactory)factory).args;
-    assertEquals("myMagicRequiredValue", args.get("myMagicRequiredParameter"));
-    factory.close();
-    cc.shutdown();
-  }
 }
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
index 43dae7c..323afd9 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
@@ -18,25 +18,11 @@
  */
 
 
-import java.io.File;
-import java.io.IOException;
-import java.io.StringReader;
-import java.nio.charset.StandardCharsets;
-import java.text.MessageFormat;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
-
 import com.google.common.collect.ImmutableList;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.handler.TestSolrConfigHandlerCloud;
 import org.apache.solr.handler.TestSolrConfigHandlerConcurrent;
 import org.apache.solr.util.RestTestBase;
 import org.apache.solr.util.RestTestHarness;
@@ -49,7 +35,23 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.IOException;
+import java.io.StringReader;
+import java.text.MessageFormat;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
+
+import static java.util.Arrays.asList;
 import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
+import static org.apache.solr.handler.TestBlobHandler.getAsString;
+import static org.apache.solr.handler.TestSolrConfigHandlerCloud.compareValues;
 
 public class TestSolrConfigHandler extends RestTestBase {
   public static final Logger log = LoggerFactory.getLogger(TestSolrConfigHandler.class);
@@ -86,12 +88,26 @@
       jetty = null;
     }
     client = null;
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
     restTestHarness = null;
   }
 
 
   public void testProperty() throws Exception{
     RestTestHarness harness = restTestHarness;
+    Map confMap = getRespMap("/config?wt=json", harness);
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/luke")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/system")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/mbeans")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/plugins")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/threads")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/properties")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/logging")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/file")));
+    assertNotNull(getObjectByPath(confMap, false, Arrays.asList("config", "requestHandler", "/admin/ping")));
+
     String payload= "{\n" +
         " 'set-property' : { 'updateHandler.autoCommit.maxDocs':100, 'updateHandler.autoCommit.maxTime':10 } \n" +
         " }";
@@ -103,7 +119,7 @@
     assertEquals("100",  String.valueOf(getObjectByPath(props, true, ImmutableList.of("updateHandler", "autoCommit", "maxDocs")) ));
     assertEquals("10",  String.valueOf(getObjectByPath(props, true, ImmutableList.of("updateHandler", "autoCommit", "maxTime")) ));
 
-    m = (Map) getRespMap("/config?wt=json" ,harness).get("solrConfig");
+    m = (Map) getRespMap("/config?wt=json", harness).get("config");
     assertNotNull(m);
 
     assertEquals( "100",String.valueOf(getObjectByPath(m, true, ImmutableList.of("updateHandler", "autoCommit", "maxDocs"))));
@@ -168,7 +184,7 @@
         10);
 
     payload = "{\n" +
-        "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy' , 'a':'b'}\n" +
+        "'update-requesthandler' : { 'name' : '/x', 'class': 'org.apache.solr.handler.DumpRequestHandler' , 'startup' : 'lazy' , 'a':'b' , 'defaults': {'def_a':'def A val'}}\n" +
         "}";
     runConfigCommand(writeHarness,"/config?wt=json", payload);
 
@@ -180,6 +196,14 @@
         "b",
         10);
 
+    testForResponseElement(writeHarness,
+        testServerBaseUrl,
+        "/x?wt=json&getdefaults=true&json.nl=map",
+        cloudSolrServer,
+        Arrays.asList("getdefaults", "def_a"),
+        "def A val",
+        10);
+
     payload = "{\n" +
         "'delete-requesthandler' : '/x'" +
         "}";
@@ -201,11 +225,11 @@
 
   }
 
-  public static void testForResponseElement(RestTestHarness harness,
+  public static Map testForResponseElement(RestTestHarness harness,
                                             String testServerBaseUrl,
                                             String uri,
                                             CloudSolrClient cloudSolrServer,List<String> jsonPath,
-                                            String expected,
+                                            Object expected,
                                             long maxTimeoutSeconds ) throws Exception {
 
     boolean success = false;
@@ -228,7 +252,220 @@
 
     }
 
-    assertTrue(MessageFormat.format("Could not get expected value  {0} for path {1} full output {2}", expected, jsonPath, new String(ZkStateReader.toJSON(m), StandardCharsets.UTF_8)), success);
+    assertTrue(MessageFormat.format("Could not get expected value {0} for path {1}; full output {2}", expected, jsonPath, getAsString(m)), success);
+    return m;
+  }
+
+  public void testReqParams() throws Exception{
+    RestTestHarness harness = restTestHarness;
+    String payload = " {\n" +
+        "  'set' : {'x': {" +
+        "                    'a':'A val',\n" +
+        "                    'b': 'B val'}\n" +
+        "             }\n" +
+        "  }";
+
+
+    TestSolrConfigHandler.runConfigCommand(harness,"/config/params?wt=json", payload);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "x", "a"),
+        "A val",
+        10);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "x", "b"),
+        "B val",
+        10);
+
+    payload = "{\n" +
+        "'create-requesthandler' : { 'name' : '/dump', 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" +
+        "}";
+
+    TestSolrConfigHandler.runConfigCommand(harness, "/config?wt=json", payload);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/overlay?wt=json",
+        null,
+        Arrays.asList("overlay", "requestHandler", "/dump", "name"),
+        "/dump",
+        10);
+
+    TestSolrConfigHandler.testForResponseElement(harness,
+        null,
+        "/dump?wt=json&useParams=x",
+        null,
+        Arrays.asList("params", "a"),
+        "A val",
+        5);
+    TestSolrConfigHandler.testForResponseElement(harness,
+        null,
+        "/dump?wt=json&useParams=x&a=fomrequest",
+        null,
+        Arrays.asList("params", "a"),
+        "fomrequest",
+        5);
+
+    payload = "{\n" +
+        "'create-requesthandler' : { 'name' : '/dump1', 'class': 'org.apache.solr.handler.DumpRequestHandler', 'useParams':'x' }\n" +
+        "}";
+
+    TestSolrConfigHandler.runConfigCommand(harness,"/config?wt=json", payload);
+
+    TestSolrConfigHandler.testForResponseElement(harness,
+        null,
+        "/config/overlay?wt=json",
+        null,
+        Arrays.asList("overlay", "requestHandler", "/dump1", "name"),
+        "/dump1",
+        10);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/dump1?wt=json",
+        null,
+        Arrays.asList("params", "a"),
+        "A val",
+        5);
+
+
+
+    payload = " {\n" +
+        "  'set' : {'y':{\n" +
+        "                'c':'CY val',\n" +
+        "                'b': 'BY val', " +
+        "                'd': ['val 1', 'val 2']}\n" +
+        "             }\n" +
+        "  }";
+
+
+    TestSolrConfigHandler.runConfigCommand(harness,"/config/params?wt=json", payload);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "y", "c"),
+        "CY val",
+        10);
+
+    TestSolrConfigHandler.testForResponseElement(harness,
+        null,
+        "/dump?wt=json&useParams=y",
+        null,
+        Arrays.asList("params", "c"),
+        "CY val",
+        5);
+
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/dump1?wt=json&useParams=y",
+        null,
+        Arrays.asList("params", "b"),
+        "BY val",
+        5);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/dump1?wt=json&useParams=y",
+        null,
+        Arrays.asList("params", "a"),
+        null,
+        5);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/dump1?wt=json&useParams=y",
+        null,
+        Arrays.asList("params", "d"),
+        Arrays.asList("val 1", "val 2") ,
+        5);
+
+    payload = " {\n" +
+        "  'update' : {'y': {\n" +
+        "                'c':'CY val modified',\n" +
+        "                'e':'EY val',\n" +
+        "                'b': 'BY val'" +
+        "}\n" +
+        "             }\n" +
+        "  }";
+
+
+    TestSolrConfigHandler.runConfigCommand(harness,"/config/params?wt=json", payload);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "y", "c"),
+        "CY val modified",
+        10);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "y", "e"),
+        "EY val",
+        10);
+
+    payload = " {\n" +
+        "  'set' : {'y': {\n" +
+        "                'p':'P val',\n" +
+        "                'q': 'Q val'" +
+        "}\n" +
+        "             }\n" +
+        "  }";
+
+
+    TestSolrConfigHandler.runConfigCommand(harness,"/config/params?wt=json", payload);
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "y", "p"),
+        "P val",
+        10);
+
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "y", "c"),
+        null,
+        10);
+    payload = " {'delete' : 'y'}";
+    TestSolrConfigHandler.runConfigCommand(harness,"/config/params?wt=json", payload);
+    TestSolrConfigHandler.testForResponseElement(
+        harness,
+        null,
+        "/config/params?wt=json",
+        null,
+        Arrays.asList("response", "params", "y", "p"),
+        null,
+        10);
+
+
   }
 
 
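
The testReqParams walk-through above fixes the semantics of the request-params API: 'set' replaces the named params map wholesale (the final set of y drops c, e and b, leaving only p and q), 'update' merges keys into the existing map (c is overwritten, e is added, untouched keys survive), and 'delete' removes the whole named set. A small model of those semantics over plain maps, as a hypothetical sketch rather than Solr's implementation:

import java.util.HashMap;
import java.util.Map;

final class ParamSets {
  private final Map<String, Map<String, Object>> sets = new HashMap<>();

  void set(String name, Map<String, Object> params) {
    sets.put(name, new HashMap<>(params)); // wholesale replace
  }

  void update(String name, Map<String, Object> params) {
    sets.computeIfAbsent(name, k -> new HashMap<>())
        .putAll(params); // merge keys into the existing map
  }

  void delete(String name) {
    sets.remove(name); // drop the whole named set
  }

  Object get(String name, String key) {
    Map<String, Object> m = sets.get(name);
    return m == null ? null : m.get(key);
  }
}

Run against the test's sequence, this model reproduces each assertion: after update, y.c reads "CY val modified" and y.e reads "EY val"; after the later set, y.c is null; after delete, y.p is null too.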
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java b/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java
index 543b648..56663ac 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java
@@ -47,7 +47,7 @@
 
   
   public void testIndexConfigParsing() throws Exception {
-    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore());
     try {
       checkIndexWriterConfig(iwc);
     } finally {
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
index 4327ec0..9855bcd 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrXml.java
@@ -88,8 +88,6 @@
     assertEquals("trans cache size", 66, cfg.getTransientCacheSize());
     assertEquals("zk client timeout", 77, cfg.getZkClientTimeout());
     assertEquals("zk host", "testZkHost", cfg.getZkHost());
-    assertEquals("persistent", true, cfg.isPersistent());
-    assertEquals("core admin path", ConfigSolr.DEFAULT_CORE_ADMIN_PATH, cfg.getAdminPath());
   }
 
   // Test  a few property substitutions that happen to be in solr-50-all.xml.
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXmlPersistence.java b/solr/core/src/test/org/apache/solr/core/TestSolrXmlPersistence.java
deleted file mode 100644
index 2e43e61..0000000
--- a/solr/core/src/test/org/apache/solr/core/TestSolrXmlPersistence.java
+++ /dev/null
@@ -1,613 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.params.CoreAdminParams;
-import org.apache.solr.handler.admin.CoreAdminHandler;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.util.TestHarness;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.RuleChain;
-import org.junit.rules.TestRule;
-import org.w3c.dom.Document;
-import org.w3c.dom.NamedNodeMap;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.xml.sax.SAXException;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.hamcrest.core.Is.is;
-
-public class TestSolrXmlPersistence extends SolrTestCaseJ4 {
-
-  private File solrHomeDirectory = createTempDir().toFile();
-
-  @Rule
-  public TestRule solrTestRules =
-      RuleChain.outerRule(new SystemPropertiesRestoreRule());
-
-  @Before
-  public void setupTest() {
-    solrHomeDirectory = createTempDir(LuceneTestCase.getTestClass().getSimpleName()).toFile();
-  }
-
-  private CoreContainer init(String solrXmlString, String... subDirs) throws Exception {
-
-    for (String s : subDirs) {
-      copyMinConf(new File(solrHomeDirectory, s));
-    }
-
-    File solrXml = new File(solrHomeDirectory, "solr.xml");
-    FileUtils.write(solrXml, solrXmlString, IOUtils.UTF_8);
-
-    final CoreContainer cores = createCoreContainer(solrHomeDirectory.getAbsolutePath(), solrXmlString);
-    return cores;
-  }
-
-
-  // take a solr.xml with system vars in <solr>, <cores> and <core> and <core/properties> tags that have system
-  // variables defined. Insure that after persisting solr.xml, they're all still there as ${} syntax.
-  // Also insure that nothing extra crept in.
-  @Test
-  public void testSystemVars() throws Exception {
-    //Set these system props in order to insure that we don't write out the values rather than the ${} syntax.
-    System.setProperty("solr.zkclienttimeout", "93");
-    System.setProperty("solrconfig", "solrconfig.xml");
-    System.setProperty("schema", "schema.xml");
-    System.setProperty("zkHostSet", "localhost:9983");
-
-    CoreContainer cc = init(SOLR_XML_LOTS_SYSVARS, "SystemVars1", "SystemVars2");
-    try {
-      origMatchesPersist(cc, SOLR_XML_LOTS_SYSVARS);
-    } finally {
-      cc.shutdown();
-    }
-  }
-
-  @Test
-  public void testReload() throws Exception {
-    // Whether the core is transient or not can make a difference.
-    doReloadTest("SystemVars2");
-    doReloadTest("SystemVars1");
-
-  }
-
-  private void doReloadTest(String which) throws Exception {
-
-    CoreContainer cc = init(SOLR_XML_LOTS_SYSVARS, "SystemVars1", "SystemVars2");
-    try {
-      final CoreAdminHandler admin = new CoreAdminHandler(cc);
-      SolrQueryResponse resp = new SolrQueryResponse();
-      admin.handleRequestBody
-          (req(CoreAdminParams.ACTION,
-              CoreAdminParams.CoreAdminAction.RELOAD.toString(),
-              CoreAdminParams.CORE, which),
-              resp);
-      assertNull("Exception on reload", resp.getException());
-
-      origMatchesPersist(cc, SOLR_XML_LOTS_SYSVARS);
-    } finally {
-      cc.shutdown();
-      if (solrHomeDirectory.exists()) {
-        FileUtils.deleteDirectory(solrHomeDirectory);
-      }
-    }
-
-  }
-
-  @Test
-  public void testRename() throws Exception {
-    doTestRename("SystemVars1");
-    doTestRename("SystemVars2");
-  }
-
-  private void doTestRename(String which) throws Exception {
-    CoreContainer cc = init(SOLR_XML_LOTS_SYSVARS, "SystemVars1", "SystemVars2");
-    SolrXMLCoresLocator.NonPersistingLocator locator
-        = (SolrXMLCoresLocator.NonPersistingLocator) cc.getCoresLocator();
-
-    try {
-      final CoreAdminHandler admin = new CoreAdminHandler(cc);
-      SolrQueryResponse resp = new SolrQueryResponse();
-      admin.handleRequestBody
-          (req(CoreAdminParams.ACTION,
-              CoreAdminParams.CoreAdminAction.RENAME.toString(),
-              CoreAdminParams.CORE, which,
-              CoreAdminParams.OTHER, "RenamedCore"),
-              resp);
-      assertNull("Exception on rename", resp.getException());
-
-      // OK, Assure that if I change everything that has been renamed with the original value for the core, it matches
-      // the old list
-      String[] persistList = getAllNodes();
-      String[] expressions = new String[persistList.length];
-
-      for (int idx = 0; idx < persistList.length; ++idx) {
-        expressions[idx] = persistList[idx].replaceAll("RenamedCore", which);
-      }
-
-      //assertXmlFile(origXml, expressions);
-      TestHarness.validateXPath(SOLR_XML_LOTS_SYSVARS, expressions);
-
-      // Now the other way, If I replace the original name in the original XML file with "RenamedCore", does it match
-      // what was persisted?
-      persistList = getAllNodes(SOLR_XML_LOTS_SYSVARS);
-      expressions = new String[persistList.length];
-      for (int idx = 0; idx < persistList.length; ++idx) {
-        // /solr/cores/core[@name='SystemVars1' and @collection='${collection:collection1}']
-        expressions[idx] = persistList[idx].replace("@name='" + which + "'", "@name='RenamedCore'");
-      }
-
-      TestHarness.validateXPath(locator.xml, expressions);
-
-    } finally {
-      cc.shutdown();
-      if (solrHomeDirectory.exists()) {
-        FileUtils.deleteDirectory(solrHomeDirectory);
-      }
-    }
-  }
-
-  @Test
-  public void testSwap() throws Exception {
-    doTestSwap("SystemVars1", "SystemVars2");
-    doTestSwap("SystemVars2", "SystemVars1");
-  }
-
-  /*
-  Count the number of times substring appears in target
-   */
-  private int countOccurrences(String target, String substring) {
-    int pos = -1, count = 0;
-    while ((pos = target.indexOf(substring, pos + 1)) != -1) {
-      count++;
-    }
-    return count;
-  }
-
-  private void doTestSwap(String from, String to) throws Exception {
-    CoreContainer cc = init(SOLR_XML_LOTS_SYSVARS, "SystemVars1", "SystemVars2");
-    SolrXMLCoresLocator.NonPersistingLocator locator
-        = (SolrXMLCoresLocator.NonPersistingLocator) cc.getCoresLocator();
-
-    int coreCount = countOccurrences(locator.xml, "<core ");
-
-    try {
-      final CoreAdminHandler admin = new CoreAdminHandler(cc);
-      SolrQueryResponse resp = new SolrQueryResponse();
-      admin.handleRequestBody
-          (req(CoreAdminParams.ACTION,
-              CoreAdminParams.CoreAdminAction.SWAP.toString(),
-              CoreAdminParams.CORE, from,
-              CoreAdminParams.OTHER, to),
-              resp);
-      assertNull("Exception on swap", resp.getException());
-
-      assertThat("Swapping cores should leave the same number of cores as before",
-          countOccurrences(locator.xml, "<core "), is(coreCount));
-
-      String[] persistList = getAllNodes();
-      String[] expressions = new String[persistList.length];
-
-      // Now manually change the names back and it should match exactly to the original XML.
-      for (int idx = 0; idx < persistList.length; ++idx) {
-        String fromName = "@name='" + from + "'";
-        String toName = "@name='" + to + "'";
-        if (persistList[idx].contains(fromName)) {
-          expressions[idx] = persistList[idx].replace(fromName, toName);
-        } else {
-          expressions[idx] = persistList[idx].replace(toName, fromName);
-        }
-      }
-
-      //assertXmlFile(origXml, expressions);
-      TestHarness.validateXPath(SOLR_XML_LOTS_SYSVARS, expressions);
-
-    } finally {
-      cc.shutdown();
-      if (solrHomeDirectory.exists()) {
-        FileUtils.deleteDirectory(solrHomeDirectory);
-      }
-    }
-  }
-
-  @Test
-  public void testMinimalXml() throws Exception {
-    CoreContainer cc = init(SOLR_XML_MINIMAL, "SystemVars1");
-    try {
-      cc.shutdown();
-      origMatchesPersist(cc, SOLR_XML_MINIMAL);
-    } finally {
-      cc.shutdown();
-    }
-  }
-
-  private void origMatchesPersist(CoreContainer cc, String originalSolrXML) throws Exception  {
-    String[] expressions = getAllNodes(originalSolrXML);
-    SolrXMLCoresLocator.NonPersistingLocator locator
-        = (SolrXMLCoresLocator.NonPersistingLocator) cc.getCoresLocator();
-
-    TestHarness.validateXPath(locator.xml, expressions);
-  }
-
-  @Test
-  public void testUnloadCreate() throws Exception {
-    doTestUnloadCreate("SystemVars1");
-    doTestUnloadCreate("SystemVars2");
-  }
-
-  private void doTestUnloadCreate(String which) throws Exception {
-    CoreContainer cc = init(SOLR_XML_LOTS_SYSVARS, "SystemVars1", "SystemVars2");
-    try {
-      final CoreAdminHandler admin = new CoreAdminHandler(cc);
-
-      SolrQueryResponse resp = new SolrQueryResponse();
-      admin.handleRequestBody
-          (req(CoreAdminParams.ACTION,
-              CoreAdminParams.CoreAdminAction.UNLOAD.toString(),
-              CoreAdminParams.CORE, which),
-              resp);
-      assertNull("Exception on unload", resp.getException());
-
-      //origMatchesPersist(cc, new File(solrHomeDirectory, "unloadcreate1.solr.xml"));
-
-      String instPath = new File(solrHomeDirectory, which).getAbsolutePath();
-      admin.handleRequestBody
-          (req(CoreAdminParams.ACTION,
-              CoreAdminParams.CoreAdminAction.CREATE.toString(),
-              CoreAdminParams.INSTANCE_DIR, instPath,
-              CoreAdminParams.NAME, which),
-              resp);
-      assertNull("Exception on create", resp.getException());
-
-      String[] persistList = getAllNodes();
-      String[] expressions = new String[persistList.length];
-
-      // Now manually change the names back and it should match exactly to the original XML.
-      for (int idx = 0; idx < persistList.length; ++idx) {
-        String name = "@name='" + which + "'";
-
-        if (persistList[idx].contains(name)) {
-          if (persistList[idx].contains("@schema='schema.xml'")) {
-            expressions[idx] = persistList[idx].replace("schema.xml", "${schema:schema.xml}");
-          } else if (persistList[idx].contains("@config='solrconfig.xml'")) {
-            expressions[idx] = persistList[idx].replace("solrconfig.xml", "${solrconfig:solrconfig.xml}");
-          } else if (persistList[idx].contains("@instanceDir=")) {
-            expressions[idx] = persistList[idx].replaceFirst("instanceDir\\='.*?'", "instanceDir='" + which + "/'");
-          } else {
-            expressions[idx] = persistList[idx];
-          }
-        } else {
-          expressions[idx] = persistList[idx];
-        }
-      }
-
-      //assertXmlFile(origXml, expressions);
-      TestHarness.validateXPath(SOLR_XML_LOTS_SYSVARS, expressions);
-
-    } finally {
-      cc.shutdown();
-      if (solrHomeDirectory.exists()) {
-        FileUtils.deleteDirectory(solrHomeDirectory);
-      }
-    }
-  }
-
-  @Test
-  public void testCreatePersistCore() throws Exception {
-    // Template for creating a core.
-    CoreContainer cc = init(SOLR_XML_LOTS_SYSVARS, "SystemVars1", "SystemVars2", "props1", "props2");
-    SolrXMLCoresLocator.NonPersistingLocator locator
-        = (SolrXMLCoresLocator.NonPersistingLocator) cc.getCoresLocator();
-
-    try {
-      final CoreAdminHandler admin = new CoreAdminHandler(cc);
-      // create a new core (using CoreAdminHandler) w/ properties
-
-      SolrQueryResponse resp = new SolrQueryResponse();
-      admin.handleRequestBody
-          (req(CoreAdminParams.ACTION,
-              CoreAdminParams.CoreAdminAction.CREATE.toString(),
-              CoreAdminParams.NAME, "props1",
-              CoreAdminParams.TRANSIENT, "true",
-              CoreAdminParams.LOAD_ON_STARTUP, "true",
-              CoreAdminParams.PROPERTY_PREFIX + "prefix1", "valuep1",
-              CoreAdminParams.PROPERTY_PREFIX + "prefix2", "valueP2",
-              "wt", "json", // need to insure that extra parameters are _not_ preserved (actually happened).
-              "qt", "admin/cores"),
-              resp);
-      assertNull("Exception on create", resp.getException());
-
-      String instPath2 = new File(solrHomeDirectory, "props2").getAbsolutePath();
-      admin.handleRequestBody
-          (req(CoreAdminParams.ACTION,
-              CoreAdminParams.CoreAdminAction.CREATE.toString(),
-              CoreAdminParams.INSTANCE_DIR, instPath2,
-              CoreAdminParams.NAME, "props2",
-              CoreAdminParams.PROPERTY_PREFIX + "prefix2_1", "valuep2_1",
-              CoreAdminParams.PROPERTY_PREFIX + "prefix2_2", "valueP2_2",
-              CoreAdminParams.CONFIG, "solrconfig.xml",
-              CoreAdminParams.DATA_DIR, "./dataDirTest",
-              CoreAdminParams.SCHEMA, "schema.xml"),
-              resp);
-      assertNull("Exception on create", resp.getException());
-
-      // Everything that was in the original XML file should be in the persisted one.
-      TestHarness.validateXPath(locator.xml, getAllNodes(SOLR_XML_LOTS_SYSVARS));
-
-      // And the params for the new core should be in the persisted file.
-      TestHarness.validateXPath
-          (
-              locator.xml,
-              "/solr/cores/core[@name='props1']/property[@name='prefix1' and @value='valuep1']"
-              , "/solr/cores/core[@name='props1']/property[@name='prefix2' and @value='valueP2']"
-              , "/solr/cores/core[@name='props1' and @transient='true']"
-              , "/solr/cores/core[@name='props1' and @loadOnStartup='true']"
-              , "/solr/cores/core[@name='props1' and @instanceDir='props1" + File.separator + "']"
-              , "/solr/cores/core[@name='props2']/property[@name='prefix2_1' and @value='valuep2_1']"
-              , "/solr/cores/core[@name='props2']/property[@name='prefix2_2' and @value='valueP2_2']"
-              , "/solr/cores/core[@name='props2' and @config='solrconfig.xml']"
-              , "/solr/cores/core[@name='props2' and @schema='schema.xml']"
-              , "/solr/cores/core[@name='props2' and not(@loadOnStartup)]"
-              , "/solr/cores/core[@name='props2' and not(@transient)]"
-              , "/solr/cores/core[@name='props2' and @instanceDir='" + instPath2 + "']"
-              , "/solr/cores/core[@name='props2' and @dataDir='./dataDirTest']"
-          );
-
-    } finally {
-      cc.shutdown();
-    }
-  }
-
-  @Test
-  public void testPersist() throws Exception {
-
-    String defXml = FileUtils.readFileToString(
-        new File(SolrTestCaseJ4.TEST_HOME(), "solr.xml"),
-        StandardCharsets.UTF_8.name());
-    final CoreContainer cores = init(defXml, "collection1");
-    SolrXMLCoresLocator.NonPersistingLocator locator
-        = (SolrXMLCoresLocator.NonPersistingLocator) cores.getCoresLocator();
-
-    String instDir;
-    try (SolrCore template = cores.getCore("collection1")) {
-      assertNotNull(template);
-      instDir = template.getCoreDescriptor().getRawInstanceDir();
-    }
-
-    final File instDirFile = new File(cores.getSolrHome(), instDir);
-    assertTrue("instDir doesn't exist: " + instDir, instDirFile.exists());
-
-    // sanity check the basic persistence of the default init
-    TestHarness.validateXPath(locator.xml,
-        "/solr[@persistent='true']",
-        "/solr/cores[@defaultCoreName='collection1' and not(@transientCacheSize)]",
-        "/solr/cores/core[@name='collection1' and @instanceDir='" + instDir +
-            "' and @transient='false' and @loadOnStartup='true' ]",
-        "1=count(/solr/cores/core)");
-
-    // create some new cores and sanity check the persistence
-
-    final File dataXfile = new File(solrHomeDirectory, "dataX");
-    final String dataX = dataXfile.getAbsolutePath();
-    assertTrue("dataXfile mkdirs failed: " + dataX, dataXfile.mkdirs());
-
-    final File instYfile = new File(solrHomeDirectory, "instY");
-    FileUtils.copyDirectory(instDirFile, instYfile);
-
-    // :HACK: dataDir leaves off trailing "/", but instanceDir uses it
-    final String instY = instYfile.getAbsolutePath() + "/";
-
-    final CoreDescriptor xd = buildCoreDescriptor(cores, "X", instDir)
-        .withDataDir(dataX).build();
-
-    final CoreDescriptor yd = new CoreDescriptor(cores, "Y", instY);
-
-    SolrCore x = null;
-    SolrCore y = null;
-    try {
-      x = cores.create(xd);
-      y = cores.create(yd);
-
-      assertEquals("cores not added?", 3, cores.getCoreNames().size());
-
-      TestHarness.validateXPath(locator.xml,
-          "/solr[@persistent='true']",
-          "/solr/cores[@defaultCoreName='collection1']",
-          "/solr/cores/core[@name='collection1' and @instanceDir='" + instDir
-              + "']", "/solr/cores/core[@name='X' and @instanceDir='" + instDir
-              + "' and @dataDir='" + dataX + "']",
-          "/solr/cores/core[@name='Y' and @instanceDir='" + instY + "']",
-          "3=count(/solr/cores/core)");
-
-      // Test for saving implicit properties, we should not do this.
-      TestHarness.validateXPath(locator.xml,
-          "/solr/cores/core[@name='X' and not(@solr.core.instanceDir) and not (@solr.core.configName)]");
-
-      cores.unload("X");
-
-      TestHarness.validateXPath(locator.xml, "/solr[@persistent='true']",
-          "/solr/cores[@defaultCoreName='collection1']",
-          "/solr/cores/core[@name='collection1' and @instanceDir='" + instDir + "']",
-          "/solr/cores/core[@name='Y' and @instanceDir='" + instY + "']",
-          "2=count(/solr/cores/core)");
-
-    } finally {
-      cores.shutdown();
-    }
-  }
-
-
-  private String[] getAllNodes(InputStream is) throws ParserConfigurationException, IOException, SAXException {
-    List<String> expressions = new ArrayList<>(); // XPATH and value for all elements in the indicated XML
-    DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory
-        .newInstance();
-    DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
-    Document document = docBuilder.parse(is);
-
-    Node root = document.getDocumentElement();
-    gatherNodes(root, expressions, "");
-    return expressions.toArray(new String[expressions.size()]);
-  }
-
-  private String[] getAllNodes() throws ParserConfigurationException, IOException, SAXException {
-    return getAllNodes(new FileInputStream(new File(solrHomeDirectory, "solr.xml")));
-  }
-
-  private String[] getAllNodes(String xmlString) throws ParserConfigurationException, IOException, SAXException {
-    return getAllNodes(new ByteArrayInputStream(xmlString.getBytes(StandardCharsets.UTF_8)));
-  }
-
-  /*
-  private void assertSolrXmlFile(String... xpathExpressions) throws IOException, SAXException {
-    assertXmlFile(new File(solrHomeDirectory, "solr.xml"), xpathExpressions);
-  }
-  */
-
-  // Note this is pretty specialized for a solr.xml file because working with the DOM is such a pain.
-
-  private static List<String> qualified = new ArrayList<String>() {{
-    add("core");
-    add("property");
-    add("int");
-    add("str");
-    add("long");
-    add("property");
-  }};
-
-  private static List<String> addText = new ArrayList<String>() {{
-    add("int");
-    add("str");
-    add("long");
-  }};
-
-  // path is the path to parent node
-
-  private void gatherNodes(Node node, List<String> expressions, String path) {
-
-    String nodeName = node.getNodeName();
-    String thisPath = path + "/" + nodeName;
-    //Parent[@id='1']/Children/child[@name]
-    // Add in the xpaths for verification of any attributes.
-    NamedNodeMap attrs = node.getAttributes();
-    String qualifier = "";
-    if (attrs.getLength() > 0) {
-      // Assemble the prefix for qualifying all of the attributes with the same name
-      if (qualified.contains(nodeName)) {
-        qualifier = "@name='" + node.getAttributes().getNamedItem("name").getTextContent() + "'";
-      }
-
-      for (int idx = 0; idx < attrs.getLength(); ++idx) {
-
-        Node attr = attrs.item(idx);
-        if (StringUtils.isNotBlank(qualifier) && "name".equals(attr.getNodeName())) {
-          continue; // Already added "name" attribute in qualifier string.
-        }
-        if (StringUtils.isNotBlank(qualifier)) {
-          // Create [@name="stuff" and @attrib="value"] fragment
-          expressions.add(thisPath +
-              "[" + qualifier + " and @" + attr.getNodeName() + "='" + attr.getTextContent() + "']");
-
-        } else {
-          // Create [@attrib="value"] fragment
-          expressions.add(thisPath +
-              "[" + qualifier + " @" + attr.getNodeName() + "='" + attr.getTextContent() + "']");
-        }
-      }
-    }
-    // Now add the text for special nodes
-    // a[normalize-space(text())='somesite']
-    if (addText.contains(nodeName)) {
-      expressions.add(thisPath + "[" + qualifier + " and text()='" + node.getTextContent() + "']");
-    }
-    // Now collect all the child element nodes.
-    NodeList nodeList = node.getChildNodes();
-    for (int i = 0; i < nodeList.getLength(); i++) {
-
-      Node currentNode = nodeList.item(i);
-      if (currentNode.getNodeType() == Node.ELEMENT_NODE) {
-        if (StringUtils.isNotBlank(qualifier)) {
-          gatherNodes(currentNode, expressions, thisPath + "[" + qualifier + "]");
-        } else {
-          gatherNodes(currentNode, expressions, thisPath);
-        }
-      }
-    }
-  }
-
-  public static String SOLR_XML_LOTS_SYSVARS =
-      "<solr persistent=\"${solr.xml.persist:false}\" coreLoadThreads=\"12\" sharedLib=\"${something:.}\" >\n" +
-          "  <logging class=\"${logclass:log4j.class}\" enabled=\"{logenable:true}\">\n" +
-          "     <watcher size=\"${watchSize:13}\" threshold=\"${logThresh:54}\" />\n" +
-          "  </logging>\n" +
-          "  <cores adminPath=\"/admin/cores\" defaultCoreName=\"SystemVars1\" host=\"127.0.0.1\" \n" +
-          "       hostPort=\"${hostPort:8983}\" hostContext=\"${hostContext:solr}\" \n" +
-          "       zkClientTimeout=\"${solr.zkclienttimeout:30000}\" \n" +
-          "       shareSchema=\"${shareSchema:false}\" distribUpdateConnTimeout=\"${distribUpdateConnTimeout:15000}\" \n" +
-          "       distribUpdateSoTimeout=\"${distribUpdateSoTimeout:120000}\" \n" +
-          "       leaderVoteWait=\"${leadVoteWait:32}\" managementPath=\"${manpath:/var/lib/path}\" transientCacheSize=\"${tranSize:128}\"> \n" +
-          "     <core name=\"SystemVars1\" instanceDir=\"SystemVars1/\" shard=\"${shard:32}\" \n" +
-          "          collection=\"${collection:collection1}\" config=\"${solrconfig:solrconfig.xml}\" \n" +
-          "          schema=\"${schema:schema.xml}\" ulogDir=\"${ulog:./}\" roles=\"${myrole:boss}\" \n" +
-          "          dataDir=\"${data:./}\" loadOnStartup=\"${onStart:true}\" transient=\"${tran:true}\" \n" +
-          "          coreNodeName=\"${coreNode:utterlyridiculous}\" \n" +
-          "       >\n" +
-          "     </core>\n" +
-          "     <core name=\"SystemVars2\" instanceDir=\"SystemVars2/\" shard=\"${shard:32}\" \n" +
-          "          collection=\"${collection:collection2}\" config=\"${solrconfig:solrconfig.xml}\" \n" +
-          "          coreNodeName=\"${coreNodeName:}\" schema=\"${schema:schema.xml}\">\n" +
-          "      <property name=\"collection\" value=\"{collection:collection2}\"/>\n" +
-          "      <property name=\"schema\" value=\"${schema:schema.xml}\"/>\n" +
-          "      <property name=\"coreNodeName\" value=\"EricksCore\"/>\n" +
-          "     </core>\n" +
-          "     <shardHandlerFactory name=\"${shhandler:shardHandlerFactory}\" class=\"${handlefac:HttpShardHandlerFactory}\">\n" +
-          "         <int name=\"socketTimeout\">${socketTimeout:120000}</int> \n" +
-          "         <int name=\"connTimeout\">${connTimeout:15000}</int> \n" +
-          "         <str name=\"arbitraryName\">${arbitrarySysValue:foobar}</str>\n" +
-          "     </shardHandlerFactory> \n" +
-          "   </cores>\n" +
-          "</solr>";
-
-
-  private static String SOLR_XML_MINIMAL =
-          "<solr >\n" +
-          "  <cores> \n" +
-          "     <core name=\"SystemVars1\" instanceDir=\"SystemVars1/\" />\n" +
-          "   </cores>\n" +
-          "</solr>";
-
-}
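
Aside: the deleted SOLR_XML_LOTS_SYSVARS fixture above exercises ${property:default} substitution throughout solr.xml. A minimal sketch of the resolution semantics, where the property name is illustrative:

// ${hostPort:8983} resolves the system property "hostPort", falling back to "8983"
String hostPort = System.getProperty("hostPort", "8983");
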
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrXmlPersistor.java b/solr/core/src/test/org/apache/solr/core/TestSolrXmlPersistor.java
deleted file mode 100644
index f93117f..0000000
--- a/solr/core/src/test/org/apache/solr/core/TestSolrXmlPersistor.java
+++ /dev/null
@@ -1,127 +0,0 @@
-package org.apache.solr.core;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.solr.SolrTestCaseJ4;
-import org.junit.Test;
-
-import com.google.common.collect.ImmutableList;
-
-public class TestSolrXmlPersistor  extends SolrTestCaseJ4 {
-
-  private static final List<CoreDescriptor> EMPTY_CD_LIST = ImmutableList.<CoreDescriptor>builder().build();
-
-  @Test
-  public void selfClosingCoresTagIsPersisted() {
-
-    final String solrxml = "<solr><cores adminHandler=\"/admin\"/></solr>";
-
-    SolrXMLCoresLocator persistor = new SolrXMLCoresLocator(solrxml, null);
-    assertEquals("<solr><cores adminHandler=\"/admin\"></cores></solr>", persistor.buildSolrXML(EMPTY_CD_LIST));
-
-  }
-
-  @Test
-  public void emptyCoresTagIsPersisted() {
-    final String solrxml = "<solr><cores adminHandler=\"/admin\"></cores></solr>";
-
-    SolrXMLCoresLocator persistor = new SolrXMLCoresLocator(solrxml, null);
-    assertEquals("<solr><cores adminHandler=\"/admin\"></cores></solr>", persistor.buildSolrXML(EMPTY_CD_LIST));
-  }
-
-  @Test
-  public void emptySolrXmlIsPersisted() {
-    final String solrxml = "<solr></solr>";
-
-    SolrXMLCoresLocator persistor = new SolrXMLCoresLocator(solrxml, null);
-    assertEquals("<solr><cores></cores></solr>", persistor.buildSolrXML(EMPTY_CD_LIST));
-  }
-
-  @Test
-  public void simpleCoreDescriptorIsPersisted() throws IOException {
-    
-    final String solrxml = "<solr><cores></cores></solr>";
-    
-    final File solrHomeDirectory = createTempDir().toFile();
-    
-    copyMinFullSetup(solrHomeDirectory);
-    
-    CoreContainer cc = new CoreContainer(solrHomeDirectory.getAbsolutePath());
-    
-    final CoreDescriptor cd = new CoreDescriptor(cc, "testcore",
-        "instance/dir/");
-    List<CoreDescriptor> cds = ImmutableList.of(cd);
-    
-    SolrXMLCoresLocator persistor = new SolrXMLCoresLocator(solrxml, null);
-    String xml = persistor.buildSolrXML(cds);
-    
-    assertTrue(xml.contains("<solr><cores>"));
-    assertTrue(xml.contains("name=\"testcore\""));
-    assertTrue(xml.contains("instanceDir=\"instance/dir/\""));
-    assertTrue(xml.contains("</cores></solr>"));
-  }
-
-  @Test
-  public void shardHandlerInfoIsPersisted() {
-
-    final String solrxml =
-        "<solr>" +
-          "<cores adminHandler=\"whatever\">" +
-            "<core name=\"testcore\" instanceDir=\"instance/dir/\"/>" +
-            "<shardHandlerFactory name=\"shardHandlerFactory\" class=\"HttpShardHandlerFactory\">" +
-              "<int name=\"socketTimeout\">${socketTimeout:500}</int>" +
-              "<str name=\"arbitrary\">arbitraryValue</str>" +
-            "</shardHandlerFactory>" +
-          "</cores>" +
-        "</solr>";
-
-    SolrXMLCoresLocator locator = new SolrXMLCoresLocator(solrxml, null);
-    assertTrue(locator.getTemplate().contains("{{CORES_PLACEHOLDER}}"));
-    assertTrue(locator.getTemplate().contains("<shardHandlerFactory "));
-    assertTrue(locator.getTemplate().contains("${socketTimeout:500}"));
-
-  }
-
-  @Test
-  public void simpleShardHandlerInfoIsPersisted() {
-
-    final String solrxml =
-        "<solr>" +
-          "<cores adminHandler=\"whatever\">" +
-            "<core name=\"testcore\" instanceDir=\"instance/dir/\"/>" +
-            "<shardHandlerFactory name=\"shardHandlerFactory\" class=\"HttpShardHandlerFactory\"/>" +
-          "</cores>" +
-        "</solr>";
-
-    SolrXMLCoresLocator locator = new SolrXMLCoresLocator(solrxml, null);
-    assertTrue(locator.getTemplate().contains("{{CORES_PLACEHOLDER}}"));
-    assertTrue(locator.getTemplate().contains("<shardHandlerFactory "));
-  }
-
-  @Test
-  public void complexXmlIsParsed() {
-    SolrXMLCoresLocator locator = new SolrXMLCoresLocator(TestSolrXmlPersistence.SOLR_XML_LOTS_SYSVARS, null);
-    assertTrue(locator.getTemplate().contains("{{CORES_PLACEHOLDER}}"));
-  }
-
-}
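
Both solr.xml persistence tests are removed outright; consistent with the SolrXMLCoresLocator references dropped later in this patch, cores are discovered from a per-core properties file rather than persisted back into solr.xml. A minimal sketch of the replacement descriptor, mirroring the writeCoreProperties(...) call added to TestReplicationHandler below (paths are illustrative, not part of the patch):

import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;

// <solr.home>/collection1/core.properties marks the directory as a core
Properties props = new Properties();
props.setProperty("name", "collection1");
try (Writer w = Files.newBufferedWriter(
    Paths.get("/path/to/solr.home", "collection1", "core.properties"), StandardCharsets.UTF_8)) {
  props.store(w, "core discovery descriptor");
}
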
diff --git a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
index 9633242..f273333 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestBlobHandler.java
@@ -37,63 +37,69 @@
 import org.apache.solr.core.ConfigOverlay;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.util.SimplePostTool;
+import org.junit.Test;
+import org.noggit.JSONParser;
+import org.noggit.ObjectBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.io.StringReader;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
+import java.text.MessageFormat;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
 
 import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
 
 public class TestBlobHandler extends AbstractFullDistribZkTestBase {
   static final Logger log =  LoggerFactory.getLogger(TestBlobHandler.class);
 
-  private void doBlobHandlerTest() throws Exception {
-    SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
+  @Test
+  public void doBlobHandlerTest() throws Exception {
 
-    CollectionAdminResponse response1;
-    CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create();
-    createCollectionRequest.setCollectionName(".system");
-    createCollectionRequest.setNumShards(1);
-    createCollectionRequest.setReplicationFactor(2);
-    response1 = createCollectionRequest.process(client);
-    assertEquals(0, response1.getStatus());
-    assertTrue(response1.isSuccess());
-    DocCollection sysColl = cloudClient.getZkStateReader().getClusterState().getCollection(".system");
-    Replica replica = sysColl.getActiveSlicesMap().values().iterator().next().getLeader();
+    try (SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)))) {
+      CollectionAdminResponse response1;
+      CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create();
+      createCollectionRequest.setCollectionName(".system");
+      createCollectionRequest.setNumShards(1);
+      createCollectionRequest.setReplicationFactor(2);
+      response1 = createCollectionRequest.process(client);
+      assertEquals(0, response1.getStatus());
+      assertTrue(response1.isSuccess());
+      DocCollection sysColl = cloudClient.getZkStateReader().getClusterState().getCollection(".system");
+      Replica replica = sysColl.getActiveSlicesMap().values().iterator().next().getLeader();
 
-    String baseUrl = replica.getStr(ZkStateReader.BASE_URL_PROP);
-    String url = baseUrl + "/.system/config/requestHandler";
-    Map map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
-    assertNotNull(map);
-    assertEquals("solr.BlobHandler", getObjectByPath(map, true, Arrays.asList(
-        "solrConfig",
-        "requestHandler",
-        "/blob",
-        "class")));
+      String baseUrl = replica.getStr(ZkStateReader.BASE_URL_PROP);
+      String url = baseUrl + "/.system/config/requestHandler";
+      Map map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
+      assertNotNull(map);
+      assertEquals("solr.BlobHandler", getObjectByPath(map, true, Arrays.asList(
+          "config",
+          "requestHandler",
+          "/blob",
+          "class")));
 
-    byte[] bytarr  = new byte[1024];
-    for (int i = 0; i < bytarr.length; i++) bytarr[i]= (byte) (i % 127);
-    byte[] bytarr2  = new byte[2048];
-    for (int i = 0; i < bytarr2.length; i++) bytarr2[i]= (byte) (i % 127);
-    postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr), 1);
-    postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr2), 2);
+      byte[] bytarr = new byte[1024];
+      for (int i = 0; i < bytarr.length; i++) bytarr[i] = (byte) (i % 127);
+      byte[] bytarr2 = new byte[2048];
+      for (int i = 0; i < bytarr2.length; i++) bytarr2[i] = (byte) (i % 127);
+      postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap(bytarr), 1);
+      postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap(bytarr2), 2);
 
-    url = baseUrl + "/.system/blob/test/1";
-    map = TestSolrConfigHandlerConcurrent.getAsMap(url,cloudClient);
-    List l = (List) ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "docs"));
-    assertNotNull(l);
-    map = (Map) l.get(0);
-    assertEquals(""+bytarr.length,String.valueOf(map.get("size")));
+      url = baseUrl + "/.system/blob/test/1";
+      map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
+      List l = (List) ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "docs"));
+      assertNotNull("" + map, l);
+      assertTrue("" + map, l.size() > 0);
+      map = (Map) l.get(0);
+      assertEquals("" + bytarr.length, String.valueOf(map.get("size")));
 
-    compareInputAndOutput(baseUrl+"/.system/blob/test?wt=filestream", bytarr2);
-    compareInputAndOutput(baseUrl+"/.system/blob/test/1?wt=filestream", bytarr);
-
+      compareInputAndOutput(baseUrl+"/.system/blob/test?wt=filestream", bytarr2);
+      compareInputAndOutput(baseUrl+"/.system/blob/test/1?wt=filestream", bytarr);
+    }
   }
 
   public static  void createSysColl(SolrClient client) throws SolrServerException, IOException {
@@ -108,8 +114,8 @@
   }
 
   @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("numShards");
     System.clearProperty("zkHost");
 
@@ -119,27 +125,32 @@
 
   public static void postAndCheck(CloudSolrClient cloudClient, String baseUrl, ByteBuffer bytes, int count) throws Exception {
     postData(cloudClient, baseUrl, bytes);
+
     String url;
-    Map map;
+    Map map = null;
     List l;
-    long startTime = System.nanoTime();
-    long maxTimeoutSeconds = 10;
-    while ( true) {
+    long start = System.currentTimeMillis();
+    int i = 0;
+    for (; i < 150; i++) { // up to ~15 secs (150 attempts x 100ms sleep)
       url = baseUrl + "/.system/blob/test";
       map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
       String numFound = String.valueOf(ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "numFound")));
       if(!(""+count).equals(numFound)) {
-        if (TimeUnit.SECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutSeconds) {
-          Thread.sleep(100);
-          continue;
-        }
+        Thread.sleep(100);
+        continue;
       }
       l = (List) ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "docs"));
       assertNotNull(l);
       map = (Map) l.get(0);
       assertEquals("" + bytes.limit(), String.valueOf(map.get("size")));
-      break;
+      return;
     }
+    fail(MessageFormat.format("Could not successfully add blob after {0} attempts. Expecting {1} items. Time elapsed: {2} ms. Output for url: {3}",
+        i, count, System.currentTimeMillis() - start, getAsString(map)));
+  }
+
+  public static String getAsString(Map map) {
+    return new String(ZkStateReader.toJSON(map), StandardCharsets.UTF_8);
   }
 
   private void compareInputAndOutput(String url, byte[] bytarr) throws IOException {
@@ -160,24 +171,25 @@
 
   }
 
-  public static String postData(CloudSolrClient cloudClient, String baseUrl, ByteBuffer bytarr) throws IOException {
+  public static void postData(CloudSolrClient cloudClient, String baseUrl, ByteBuffer bytarr) throws IOException {
     HttpPost httpPost = null;
     HttpEntity entity;
-    String response;
+    String response = null;
     try {
       httpPost = new HttpPost(baseUrl+"/.system/blob/test");
       httpPost.setHeader("Content-Type","application/octet-stream");
       httpPost.setEntity(new ByteArrayEntity(bytarr.array(), bytarr.arrayOffset(), bytarr.limit()));
       entity = cloudClient.getLbClient().getHttpClient().execute(httpPost).getEntity();
-      return EntityUtils.toString(entity, StandardCharsets.UTF_8);
+      try {
+        response = EntityUtils.toString(entity, StandardCharsets.UTF_8);
+        Map m = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+        assertFalse("Error in posting blob "+ getAsString(m),m.containsKey("error"));
+      } catch (JSONParser.ParseException e) {
+        log.error(response);
+        fail();
+      }
     } finally {
       httpPost.releaseConnection();
     }
   }
-
-  @Override
-  public void doTest() throws Exception {
-    doBlobHandlerTest();
-
-  }
 }
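
A recurring change in this patch: SolrClient.shutdown() becomes close(), so clients can be managed with try-with-resources, as doBlobHandlerTest() now does. A minimal sketch of the pattern (URL illustrative):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

try (HttpSolrClient client = new HttpSolrClient("http://127.0.0.1:8983/solr/collection1")) {
  client.query(new SolrQuery("*:*"));
} // close() replaces the old shutdown() and releases the underlying HTTP resources
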
diff --git a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java
index 7cc45a7..506c028 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestConfigReload.java
@@ -17,6 +17,9 @@
  * limitations under the License.
  */
 
+import static java.util.Arrays.asList;
+import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
+
 import java.io.StringReader;
 import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
@@ -44,14 +47,12 @@
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.data.Stat;
+import org.junit.Test;
 import org.noggit.JSONParser;
 import org.noggit.ObjectBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static java.util.Arrays.asList;
-import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
-
 public class TestConfigReload extends AbstractFullDistribZkTestBase {
 
 
@@ -69,11 +70,25 @@
       restTestHarnesses.add(harness);
     }
   }
-
+  
   @Override
-  public void doTest() throws Exception {
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    for (RestTestHarness h : restTestHarnesses) {
+      h.close();
+    }
+  }
+
+  @Test
+  public void test() throws Exception {
     setupHarnesses();
-    reloadTest();
+    try {
+      reloadTest();
+    } finally {
+      for (RestTestHarness h : restTestHarnesses) {
+        h.close();
+      }
+    }
   }
 
   private void reloadTest() throws Exception {
@@ -81,7 +96,7 @@
     log.info("live_nodes_count :  " + cloudClient.getZkStateReader().getClusterState().getLiveNodes());
     String confPath = ZkController.CONFIGS_ZKNODE+"/conf1/";
 //    checkConfReload(client, confPath + ConfigOverlay.RESOURCE_NAME, "overlay");
-    checkConfReload(client, confPath + SolrConfig.DEFAULT_CONF_FILE,"solrConfig", "/config");
+    checkConfReload(client, confPath + SolrConfig.DEFAULT_CONF_FILE, "config", "/config");
 
   }
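
The overridden doTest() entry point is gone from the distributed test base classes; tests become plain JUnit 4 @Test methods and cleanup moves into distribTearDown(), as above. A minimal sketch of the new shape (class name hypothetical):

import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.junit.Test;

public class MyDistribTest extends AbstractFullDistribZkTestBase {
  @Override
  public void distribTearDown() throws Exception {
    super.distribTearDown();  // base-class cleanup first
    // then close per-test resources, e.g. RestTestHarness instances
  }

  @Test
  public void test() throws Exception {
    // body that previously lived in doTest()
  }
}
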
 
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
index 7d4548b..22a96c2 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
@@ -68,6 +68,7 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Date;
+import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
@@ -137,8 +138,8 @@
     slave.tearDown();
     masterJetty = slaveJetty = null;
     master = slave = null;
-    masterClient.shutdown();
-    slaveClient.shutdown();
+    masterClient.close();
+    slaveClient.close();
     masterClient = slaveClient = null;
   }
 
@@ -153,7 +154,7 @@
   private static SolrClient createNewSolrClient(int port) {
     try {
       // setup the client...
-      HttpSolrClient client = new HttpSolrClient(buildUrl(port));
+      HttpSolrClient client = new HttpSolrClient(buildUrl(port) + "/" + DEFAULT_TEST_CORENAME);
       client.setConnectionTimeout(15000);
       client.setSoTimeout(60000);
       client.setDefaultMaxConnectionsPerHost(100);
@@ -261,18 +262,24 @@
   }
   
   private NamedList<Object> reloadCore(SolrClient s, String core) throws Exception {
-    
+
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action","reload");
     params.set("core", core);
     params.set("qt","/admin/cores");
     QueryRequest req = new QueryRequest(params);
 
-    NamedList<Object> res = s.request(req);
+    try (HttpSolrClient adminClient = adminClient(s)) {
+      NamedList<Object> res = adminClient.request(req);
+      assertNotNull("null response from server", res);
+      return res;
+    }
 
-    assertNotNull("null response from server", res);
+  }
 
-    return res;
+  private HttpSolrClient adminClient(SolrClient client) {
+    String adminUrl = ((HttpSolrClient)client).getBaseURL().replace("/collection1", "");
+    return new HttpSolrClient(adminUrl);
   }
 
   @Test
@@ -340,52 +347,13 @@
       try { 
         if (repeater != null) repeater.tearDown();
       } catch (Exception e) { /* :NOOP: */ }
-      if (repeaterClient != null) repeaterClient.shutdown();
+      if (repeaterClient != null) repeaterClient.close();
     }
   }
 
 
   /**
-   * Verify that things still work if an IW has not been opened (and hence the CommitPoints have not been communicated to the deletion policy)
-   */
-  public void testNoWriter() throws Exception {
-    useFactory(null);    // force a persistent directory
-
-    // read-only setting (no opening from indexwriter)
-    System.setProperty("solr.tests.nrtMode", "false");
-    try {
-    // stop and start so they see the new directory setting
-    slaveJetty.stop();
-    masterJetty.stop();
-    slaveJetty.start(true);
-    masterJetty.start(true);
-
-    index(slaveClient, "id", "123456");
-    slaveClient.commit();
-    slaveJetty.stop();
-    slaveJetty.start(true);
-    } finally {
-      System.clearProperty("solr.tests.nrtMode"); // dont mess with other tests
-    }
-
-    // Currently we open a writer on-demand.  This is to test that we are correctly testing
-    // the code path when SolrDeletionPolicy.getLatestCommit() returns null.
-    // When we are using an ephemeral directory, an IW will always be opened to create the index and hence
-    // getLatestCommit will always be non-null.
-    CoreContainer cores = ((SolrDispatchFilter) slaveJetty.getDispatchFilter().getFilter()).getCores();
-    Collection<SolrCore> theCores = cores.getCores();
-    assertEquals(1, theCores.size());
-    SolrCore core = (SolrCore)theCores.toArray()[0];
-    assertNull( core.getDeletionPolicy().getLatestCommit() );
-
-
-    pullFromMasterToSlave();  // this will cause SnapPuller to be invoked and we will test when SolrDeletionPolicy.getLatestCommit() returns null
-
-    resetFactory();
-  }
-
-  /**
-   * Verify that empty commits and/or commits with openSearcher=false 
+   * Verify that empty commits and/or commits with openSearcher=false
    * on the master do not cause subsequent replication problems on the slave 
    */
   public void testEmptyCommits() throws Exception {
@@ -488,7 +456,7 @@
   //jetty servers.
   private void invokeReplicationCommand(int pJettyPort, String pCommand) throws IOException
   {
-    String masterUrl = buildUrl(pJettyPort) + "/replication?command=" + pCommand;
+    String masterUrl = buildUrl(pJettyPort) + "/" + DEFAULT_TEST_CORENAME + "/replication?command=" + pCommand;
     URL u = new URL(masterUrl);
     InputStream stream = u.openStream();
     stream.close();
@@ -529,7 +497,7 @@
     masterJetty.stop();
 
     masterJetty = createJetty(master);
-    masterClient.shutdown();
+    masterClient.close();
     masterClient = createNewSolrClient(masterJetty.getLocalPort());
 
     slave.setTestPort(masterJetty.getLocalPort());
@@ -548,7 +516,7 @@
     assertFalse(slaveXsltDir.exists());
 
     slaveJetty = createJetty(slave);
-    slaveClient.shutdown();
+    slaveClient.close();
     slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
 
     //add a doc with new field and commit on master to trigger snappull from slave.
@@ -625,7 +593,7 @@
     slave.copyConfigFile(CONF_DIR + "solrconfig-slave1.xml", "solrconfig.xml");
     slaveJetty.stop();
     slaveJetty = createJetty(slave);
-    slaveClient.shutdown();
+    slaveClient.close();
     slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
 
     masterClient.deleteByQuery("*:*");
@@ -647,8 +615,8 @@
     assertEquals(nDocs, masterQueryResult.getNumFound());
 
     // snappull
-    String masterUrl = buildUrl(slaveJetty.getLocalPort()) + "/replication?command=fetchindex&masterUrl=";
-    masterUrl += buildUrl(masterJetty.getLocalPort()) + "/replication";
+    String masterUrl = buildUrl(slaveJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME + "/replication?command=fetchindex&masterUrl=";
+    masterUrl += buildUrl(masterJetty.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME + "/replication";
     URL url = new URL(masterUrl);
     InputStream stream = url.openStream();
     stream.close();
@@ -761,14 +729,14 @@
       slave.copyConfigFile(CONF_DIR +slaveSchema, "schema.xml");
       slaveJetty.stop();
       slaveJetty = createJetty(slave);
-      slaveClient.shutdown();
+      slaveClient.close();
       slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
 
       master.copyConfigFile(CONF_DIR + "solrconfig-master3.xml",
           "solrconfig.xml");
       masterJetty.stop();
       masterJetty = createJetty(master);
-      masterClient.shutdown();
+      masterClient.close();
       masterClient = createNewSolrClient(masterJetty.getLocalPort());
       
       masterClient.deleteByQuery("*:*");
@@ -883,7 +851,7 @@
     slave.copyConfigFile(CONF_DIR + "solrconfig-slave1.xml", "solrconfig.xml");
     slaveJetty.stop();
     slaveJetty = createJetty(slave);
-    slaveClient.shutdown();
+    slaveClient.close();
     slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
 
     try {
@@ -893,7 +861,7 @@
           "solrconfig.xml");
       repeaterJetty = createJetty(repeater);
       if (repeaterClient != null) {
-        repeaterClient.shutdown();
+        repeaterClient.close();
       }
       repeaterClient = createNewSolrClient(repeaterJetty.getLocalPort());
       
@@ -944,6 +912,9 @@
         repeater.tearDown();
         repeaterJetty = null;
       }
+      if (repeaterClient != null) {
+        repeaterClient.close();
+      }
     }
     
   }
@@ -998,8 +969,10 @@
     URL url;
     InputStream stream;
     masterUrl = buildUrl(to.getLocalPort())
+        + "/" + DEFAULT_TEST_CORENAME
         + "/replication?wait=true&command=fetchindex&masterUrl="
-        + buildUrl(from.getLocalPort()) + "/replication";
+        + buildUrl(from.getLocalPort())
+        + "/" + DEFAULT_TEST_CORENAME + "/replication";
     url = new URL(masterUrl);
     stream = url.openStream();
     stream.close();
@@ -1024,7 +997,7 @@
     masterJetty.stop();
 
     masterJetty = createJetty(master);
-    masterClient.shutdown();
+    masterClient.close();
     masterClient = createNewSolrClient(masterJetty.getLocalPort());
     
     for (int i = 0; i < nDocs; i++)
@@ -1042,7 +1015,7 @@
 
     //start slave
     slaveJetty = createJetty(slave);
-    slaveClient.shutdown();
+    slaveClient.close();
     slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
 
     //get docs from slave and check if number is equal to master
@@ -1076,7 +1049,7 @@
       masterJetty.stop();
       
       masterJetty = createJetty(master);
-      masterClient.shutdown();
+      masterClient.close();
       masterClient = createNewSolrClient(masterJetty.getLocalPort());
       
       for (int i = 0; i < nDocs; i++)
@@ -1102,7 +1075,7 @@
       
       // start slave
       slaveJetty = createJetty(slave);
-      slaveClient.shutdown();
+      slaveClient.close();
       slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
       
       // get docs from slave and check if number is equal to master
@@ -1136,7 +1109,7 @@
     masterJetty.stop();
 
     masterJetty = createJetty(master);
-    masterClient.shutdown();
+    masterClient.close();
     masterClient = createNewSolrClient(masterJetty.getLocalPort());
 
     masterClient.deleteByQuery("*:*");
@@ -1154,7 +1127,7 @@
 
     //start slave
     slaveJetty = createJetty(slave);
-    slaveClient.shutdown();
+    slaveClient.close();
     slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
     
     //get docs from slave and check if number is equal to master
@@ -1235,7 +1208,7 @@
     masterJetty.stop();
 
     masterJetty = createJetty(master);
-    masterClient.shutdown();
+    masterClient.close();
     masterClient = createNewSolrClient(masterJetty.getLocalPort());
 
     slave.setTestPort(masterJetty.getLocalPort());
@@ -1243,7 +1216,7 @@
 
     slaveJetty.stop();
     slaveJetty = createJetty(slave);
-    slaveClient.shutdown();
+    slaveClient.close();
     slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
 
     slaveClient.deleteByQuery("*:*");
@@ -1296,7 +1269,7 @@
     master.copyConfigFile(CONF_DIR + "solrconfig-master-throttled.xml", "solrconfig.xml");
     useFactory(null);
     masterJetty = createJetty(master);
-    masterClient.shutdown();
+    masterClient.close();
     masterClient = createNewSolrClient(masterJetty.getLocalPort());
 
     //index docs
@@ -1308,7 +1281,7 @@
 
     //Check Index Size
     String dataDir = master.getDataDir();
-    masterClient.shutdown();
+    masterClient.close();
     masterJetty.stop();
 
     Directory dir = FSDirectory.open(Paths.get(dataDir, "index"));
@@ -1329,7 +1302,7 @@
     slave.setTestPort(masterJetty.getLocalPort());
     slave.copyConfigFile(CONF_DIR + "solrconfig-slave1.xml", "solrconfig.xml");
     slaveJetty = createJetty(slave);
-    slaveClient.shutdown();
+    slaveClient.close();
     slaveClient = createNewSolrClient(slaveJetty.getLocalPort());
 
     long startTime = System.nanoTime();
@@ -1427,34 +1400,36 @@
     final long sleepInterval = 200;
     long timeSlept = 0;
 
-    SolrParams p = params("action","status", "core", "collection1");
-    while (timeSlept < timeout) {
-      QueryRequest req = new QueryRequest(p);
-      req.setPath("/admin/cores");
-      try {
-        NamedList data = client.request(req);
-        for (String k : new String[] {"status","collection1"}) {
-          Object o = data.get(k);
-          assertNotNull("core status rsp missing key: " + k, o);
-          data = (NamedList) o;
+    try (HttpSolrClient adminClient = adminClient(client)) {
+      SolrParams p = params("action", "status", "core", "collection1");
+      while (timeSlept < timeout) {
+        QueryRequest req = new QueryRequest(p);
+        req.setPath("/admin/cores");
+        try {
+          NamedList data = adminClient.request(req);
+          for (String k : new String[]{"status", "collection1"}) {
+            Object o = data.get(k);
+            assertNotNull("core status rsp missing key: " + k, o);
+            data = (NamedList) o;
+          }
+          Date startTime = (Date) data.get("startTime");
+          assertNotNull("core has null startTime", startTime);
+          if (null == min || startTime.after(min)) {
+            return startTime;
+          }
+        } catch (SolrException e) {
+          // workaround for SOLR-4668
+          if (500 != e.code()) {
+            throw e;
+          } // else: 500 possibly from the core reload in progress...
         }
-        Date startTime = (Date) data.get("startTime");
-        assertNotNull("core has null startTime", startTime);
-        if (null == min || startTime.after(min)) {
-          return startTime;
-        }
-      } catch (SolrException e) {
-        // workarround for SOLR-4668
-        if (500 != e.code()) {
-          throw e;
-        } // else server possibly from the core reload in progress...
-      }
 
-      timeSlept += sleepInterval;
-      Thread.sleep(sleepInterval);
+        timeSlept += sleepInterval;
+        Thread.sleep(sleepInterval);
+      }
+      fail("timed out waiting for collection1 startTime to exceed: " + min);
+      return min; // compilation necessity (unreachable after fail)
     }
-    fail("timed out waiting for collection1 startAt time to exceed: " + min);
-    return min; // compilation neccessity
   }
   
   private static String buildUrl(int port) {
@@ -1512,6 +1487,11 @@
       System.setProperty("solr.test.sys.prop1", "propone");
       System.setProperty("solr.test.sys.prop2", "proptwo");
 
+      Properties props = new Properties();
+      props.setProperty("name", "collection1");
+
+      writeCoreProperties(homeDir.toPath().resolve("collection1"), props, "TestReplicationHandler");
+
       dataDir = new File(homeDir + "/collection1", "data");
       confDir = new File(homeDir + "/collection1", "conf");
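
Because test clients are now bound to a core-qualified URL (".../solr/collection1"), core-admin calls need a client bound to the node root; the adminClient(...) helper added above derives one by stripping the core suffix. A condensed sketch (URL illustrative, not part of the patch):

import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.ModifiableSolrParams;

HttpSolrClient coreClient = new HttpSolrClient("http://127.0.0.1:8983/solr/collection1");
String adminUrl = coreClient.getBaseURL().replace("/collection1", "");
try (HttpSolrClient adminClient = new HttpSolrClient(adminUrl)) {
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set("action", "status");
  params.set("core", "collection1");
  QueryRequest req = new QueryRequest(params);
  req.setPath("/admin/cores");  // admin requests target the node root, not a core
  adminClient.request(req);
}
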
 
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java
index 963197d..beb7f7b 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandlerBackup.java
@@ -73,7 +73,7 @@
   private static SolrClient createNewSolrClient(int port) {
     try {
       // setup the client...
-      HttpSolrClient client = new HttpSolrClient(buildUrl(port, context));
+      HttpSolrClient client = new HttpSolrClient(buildUrl(port, context) + "/" + DEFAULT_TEST_CORENAME);
       client.setConnectionTimeout(15000);
       client.setSoTimeout(60000);
       client.setDefaultMaxConnectionsPerHost(100);
@@ -108,7 +108,7 @@
   @After
   public void tearDown() throws Exception {
     super.tearDown();
-    masterClient.shutdown();
+    masterClient.close();
     masterClient  = null;
     masterJetty.stop();
     master.tearDown();
@@ -260,7 +260,7 @@
     }
 
     public void fetchStatus() throws IOException {
-      String masterUrl = buildUrl(masterJetty.getLocalPort(), "/solr") + "/replication?command=" + ReplicationHandler.CMD_DETAILS;
+      String masterUrl = buildUrl(masterJetty.getLocalPort(), "/solr") + "/" + DEFAULT_TEST_CORENAME + "/replication?command=" + ReplicationHandler.CMD_DETAILS;
       URL url;
       InputStream stream = null;
       try {
@@ -309,10 +309,10 @@
     public void runCommand() {
       String masterUrl = null;
       if(backupName != null) {
-        masterUrl = buildUrl(masterJetty.getLocalPort(), context) + "/replication?command=" + cmd +
+        masterUrl = buildUrl(masterJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME + "/replication?command=" + cmd +
             "&name=" +  backupName;
       } else {
-        masterUrl = buildUrl(masterJetty.getLocalPort(), context) + "/replication?command=" + cmd +
+        masterUrl = buildUrl(masterJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME + "/replication?command=" + cmd +
             (addNumberToKeepInRequest ? "&" + backupKeepParamName + "=1" : "");
       }
 
@@ -343,7 +343,7 @@
     }
 
     public boolean fetchStatus() throws IOException {
-      String masterUrl = buildUrl(masterJetty.getLocalPort(), context) + "/replication?command=" + ReplicationHandler.CMD_DETAILS;
+      String masterUrl = buildUrl(masterJetty.getLocalPort(), context) + "/" + DEFAULT_TEST_CORENAME + "/replication?command=" + ReplicationHandler.CMD_DETAILS;
       URL url;
       InputStream stream = null;
       try {
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
index 90d9cf3..dd98c04 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerCloud.java
@@ -18,12 +18,15 @@
  */
 
 
+import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 
+import org.apache.lucene.util.LuceneTestCase.BadApple;
+
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
@@ -32,12 +35,18 @@
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.core.ConfigOverlay;
+import org.apache.solr.core.RequestParams;
 import org.apache.solr.core.TestSolrConfigHandler;
 import org.apache.solr.util.RESTfulServerProvider;
 import org.apache.solr.util.RestTestHarness;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.util.Arrays.asList;
+import static org.apache.solr.handler.TestBlobHandler.getAsString;
+
+@BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-6924")
 public class TestSolrConfigHandlerCloud extends AbstractFullDistribZkTestBase {
   static final Logger log =  LoggerFactory.getLogger(TestSolrConfigHandlerCloud.class);
   private List<RestTestHarness> restTestHarnesses = new ArrayList<>();
@@ -55,11 +64,18 @@
   }
 
   @Override
-  public void doTest() throws Exception {
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    for (RestTestHarness r : restTestHarnesses) {
+      r.close();
+    }
+  }
+
+  @Test
+  public void test() throws Exception {
     setupHarnesses();
     testReqHandlerAPIs();
     testReqParams();
-
   }
 
   private void testReqHandlerAPIs() throws Exception {
@@ -94,23 +110,14 @@
 
     TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params?wt=json", payload);
 
-    TestSolrConfigHandler.testForResponseElement(
-        null,
+    Map result = TestSolrConfigHandler.testForResponseElement(null,
         urls.get(random().nextInt(urls.size())),
         "/config/params?wt=json",
         cloudClient,
-        Arrays.asList("response", "params", "x", "a"),
+        asList("response", "params", "x", "a"),
         "A val",
         10);
-
-    TestSolrConfigHandler.testForResponseElement(
-        null,
-        urls.get(random().nextInt(urls.size())),
-        "/config/params?wt=json",
-        cloudClient,
-        Arrays.asList("response", "params", "x", "b"),
-        "B val",
-        10);
+    compareValues(result, "B val", asList("response", "params", "x", "b"));
 
     payload = "{\n" +
         "'create-requesthandler' : { 'name' : '/dump', 'class': 'org.apache.solr.handler.DumpRequestHandler' }\n" +
@@ -122,22 +129,24 @@
         urls.get(random().nextInt(urls.size())),
         "/config/overlay?wt=json",
         cloudClient,
-        Arrays.asList("overlay", "requestHandler", "/dump", "name"),
+        asList("overlay", "requestHandler", "/dump", "name"),
         "/dump",
         10);
 
-    TestSolrConfigHandler.testForResponseElement(null,
+    result = TestSolrConfigHandler.testForResponseElement(null,
         urls.get(random().nextInt(urls.size())),
         "/dump?wt=json&useParams=x",
         cloudClient,
-        Arrays.asList("params", "a"),
+        asList("params", "a"),
         "A val",
         5);
+    compareValues(result, "", asList("params", RequestParams.USEPARAM));
+
     TestSolrConfigHandler.testForResponseElement(null,
         urls.get(random().nextInt(urls.size())),
         "/dump?wt=json&useParams=x&a=fomrequest",
         cloudClient,
-        Arrays.asList("params", "a"),
+        asList("params", "a"),
         "fomrequest",
         5);
 
@@ -147,19 +156,19 @@
 
     TestSolrConfigHandler.runConfigCommand(writeHarness,"/config?wt=json", payload);
 
-    TestSolrConfigHandler.testForResponseElement(null,
+    result = TestSolrConfigHandler.testForResponseElement(null,
         urls.get(random().nextInt(urls.size())),
         "/config/overlay?wt=json",
         cloudClient,
-        Arrays.asList("overlay", "requestHandler", "/dump1", "name"),
+        asList("overlay", "requestHandler", "/dump1", "name"),
         "/dump1",
         10);
 
-    TestSolrConfigHandler.testForResponseElement(null,
+    result = TestSolrConfigHandler.testForResponseElement(null,
         urls.get(random().nextInt(urls.size())),
         "/dump1?wt=json",
         cloudClient,
-        Arrays.asList("params", "a"),
+        asList("params", "a"),
         "A val",
         5);
 
@@ -169,47 +178,37 @@
     payload = " {\n" +
         "  'set' : {'y':{\n" +
         "                'c':'CY val',\n" +
-        "                'b': 'BY val'}\n" +
+        "                'b': 'BY val', " +
+        "                'i': 20, " +
+        "                'd': ['val 1', 'val 2']}\n" +
         "             }\n" +
         "  }";
 
 
     TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params?wt=json", payload);
 
-    TestSolrConfigHandler.testForResponseElement(
+    result = TestSolrConfigHandler.testForResponseElement(
         null,
         urls.get(random().nextInt(urls.size())),
         "/config/params?wt=json",
         cloudClient,
-        Arrays.asList("response", "params", "y", "c"),
+        asList("response", "params", "y", "c"),
         "CY val",
         10);
+    compareValues(result, 20L, asList("response", "params", "y", "i"));
 
-    TestSolrConfigHandler.testForResponseElement(null,
+
+    result = TestSolrConfigHandler.testForResponseElement(null,
         urls.get(random().nextInt(urls.size())),
         "/dump?wt=json&useParams=y",
         cloudClient,
-        Arrays.asList("params", "c"),
+        asList("params", "c"),
         "CY val",
         5);
-
-
-    TestSolrConfigHandler.testForResponseElement(null,
-        urls.get(random().nextInt(urls.size())),
-        "/dump1?wt=json&useParams=y",
-        cloudClient,
-        Arrays.asList("params", "b"),
-        "BY val",
-        5);
-
-    TestSolrConfigHandler.testForResponseElement(null,
-        urls.get(random().nextInt(urls.size())),
-        "/dump1?wt=json&useParams=y",
-        cloudClient,
-        Arrays.asList("params", "a"),
-        null,
-        5);
-
+    compareValues(result, "BY val", asList("params", "b"));
+    compareValues(result, null, asList("params", "a"));
+    compareValues(result, Arrays.asList("val 1", "val 2"), asList("params", "d"));
+    compareValues(result, "20", asList("params", "i"));
     payload = " {\n" +
         "  'update' : {'y': {\n" +
         "                'c':'CY val modified',\n" +
@@ -222,23 +221,16 @@
 
     TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params?wt=json", payload);
 
-    TestSolrConfigHandler.testForResponseElement(
+    result = TestSolrConfigHandler.testForResponseElement(
         null,
         urls.get(random().nextInt(urls.size())),
         "/config/params?wt=json",
         cloudClient,
-        Arrays.asList("response", "params", "y", "c"),
+        asList("response", "params", "y", "c"),
         "CY val modified",
         10);
+    compareValues(result, "EY val", asList("response", "params", "y", "e"));
 
-    TestSolrConfigHandler.testForResponseElement(
-        null,
-        urls.get(random().nextInt(urls.size())),
-        "/config/params?wt=json",
-        cloudClient,
-        Arrays.asList("response", "params", "y", "e"),
-        "EY val",
-        10);
 
     payload = " {\n" +
         "  'set' : {'y': {\n" +
@@ -250,23 +242,16 @@
 
 
     TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params?wt=json", payload);
-    TestSolrConfigHandler.testForResponseElement(
+    result = TestSolrConfigHandler.testForResponseElement(
         null,
         urls.get(random().nextInt(urls.size())),
         "/config/params?wt=json",
         cloudClient,
-        Arrays.asList("response", "params", "y", "p"),
+        asList("response", "params", "y", "p"),
         "P val",
         10);
+    compareValues(result, null, asList("response", "params", "y", "c"));
 
-    TestSolrConfigHandler.testForResponseElement(
-        null,
-        urls.get(random().nextInt(urls.size())),
-        "/config/params?wt=json",
-        cloudClient,
-        Arrays.asList("response", "params", "y", "c"),
-        null,
-        10);
     payload = " {'delete' : 'y'}";
     TestSolrConfigHandler.runConfigCommand(writeHarness,"/config/params?wt=json", payload);
     TestSolrConfigHandler.testForResponseElement(
@@ -274,11 +259,16 @@
         urls.get(random().nextInt(urls.size())),
         "/config/params?wt=json",
         cloudClient,
-        Arrays.asList("response", "params", "y", "p"),
+        asList("response", "params", "y", "p"),
         null,
         10);
 
 
   }
 
+  public static void compareValues(Map result, Object expected, List<String> jsonPath) {
+    assertTrue(MessageFormat.format("Could not get expected value {0} for path {1}; full output: {2}", expected, jsonPath, getAsString(result)),
+        Objects.equals(expected, ConfigOverlay.getObjectByPath(result, false, jsonPath)));
+  }
+
 }
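
The compareValues(...) helper above walks a parsed JSON response with ConfigOverlay.getObjectByPath and asserts the value found at the path. A hand-built sketch of the map shape it expects (values illustrative):

import static java.util.Arrays.asList;
import java.util.HashMap;
import java.util.Map;

Map x = new HashMap();
x.put("a", "A val");
Map params = new HashMap();
params.put("x", x);
Map response = new HashMap();
response.put("params", params);
Map result = new HashMap();
result.put("response", response);

compareValues(result, "A val", asList("response", "params", "x", "a")); // passes
compareValues(result, null, asList("response", "params", "x", "zzz"));  // passes: an absent path resolves to null
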
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
index 23e2f40..bae4ab5 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSolrConfigHandlerConcurrent.java
@@ -17,7 +17,10 @@
  * limitations under the License.
  */
 
-import java.io.IOException;
+import static java.util.Arrays.asList;
+import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
+import static org.noggit.ObjectBuilder.getVal;
+
 import java.io.StringReader;
 import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
@@ -43,15 +46,12 @@
 import org.apache.solr.core.ConfigOverlay;
 import org.apache.solr.util.RESTfulServerProvider;
 import org.apache.solr.util.RestTestHarness;
+import org.junit.Test;
 import org.noggit.JSONParser;
 import org.noggit.ObjectBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static java.util.Arrays.asList;
-import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
-import static org.noggit.ObjectBuilder.getVal;
-
 
 public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBase {
 
@@ -72,7 +72,15 @@
   }
 
   @Override
-  public void doTest() throws Exception {
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    for (RestTestHarness h : restTestHarnesses) {
+      h.close();
+    }
+  }
+
+  @Test
+  public void test() throws Exception {
     Map editable_prop_map = (Map) new ObjectBuilder(new JSONParser(new StringReader(
         ConfigOverlay.MAPPING))).getObject();
     Map caches = (Map) editable_prop_map.get("query");
@@ -90,8 +98,6 @@
             ArrayList errs = new ArrayList();
             collectErrors.add(errs);
             invokeBulkCall((String)e.getKey() , errs, (Map) e.getValue());
-          } catch (IOException e) {
-            e.printStackTrace();
           } catch (Exception e) {
             e.printStackTrace();
           }
@@ -132,15 +138,24 @@
     Set<String> errmessages = new HashSet<>();
     for(int i =1;i<2;i++){//make it a higher number
       RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
-      payload = payload.replaceAll("CACHENAME" , cacheName);
-      String val1 = String.valueOf(10 * i + 1);
-      payload = payload.replace("CACHEVAL1", val1);
-      String val2 = String.valueOf(10 * i + 2);
-      payload = payload.replace("CACHEVAL2", val2);
-      String val3 = String.valueOf(10 * i + 3);
-      payload = payload.replace("CACHEVAL3", val3);
-
-      String response = publisher.post("/config?wt=json", SolrTestCaseJ4.json(payload));
+      String response;
+      String val1;
+      String val2;
+      String val3;
+      try {
+        payload = payload.replaceAll("CACHENAME", cacheName);
+        val1 = String.valueOf(10 * i + 1);
+        payload = payload.replace("CACHEVAL1", val1);
+        val2 = String.valueOf(10 * i + 2);
+        payload = payload.replace("CACHEVAL2", val2);
+        val3 = String.valueOf(10 * i + 3);
+        payload = payload.replace("CACHEVAL3", val3);
+  
+        response = publisher.post("/config?wt=json", SolrTestCaseJ4.json(payload));
+      } finally {
+        publisher.close();
+      }
+      
       Map map = (Map) getVal(new JSONParser(new StringReader(response)));
       Object errors = map.get("errors");
       if(errors!= null){
diff --git a/solr/core/src/java/org/apache/solr/handler/CSVRequestHandler.java b/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java
similarity index 65%
rename from solr/core/src/java/org/apache/solr/handler/CSVRequestHandler.java
rename to solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java
index f6b91c9..60caa30 100644
--- a/solr/core/src/java/org/apache/solr/handler/CSVRequestHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/ThrowErrorOnInitRequestHandler.java
@@ -17,29 +17,33 @@
 
 package org.apache.solr.handler;
 
+import java.io.IOException;
+
 import org.apache.solr.common.util.NamedList;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.response.SolrQueryResponse;
 
 /**
- * use {@link UpdateRequestHandler}
+ * throws a {@link java.lang.Error} on init for testing purposes
  */
-@Deprecated
-public class CSVRequestHandler extends UpdateRequestHandler {
-
+public class ThrowErrorOnInitRequestHandler extends RequestHandlerBase
+{
   @Override
-  public void init(NamedList args) {
-    super.init(args);
-    setAssumeContentType("application/csv");
-    // log.warn("Using deprecated class: "+this.getClass().getSimpleName()+" -- replace with UpdateRequestHandler");
+  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException 
+  {
+    /* NOOP */
   }
 
   //////////////////////// SolrInfoMBeans methods //////////////////////
+
   @Override
   public String getDescription() {
-    return "Add/Update multiple documents with CSV formatted rows";
+    return "throws a java.lang.Error on init for testing purposes";
+  }
+
+  @Override
+  public void init(NamedList args) {
+    throw new Error("Doing my job, throwing a java.lang.Error");
   }
 }
-
-
-
-
-
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
index cbc93d6..41b02a1 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/CoreAdminHandlerTest.java
@@ -31,7 +31,6 @@
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrXMLCoresLocator;
 import org.apache.solr.response.SolrQueryResponse;
 import org.junit.BeforeClass;
 import org.junit.Rule;
@@ -73,8 +72,6 @@
         new File(subHome, "solrconfig.snippet.randomindexconfig.xml"));
 
     final CoreContainer cores = h.getCoreContainer();
-    SolrXMLCoresLocator.NonPersistingLocator locator
-        = (SolrXMLCoresLocator.NonPersistingLocator) cores.getCoresLocator();
 
     final CoreAdminHandler admin = new CoreAdminHandler(cores);
 
@@ -98,15 +95,6 @@
             resp);
     assertNull("Exception on create", resp.getException());
 
-    // First assert that these values are persisted.
-    h.validateXPath
-        (locator.xml
-            ,"/solr/cores/core[@name='" + getCoreName() + "' and @instanceDir='${INSTDIR_TEST}']"
-            ,"/solr/cores/core[@name='" + getCoreName() + "' and @dataDir='${DATA_TEST}']"
-            ,"/solr/cores/core[@name='" + getCoreName() + "' and @schema='${SCHEMA_TEST}']"
-            ,"/solr/cores/core[@name='" + getCoreName() + "' and @config='${CONFIG_TEST}']"
-        );
-
     // Now assert that certain values are properly dereferenced in the process of creating the core, see
     // SOLR-4982.
 
@@ -212,42 +200,32 @@
     copySolrHomeToTemp(solrHomeDirectory, "corex", true);
     File corex = new File(solrHomeDirectory, "corex");
     FileUtils.write(new File(corex, "core.properties"), "", Charsets.UTF_8.toString());
-    JettySolrRunner runner = new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), "/solr", 0);
-    HttpSolrClient client = null;
-    try {
-      runner.start();
-      client = new HttpSolrClient("http://localhost:" + runner.getLocalPort() + "/solr/corex");
+    JettySolrRunner runner = new JettySolrRunner(solrHomeDirectory.getAbsolutePath(), "/solr", 0, null, null, true, null, sslConfig);
+    runner.start();
+
+    try (HttpSolrClient client = new HttpSolrClient(runner.getBaseUrl() + "/corex")) {
       client.setConnectionTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
       client.setSoTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
       SolrInputDocument doc = new SolrInputDocument();
       doc.addField("id", "123");
       client.add(doc);
       client.commit();
-      client.shutdown();
+    }
 
-      client = new HttpSolrClient("http://localhost:" + runner.getLocalPort() + "/solr");
+    try (HttpSolrClient client = new HttpSolrClient(runner.getBaseUrl().toString())) {
       client.setConnectionTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
       client.setSoTimeout(SolrTestCaseJ4.DEFAULT_CONNECTION_TIMEOUT);
       CoreAdminRequest.Unload req = new CoreAdminRequest.Unload(false);
       req.setDeleteInstanceDir(true);
       req.setCoreName("corex");
       req.process(client);
-      client.shutdown();
-
-      runner.stop();
-
-      assertFalse("Instance directory exists after core unload with deleteInstanceDir=true : " + corex,
-          corex.exists());
-    } catch (Exception e) {
-      log.error("Exception testing core unload with deleteInstanceDir=true", e);
-    } finally {
-      if (client != null) {
-        client.shutdown();
-      }
-      if (!runner.isStopped())  {
-        runner.stop();
-      }
     }
+
+    runner.stop();
+
+    assertFalse("Instance directory exists after core unload with deleteInstanceDir=true : " + corex,
+        corex.exists());
+
   }
 
   @Test
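
Distilled, the rewritten unload test performs this sequence (base URL illustrative, not part of the patch):

import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CoreAdminRequest;

try (HttpSolrClient client = new HttpSolrClient("http://127.0.0.1:8983/solr")) {
  CoreAdminRequest.Unload req = new CoreAdminRequest.Unload(false); // constructor arg: deleteIndex
  req.setDeleteInstanceDir(true);  // remove the whole instance directory on unload
  req.setCoreName("corex");
  req.process(client);
}
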
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
index de75b5a..ef9be3d 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedDebugComponentTest.java
@@ -65,7 +65,8 @@
     solrHome = createSolrHome();
     createJetty(solrHome.getAbsolutePath(), null, null);
     String url = jetty.getBaseUrl().toString();
-    collection1 = new HttpSolrClient(url);
+
+    collection1 = new HttpSolrClient(url + "/collection1");
     collection2 = new HttpSolrClient(url + "/collection2");
     
     String urlCollection1 = jetty.getBaseUrl().toString() + "/" + "collection1";
@@ -74,10 +75,13 @@
     shard2 = urlCollection2.replaceAll("https?://", "");
     
     //create second core
-    CoreAdminRequest.Create req = new CoreAdminRequest.Create();
-    req.setCoreName("collection2");
-    collection1.request(req);
-    
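+    // core admin requests go to the node-level URL, not a core-specific one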
+    try (HttpSolrClient nodeClient = new HttpSolrClient(url)) {
+      CoreAdminRequest.Create req = new CoreAdminRequest.Create();
+      req.setCoreName("collection2");
+      req.setConfigSet("collection1");
+      nodeClient.request(req);
+    }
+
     SolrInputDocument doc = new SolrInputDocument();
     doc.setField("id", "1");
     doc.setField("text", "batman");
@@ -93,8 +97,8 @@
   
   @AfterClass
   public static void destroyThings() throws Exception {
-    collection1.shutdown();
-    collection2.shutdown();
+    collection1.close();
+    collection2.close();
     collection1 = null;
     collection2 = null;
     jetty.stop();
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
index 9cd6f52..a2d4880 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedExpandComponentTest.java
@@ -21,12 +21,10 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
-import java.nio.ByteBuffer;
 import java.util.Map;
 import java.util.Iterator;
 
@@ -38,8 +36,6 @@
 public class DistributedExpandComponentTest extends BaseDistributedSearchTestCase {
 
   public DistributedExpandComponentTest() {
-    fixShardCount = true;
-    shardCount = 3;
     stress = 0;
   }
 
@@ -48,8 +44,9 @@
     initCore("solrconfig-collapseqparser.xml", "schema11.xml");
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     final String group = (random().nextBoolean() ? "group_s" : "group_s_dv");
     
     del("*:*");
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
index 17f0030..6c9da19 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
@@ -32,18 +32,15 @@
 import org.apache.solr.common.params.SolrParams;
 
 import junit.framework.AssertionFailedError;
+import org.junit.Test;
 
 public class DistributedFacetPivotLargeTest extends BaseDistributedSearchTestCase {
   
   public static final String SPECIAL = ""; 
 
-  public DistributedFacetPivotLargeTest() {
-    this.fixShardCount = true;
-    this.shardCount = 4; // we leave one empty as an edge case
-  }
-  
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     this.stress = 0 ;
     handle.clear();
     handle.put("QTime", SKIPVAL);
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java
index d675d8e..350a4f7 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLongTailTest.java
@@ -26,6 +26,7 @@
 import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.junit.Test;
 
 /**
  * test demonstrating how overrequesting helps finds top-terms in the "long tail" 
@@ -39,11 +40,6 @@
  */
 public class DistributedFacetPivotLongTailTest extends BaseDistributedSearchTestCase {
   
-  public DistributedFacetPivotLongTailTest(){
-    this.fixShardCount = true;
-    this.shardCount = 3;
-  }
-
   private int docNumber = 0;
   
   public int getDocNum() {
@@ -51,8 +47,9 @@
     return docNumber;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
 
     final SolrClient shard0 = clients.get(0);
     final SolrClient shard1 = clients.get(1);
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java
index e269dea..64cc0f9 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallAdvancedTest.java
@@ -25,6 +25,7 @@
 import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
+import org.junit.Test;
 
 import java.util.List;
 
@@ -37,13 +38,9 @@
  */
 public class DistributedFacetPivotSmallAdvancedTest extends BaseDistributedSearchTestCase {
 
-  public DistributedFacetPivotSmallAdvancedTest() {
-    this.fixShardCount = true;
-    this.shardCount = 2;
-  }
-
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 2)
+  public void test() throws Exception {
 
     del("*:*");
     final SolrClient shard0 = clients.get(0);
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java
index 1407a80..19baf83 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotSmallTest.java
@@ -32,16 +32,13 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 
 import junit.framework.AssertionFailedError;
+import org.junit.Test;
 
 public class DistributedFacetPivotSmallTest extends BaseDistributedSearchTestCase {
-  
-  public DistributedFacetPivotSmallTest() {
-    this.fixShardCount = true;
-    this.shardCount = 4;
-  }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     
     del("*:*");
 
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java
index d17af16..8fbfe5a 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotWhiteBoxTest.java
@@ -22,18 +22,15 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
+import org.junit.Test;
 
 import java.util.List;
 
 public class DistributedFacetPivotWhiteBoxTest extends BaseDistributedSearchTestCase {
 
-  public DistributedFacetPivotWhiteBoxTest() {
-    this.fixShardCount = true;
-    this.shardCount = 4;
-  }
-
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
 
     del("*:*");
 
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java
index 1944927..6bf5136 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedMLTComponentTest.java
@@ -20,8 +20,8 @@
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.common.params.MoreLikeThisParams;
-import org.junit.BeforeClass;
 import org.junit.Ignore;
+import org.junit.Test;
 
 /**
  * Test for distributed MoreLikeThisComponent's 
@@ -38,29 +38,18 @@
 
   public DistributedMLTComponentTest()
   {
-    fixShardCount=true;
-    shardCount=3;
     stress=0;
   }
 
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-
-  }
-
   @Override
-  public void setUp() throws Exception {
+  public void distribSetUp() throws Exception {
     requestHandlerName = "mltrh";
-    super.setUp();
+    super.distribSetUp();
   }
   
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-  
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     del("*:*");
     index(id, "1", "lowerfilt", "toyota");
     index(id, "2", "lowerfilt", "chevrolet");
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java
index 883946a..20a57db 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentCustomSortTest.java
@@ -19,9 +19,8 @@
 
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.SolrDocumentList;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.nio.ByteBuffer;
 
@@ -33,8 +32,6 @@
 public class DistributedQueryComponentCustomSortTest extends BaseDistributedSearchTestCase {
 
   public DistributedQueryComponentCustomSortTest() {
-    fixShardCount = true;
-    shardCount = 3;
     stress = 0;
   }
 
@@ -43,8 +40,9 @@
     initCore("solrconfig.xml", "schema-custom-field.xml");
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     del("*:*");
 
     index(id, "1", "text", "a", "payload", ByteBuffer.wrap(new byte[] { 0x12, 0x62, 0x15 }),                     //  2 
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java
index 158a32a..ed328fb 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryComponentOptimizationTest.java
@@ -20,11 +20,11 @@
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.nio.ByteBuffer;
 import java.util.Map;
@@ -40,8 +40,6 @@
 public class DistributedQueryComponentOptimizationTest extends BaseDistributedSearchTestCase {
 
   public DistributedQueryComponentOptimizationTest() {
-    fixShardCount = true;
-    shardCount = 3;
     stress = 0;
   }
 
@@ -50,8 +48,9 @@
     initCore("solrconfig.xml", "schema-custom-field.xml");
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     del("*:*");
 
     index(id, "1", "text", "a", "test_sS", "21", "payload", ByteBuffer.wrap(new byte[] { 0x12, 0x62, 0x15 }),                     //  2
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java
index 19dfb32..e8e908d 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedQueryElevationComponentTest.java
@@ -26,6 +26,7 @@
 import org.apache.solr.common.params.CommonParams;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 /**
  * 
@@ -39,8 +40,6 @@
   }
 
   public DistributedQueryElevationComponentTest() {
-    fixShardCount = true;
-    shardCount = 3;
     stress = 0;
 
     // TODO: a better way to do this?
@@ -59,8 +58,9 @@
     System.clearProperty("elevate.data.file");
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     
     
     del("*:*");
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java
index fedc945..bbf5358 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedSpellCheckComponentTest.java
@@ -32,6 +32,7 @@
 import org.apache.solr.common.params.SpellingParams;
 import org.apache.solr.common.util.NamedList;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 /**
  * Test for SpellCheckComponent's distributed querying
@@ -57,16 +58,6 @@
     useFactory(null); // need an FS factory
   }
 
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-  }
-  
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-  
   private void q(Object... q) throws Exception {
     final ModifiableSolrParams params = new ModifiableSolrParams();
 
@@ -95,9 +86,9 @@
       Assert.fail("Control data did not return any suggestions.");
     }
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     del("*:*");
     index(id, "1", "lowerfilt", "toyota");
     index(id, "2", "lowerfilt", "chevrolet");
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java
index dc08fb0..7f08ecb 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedSuggestComponentTest.java
@@ -31,6 +31,7 @@
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.spelling.suggest.SuggesterParams;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 /**
  * Test for SuggestComponent's distributed querying
@@ -55,16 +56,6 @@
   }
 
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
-  }
-  
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-  
-  @Override
   public void validateControlData(QueryResponse control) throws Exception
   {    
     NamedList<Object> nl = control.getResponse();
@@ -74,10 +65,10 @@
     if(sc.size() == 0 && command == null) {
       Assert.fail("Control data did not return any suggestions or execute any command");
     }
-  } 
-  
-  @Override
-  public void doTest() throws Exception {
+  }
+
+  @Test
+  public void test() throws Exception {
     del("*:*");
     index(id, "1", "cat", "This is another title", "price", "10", "weight", "10");
     index(id, "2", "cat", "Yet another", "price", "15", "weight", "10");
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java
index ee9bfb3..ef09760 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java
@@ -18,6 +18,7 @@
  */
 
 import org.apache.solr.BaseDistributedSearchTestCase;
+import org.junit.Test;
 
 /**
  * Test for TermsComponent distributed querying
@@ -27,8 +28,8 @@
  */
 public class DistributedTermsComponentTest extends BaseDistributedSearchTestCase {
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     del("*:*");
     index(id, 18, "b_t", "snake spider shark snail slug seal");
     index(id, 19, "b_t", "snake spider shark snail slug");
diff --git a/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java
index fee96af..f6c0db7 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/SuggestComponentTest.java
@@ -19,7 +19,12 @@
 
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.CoreDescriptor;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.spelling.suggest.SuggesterParams;
+import org.apache.solr.util.RefCounted;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -50,14 +55,20 @@
     assertU(adoc("id", "9", "cat", "blah in blah", "price", "50", "weight", "40"));
     assertU(adoc("id", "10", "cat", "another blah in blah", "price", "55", "weight", "40"));
     assertU((commit()));
+    waitForWarming();
   }
   
   @Override
   public void tearDown() throws Exception {
     super.tearDown();
     assertU(delQ("*:*"));
-    optimize();
     assertU((commit()));
+    waitForWarming();
+    // rebuild suggesters with empty index
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_BUILD_ALL, "true"),
+        "//str[@name='command'][.='buildAll']"
+        );
   }
   
   @Test
@@ -66,7 +77,7 @@
         SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_dict", 
         SuggesterParams.SUGGEST_BUILD, "true",
         SuggesterParams.SUGGEST_Q, "exampel",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/int[@name='numFound'][.='2']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example inputdata']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='45']",
@@ -78,7 +89,7 @@
         SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_dict", 
         SuggesterParams.SUGGEST_BUILD, "true",
         SuggesterParams.SUGGEST_Q, "Rad",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/int[@name='numFound'][.='2']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='Rad fox']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='Rad']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='35']",
@@ -93,7 +104,7 @@
         SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_expr_dict", 
         SuggesterParams.SUGGEST_BUILD, "true",
         SuggesterParams.SUGGEST_Q, "exampel",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/int[@name='numFound'][.='2']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example inputdata']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_expr_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='120']",
@@ -123,7 +134,7 @@
         SuggesterParams.SUGGEST_DICT, "suggest_fuzzy_doc_expr_dict",
         SuggesterParams.SUGGEST_BUILD, "true",
         SuggesterParams.SUGGEST_Q, "exampel",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/int[@name='numFound'][.='2']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/str[@name='term'][.='example inputdata']",
         "//lst[@name='suggest']/lst[@name='suggest_fuzzy_doc_dict']/lst[@name='exampel']/arr[@name='suggestions']/lst[1]/long[@name='weight'][.='45']",
@@ -142,7 +153,7 @@
     assertQ(req("qt", rh, 
         SuggesterParams.SUGGEST_BUILD_ALL, "true",
         SuggesterParams.SUGGEST_Q, "exampel",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         "//str[@name='command'][.='buildAll']"
         );
     
@@ -157,7 +168,7 @@
     assertQ(req("qt", rh, 
         SuggesterParams.SUGGEST_RELOAD_ALL, "true",
         SuggesterParams.SUGGEST_Q, "exampel",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         "//str[@name='command'][.='reloadAll']"
         );
     
@@ -174,7 +185,7 @@
         req("qt", rh, 
         SuggesterParams.SUGGEST_DICT, fakeSuggesterName,
         SuggesterParams.SUGGEST_Q, "exampel",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         SolrException.ErrorCode.BAD_REQUEST
         );
     
@@ -182,9 +193,349 @@
         "' parameter not specified and no default suggester configured",
         req("qt", rh, 
         SuggesterParams.SUGGEST_Q, "exampel",
-        SuggesterParams.SUGGEST_COUNT, "2"),
+        SuggesterParams.SUGGEST_COUNT, "5"),
         SolrException.ErrorCode.BAD_REQUEST
         );
   }
   
+
+  @Test
+  public void testDefaultBuildOnStartupNotStoredDict() throws Exception {
+    
+    final String suggester = "suggest_doc_default_startup_no_store";
+    
+    // validate that this suggester is not storing the lookup
+    assertEquals(suggester, 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[8]/str[@name='name']", false));
+    assertNull(h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[8]/str[@name='storeDir']", false));
+    
+    // validate that this suggester does not build on commit and has no buildOnStartup parameter
+    assertEquals("false", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[8]/str[@name='buildOnCommit']", true));
+    assertNull(h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[8]/str[@name='buildOnStartup']", false));
+    
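+    // randomly exercise both paths: in-place core reload and unload/create of a new core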
+    reloadCore(random().nextBoolean());
+    
+    // Validate that the suggester was built on the new/reloaded core
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']"
+        );
+    
+    // add one more doc, should be visible after core reload
+    assertU(adoc("id", "10", "cat", "example data extra ", "price", "40", "weight", "35"));
+    assertU((commit()));
+    
+    waitForWarming();
+    
+    // buildOnCommit=false, this doc should not be in the suggester yet
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']"
+        );
+    
+    reloadCore(random().nextBoolean());
+    
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='3']"
+        );
+    
+  }
+  
+  @Test
+  public void testDefaultBuildOnStartupStoredDict() throws Exception {
+    
+    final String suggester = "suggest_doc_default_startup";
+    
+    // validate that this suggester is storing the lookup
+    assertEquals(suggester, 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[7]/str[@name='name']", false));
+    assertEquals(suggester, 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[7]/str[@name='storeDir']", false));
+    
+    // validate that this suggester does not build on commit and has no buildOnStartup parameter
+    assertEquals("false", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[7]/str[@name='buildOnCommit']", true));
+    assertNull(h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[7]/str[@name='buildOnStartup']", false));
+    
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='0']"
+        );
+    
+    // build the suggester manually
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_BUILD, "true"),
+        "//str[@name='command'][.='build']"
+        );
+    
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']"
+        );
+    
+    reloadCore(random().nextBoolean());
+    
+    // Validate that the suggester was loaded on the new/reloaded core
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']"
+        );
+    
+    // add one more doc, this should not be seen after a core reload (not until the suggester is manually rebuilt)
+    assertU(adoc("id", "10", "cat", "example data extra ", "price", "40", "weight", "35"));
+    assertU((commit()));
+    
+    waitForWarming();
+    // buildOnCommit=false, this doc should not be in the suggester yet
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']"
+        );
+    
+    reloadCore(random().nextBoolean());
+    
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='2']"
+        );
+    
+    // build the suggester manually
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_BUILD, "true"),
+        "//str[@name='command'][.='build']"
+        );
+    
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "example",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='example']/int[@name='numFound'][.='3']"
+        );
+    
+  }
+  
+  @Test
+  public void testLoadOnStartup() throws Exception {
+    
+    final String suggester = "suggest_fuzzy_doc_manal_build";
+    
+    // validate that this suggester is storing the lookup
+    assertEquals(suggester, 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[6]/str[@name='name']", false));
+    assertEquals(suggester, 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[6]/str[@name='storeDir']", false));
+    
+    // validate that this suggester only builds manually
+    assertEquals("false", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[6]/str[@name='buildOnCommit']", true));
+    assertEquals("false", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[6]/str[@name='buildOnStartup']", true));
+    
+    // build the suggester manually
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_BUILD, "true"),
+        "//str[@name='command'][.='build']"
+        );
+    
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"
+        );
+    
+    reloadCore(false);
+    
+    // Validate that the suggester was loaded on core reload
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"
+        );
+    
+    reloadCore(true);
+    
+    // Validate that the suggester was loaded on new core
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggester,
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggester + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"
+        );
+  }
+  
+  @Test
+  public void testBuildOnStartupWithCoreReload() throws Exception {
+    doTestBuildOnStartup(false);
+  }
+  
+  @Test
+  public void testBuildOnStartupWithNewCores() throws Exception {
+    doTestBuildOnStartup(true);
+  }
+  
+  private void doTestBuildOnStartup(boolean createNewCores) throws Exception {
+    
+    final String suggesterFuzzy = "suggest_fuzzy_doc_dict";
+    
+    // the test relies on useColdSearcher=false
+    assertFalse("Precondition not met for test. useColdSearcher must be false", 
+        h.getCore().getSolrConfig().useColdSearcher);
+    
+    // validate that this suggester is not storing the lookup
+    assertEquals(suggesterFuzzy, 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[3]/str[@name='name']", false));
+    assertNull(h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[3]/str[@name='storeDir']", false));
+    
+    // assert that buildOnStartup=false
+    assertEquals("false", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[3]/str[@name='buildOnStartup']", false));
+    assertEquals("true", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[3]/str[@name='buildOnCommit']", false));
+    
+    // verify that this suggester is built (there was a commit in setUp)
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggesterFuzzy, 
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggesterFuzzy + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"
+        );
+    
+    // reload the core and wait for the listeners to finish
+    reloadCore(createNewCores);
+    if (System.getProperty(SYSPROP_NIGHTLY) != null) {
+      // wait some time here in nightly to make sure there are no race conditions in suggester build
+      Thread.sleep(1000);
+    }
+    
+    // The suggester should be empty
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggesterFuzzy, 
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggesterFuzzy + "']/lst[@name='exampel']/int[@name='numFound'][.='0']"
+        );
+    
+    // build the suggester manually
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggesterFuzzy, 
+        SuggesterParams.SUGGEST_BUILD, "true"),
+        "//str[@name='command'][.='build']"
+        );
+    
+    // validate the suggester is built again
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggesterFuzzy, 
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggesterFuzzy + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"
+        );
+    
+    final String suggestStartup = "suggest_fuzzy_doc_dict_build_startup";
+    
+    // repeat the test with "suggest_fuzzy_doc_dict_build_startup", which is identical except that buildOnStartup=true
+    assertEquals(suggestStartup, 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[5]/str[@name='name']", false));
+    assertNull(h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[5]/str[@name='storeDir']", false));
+    assertEquals("true", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[5]/str[@name='buildOnStartup']", false));
+    assertEquals("false", 
+        h.getCore().getSolrConfig().getVal("//searchComponent[@name='suggest']/lst[5]/str[@name='buildOnCommit']", false));
+    
+    // reload the core
+    reloadCore(createNewCores);
+    // verify that this suggester is built (should build on startup)
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggestStartup, 
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggestStartup + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"
+        );
+    
+    // add one more doc, this should not be seen without rebuilding manually or reloading the core (buildOnCommit=false)
+    assertU(adoc("id", "10", "cat", "example data extra ", "price", "40", "weight", "35"));
+    assertU((commit()));
+    
+    waitForWarming();
+
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggestStartup, 
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggestStartup + "']/lst[@name='exampel']/int[@name='numFound'][.='2']"
+        );
+    
+    // build the suggester manually
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggestStartup, 
+        SuggesterParams.SUGGEST_BUILD, "true"),
+        "//str[@name='command'][.='build']"
+        );
+    
+    assertQ(req("qt", rh, 
+        SuggesterParams.SUGGEST_DICT, suggestStartup, 
+        SuggesterParams.SUGGEST_Q, "exampel",
+        SuggesterParams.SUGGEST_COUNT, "5"),
+        "//lst[@name='suggest']/lst[@name='" + suggestStartup + "']/lst[@name='exampel']/int[@name='numFound'][.='3']"
+        );
+  }
+  
+  private void reloadCore(boolean createNewCore) throws Exception {
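+    // createNewCore=true: unload the core and create a fresh one from the same descriptor; false: reload in place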
+    if (createNewCore) {
+      CoreContainer cores = h.getCoreContainer();
+      SolrCore core = h.getCore();
+      String dataDir1 = core.getDataDir();
+      CoreDescriptor cd = core.getCoreDescriptor();
+      cores.unload(core.getName());
+      SolrCore createdCore = cores.create(cd);
+      assertEquals(dataDir1, createdCore.getDataDir());
+      assertEquals(createdCore, h.getCore());
+    } else {
+      h.reload();
+      // On regular reloading, wait until the new searcher is registered
+      waitForWarming();
+    }
+    
+    assertQ(req("qt", "standard", 
+        "q", "*:*"), 
+        "//*[@numFound='11']"
+        );
+  }
+
+  private void waitForWarming() throws InterruptedException {
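+    // poll until the registered searcher is the newest one, i.e. warming has finished; decref every searcher reference we take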
+    RefCounted<SolrIndexSearcher> registeredSearcher = h.getCore().getRegisteredSearcher();
+    RefCounted<SolrIndexSearcher> newestSearcher = h.getCore().getNewestSearcher(false);
+    while (registeredSearcher == null || registeredSearcher.get() != newestSearcher.get()) {
+      if (registeredSearcher != null) {
+        registeredSearcher.decref();
+      }
+      newestSearcher.decref();
+      Thread.sleep(50);
+      registeredSearcher = h.getCore().getRegisteredSearcher();
+      newestSearcher = h.getCore().getNewestSearcher(false);
+    }
+    registeredSearcher.decref();
+    newestSearcher.decref();
+  }
+  
 }
diff --git a/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java b/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java
index 5cd77de..bc184f84 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/TermVectorComponentDistributedTest.java
@@ -22,6 +22,7 @@
 import org.apache.solr.BaseDistributedSearchTestCase;
 import org.apache.solr.common.params.TermVectorParams;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 public class TermVectorComponentDistributedTest extends BaseDistributedSearchTestCase {
   @BeforeClass
@@ -30,8 +31,8 @@
                 Constants.JAVA_VENDOR.startsWith("IBM"));
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
 
     handle.clear();
     handle.put("timestamp", SKIPVAL);
diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
index 2da638d..658fce2 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/TestExpandComponent.java
@@ -19,6 +19,7 @@
 
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.search.CollapsingQParserPlugin;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -43,14 +44,54 @@
     assertU(commit());
   }
 
+
   @Test
   public void testExpand() throws Exception {
-    final String group = (random().nextBoolean() ? "group_s" : "group_s_dv");
-    
-    String[] doc = {"id","1", "term_s", "YYYY", group, "group1", "test_ti", "5", "test_tl", "10", "test_tf", "2000", "type_s", "parent"};
+    List<String> groups = new ArrayList<>();
+    groups.add("group_s");
+    groups.add("group_s_dv");
+
+    Collections.shuffle(groups, random());
+    String floatAppend = "";
+
+    String hint = (random().nextBoolean() ? " hint="+ CollapsingQParserPlugin.HINT_TOP_FC : "");
+
+    _testExpand(groups.get(0), floatAppend, hint);
+  }
+
+  @Test
+  public void testNumericExpand() throws Exception {
+    List<String> groups = new ArrayList<>();
+    groups.add("group_i");
+    groups.add("group_ti_dv");
+    groups.add("group_f");
+    groups.add("group_tf_dv");
+    Collections.shuffle(groups, random());
+    String floatAppend = "";
+    if(groups.get(0).indexOf("f") > -1) {
+      floatAppend = "."+random().nextInt(100);  //Append the float
+      floatAppend = Float.toString(Float.parseFloat(floatAppend)); //Create a proper float out of the string.
+      floatAppend = floatAppend.substring(1);  //Drop off the leading 0, leaving just the decimal
+    }
+
+    String hint = "";
+
+    _testExpand(groups.get(0), floatAppend, hint);
+  }
+
+  private void _testExpand(String group, String floatAppend, String hint) throws Exception {
+
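+    // group keys are "1" and "2"; for float group fields a random decimal suffix is appended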
+    String[] doc = {"id","1", "term_s", "YYYY", group, "1"+floatAppend, "test_ti", "5",
+        "test_tl", "10", "test_tf", "2000", "type_s", "parent"};
     assertU(adoc(doc));
     assertU(commit());
-    String[] doc1 = {"id","2", "term_s","YYYY", group, "group1", "test_ti", "50", "test_tl", "100", "test_tf", "200", "type_s", "child"};
+    String[] doc1 = {"id","2", "term_s","YYYY", group, "1"+floatAppend, "test_ti", "50", "test_tl", "100", "test_tf", "200", "type_s", "child"};
     assertU(adoc(doc1));
 
     String[] doc2 = {"id","3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200"};
@@ -60,17 +101,17 @@
     assertU(adoc(doc3));
 
 
-    String[] doc4 = {"id","5", "term_s", "YYYY", group, "group2", "test_ti", "4", "test_tl", "10", "test_tf", "2000", "type_s", "parent"};
+    String[] doc4 = {"id","5", "term_s", "YYYY", group, "2"+floatAppend, "test_ti", "4", "test_tl", "10", "test_tf", "2000", "type_s", "parent"};
     assertU(adoc(doc4));
     assertU(commit());
-    String[] doc5 = {"id","6", "term_s","YYYY", group, "group2", "test_ti", "10", "test_tl", "100", "test_tf", "200", "type_s", "child"};
+    String[] doc5 = {"id","6", "term_s","YYYY", group, "2"+floatAppend, "test_ti", "10", "test_tl", "100", "test_tf", "200", "type_s", "child"};
     assertU(adoc(doc5));
     assertU(commit());
 
-    String[] doc6 = {"id","7", "term_s", "YYYY", group, "group1", "test_ti", "1", "test_tl", "100000", "test_tf", "2000", "type_s", "child"};
+    String[] doc6 = {"id","7", "term_s", "YYYY", group, "1"+floatAppend, "test_ti", "1", "test_tl", "100000", "test_tf", "2000", "type_s", "child"};
     assertU(adoc(doc6));
     assertU(commit());
-    String[] doc7 = {"id","8", "term_s","YYYY", group, "group2", "test_ti", "2", "test_tl", "100000", "test_tf", "200", "type_s", "child"};
+    String[] doc7 = {"id","8", "term_s","YYYY", group, "2"+floatAppend, "test_ti", "2", "test_tl", "100000", "test_tf", "200", "type_s", "child"};
     assertU(adoc(doc7));
 
     assertU(commit());
@@ -78,7 +119,7 @@
     //First basic test case.
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -86,17 +127,17 @@
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/float[@name='id'][.='2.0']",
         "/response/result/doc[2]/float[@name='id'][.='6.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='1.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[2]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='5.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='8.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='1.0']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='5.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='8.0']"
     );
 
     //Basic test case page 2
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -105,14 +146,14 @@
     assertQ(req(params), "*[count(/response/result/doc)=1]",
         "*[count(/response/lst[@name='expanded']/result)=1]",
         "/response/result/doc[1]/float[@name='id'][.='6.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='5.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='8.0']"
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='5.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='8.0']"
     );
 
     //Test expand.sort
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -121,17 +162,17 @@
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/float[@name='id'][.='2.0']",
         "/response/result/doc[2]/float[@name='id'][.='6.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[2]/float[@name='id'][.='1.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='8.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='5.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/float[@name='id'][.='1.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='8.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='5.0']"
     );
 
     //Test with nullPolicy, ExpandComponent should ignore docs with null values in the collapse fields.
     //Main result set should include the doc with null value in the collapse field.
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse}");
+    params.add("fq", "{!collapse field="+group+hint+" nullPolicy=collapse}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -141,10 +182,10 @@
         "/response/result/doc[1]/float[@name='id'][.='3.0']",
         "/response/result/doc[2]/float[@name='id'][.='2.0']",
         "/response/result/doc[3]/float[@name='id'][.='6.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[2]/float[@name='id'][.='1.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='8.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='5.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/float[@name='id'][.='1.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='8.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='5.0']"
     );
 
 
@@ -162,10 +203,10 @@
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/float[@name='id'][.='1.0']",
         "/response/result/doc[2]/float[@name='id'][.='5.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[2]/float[@name='id'][.='2.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='8.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='6.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/float[@name='id'][.='2.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='8.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='6.0']"
     );
 
 
@@ -184,10 +225,10 @@
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/float[@name='id'][.='1.0']",
         "/response/result/doc[2]/float[@name='id'][.='5.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[2]/float[@name='id'][.='2.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='8.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='6.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/float[@name='id'][.='2.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='8.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='6.0']"
     );
 
     //Test override expand.fq and expand.q
@@ -206,17 +247,17 @@
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/float[@name='id'][.='1.0']",
         "/response/result/doc[2]/float[@name='id'][.='5.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[2]/float[@name='id'][.='2.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='8.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='6.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/float[@name='id'][.='2.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='8.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='6.0']"
     );
 
     //Test expand.rows
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -224,12 +265,12 @@
     params.add("expand.rows", "1");
     assertQ(req(params), "*[count(/response/result/doc)=2]",
         "*[count(/response/lst[@name='expanded']/result)=2]",
-        "*[count(/response/lst[@name='expanded']/result[@name='group1']/doc)=1]",
-        "*[count(/response/lst[@name='expanded']/result[@name='group2']/doc)=1]",
+        "*[count(/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc)=1]",
+        "*[count(/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc)=1]",
         "/response/result/doc[1]/float[@name='id'][.='2.0']",
         "/response/result/doc[2]/float[@name='id'][.='6.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='8.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='8.0']"
     );
 
 
@@ -237,7 +278,7 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "test_ti:5");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -251,7 +292,7 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "test_ti:5532535");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -265,7 +306,7 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("expand", "true");
@@ -274,10 +315,10 @@
         "*[count(/response/lst[@name='expanded']/result)=2]",
         "/response/result/doc[1]/float[@name='id'][.='2.0']",
         "/response/result/doc[2]/float[@name='id'][.='6.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[1]/float[@name='id'][.='1.0']",
-        "/response/lst[@name='expanded']/result[@name='group1']/doc[2]/float[@name='id'][.='7.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[1]/float[@name='id'][.='5.0']",
-        "/response/lst[@name='expanded']/result[@name='group2']/doc[2]/float[@name='id'][.='8.0']"
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[1]/float[@name='id'][.='1.0']",
+        "/response/lst[@name='expanded']/result[@name='1"+floatAppend+"']/doc[2]/float[@name='id'][.='7.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[1]/float[@name='id'][.='5.0']",
+        "/response/lst[@name='expanded']/result[@name='2"+floatAppend+"']/doc[2]/float[@name='id'][.='8.0']"
     );
   }
 
diff --git a/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java b/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java
index 0d660b4..3836a12 100644
--- a/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java
+++ b/solr/core/src/test/org/apache/solr/highlight/DummyHighlighter.java
@@ -16,15 +16,14 @@
  */
 package org.apache.solr.highlight;
 
-import java.io.IOException;
-
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrConfig;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.DocList;
 
+import java.io.IOException;
+
 public class DummyHighlighter extends SolrHighlighter {
 
   @Override
@@ -35,9 +34,4 @@
     return fragments;
   }
 
-  @Override
-  public void initalize(SolrConfig config) {
-    // do nothing
-  }
-
 }
diff --git a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
index 4453fe0..5a8afb1 100644
--- a/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
+++ b/solr/core/src/test/org/apache/solr/highlight/HighlighterTest.java
@@ -17,17 +17,27 @@
 
 package org.apache.solr.highlight;
 
+import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.spans.SpanPayloadCheckQuery;
+import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.params.HighlightParams;
 import org.apache.solr.handler.component.HighlightComponent;
+import org.apache.solr.handler.component.ResponseBuilder;
+import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.search.DocSet;
 import org.apache.solr.util.TestHarness;
 import org.junit.After;
 import org.junit.BeforeClass;
@@ -1053,4 +1063,37 @@
     );
 
   }
+
+  @Test
+  public void payloadFilteringSpanQuery() throws IOException {
+    clearIndex();
+
+    String FIELD_NAME = "payloadDelimited";
+    assertU(adoc("id", "0", FIELD_NAME, "word|7 word|2"));
+    assertU(commit());
+
+    //We search at a lower level than typical Solr tests because there's no QParser for payloads
+
+    //Create query matching this payload
+    Query query = new SpanPayloadCheckQuery(new SpanTermQuery(new Term(FIELD_NAME, "word")),
+        Collections.singleton(new byte[]{0,0,0,7}));//bytes for integer 7
+
+    //invoke highlight component... the hard way
+    final SearchComponent hlComp = h.getCore().getSearchComponent("highlight");
+    SolrQueryRequest req = req("hl", "true", "hl.fl", FIELD_NAME, HighlightParams.USE_PHRASE_HIGHLIGHTER, "true");
+    try {
+      SolrQueryResponse resp = new SolrQueryResponse();
+      ResponseBuilder rb = new ResponseBuilder(req, resp, Collections.singletonList(hlComp));
+      rb.setHighlightQuery(query);
+      rb.setResults(req.getSearcher().getDocListAndSet(query, (DocSet) null, null, 0, 1));
+      //highlight:
+      hlComp.prepare(rb);
+      hlComp.process(rb);
+      //inspect response
+      final String[] snippets = (String[]) resp.getValues().findRecursive("highlighting", "0", FIELD_NAME);
+      assertEquals("<em>word|7</em> word|2", snippets[0]);
+    } finally {
+      req.close();
+    }
+  }
 }
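
For context on the new byte[]{0,0,0,7} literal in payloadFilteringSpanQuery(): assuming the payloadDelimited field type attaches integer payloads (e.g. via a delimited-payload filter with an integer encoder; the schema itself is not part of this hunk), the payload for "word|7" is just the big-endian byte encoding of 7. A minimal standalone sketch using Lucene's PayloadHelper shows the round trip:

    import java.util.Arrays;

    import org.apache.lucene.analysis.payloads.PayloadHelper;

    // Sketch only, not part of the patch: the int payload 7 round-trips
    // through the same 4-byte big-endian form the test hardcodes.
    public class PayloadBytesDemo {
      public static void main(String[] args) {
        byte[] encoded = PayloadHelper.encodeInt(7);
        System.out.println(Arrays.toString(encoded));            // [0, 0, 0, 7]
        System.out.println(PayloadHelper.decodeInt(encoded, 0)); // 7
      }
    }
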
diff --git a/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java b/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java
index e5fc017..ea8a40c 100644
--- a/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java
+++ b/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java
@@ -21,6 +21,8 @@
 import org.junit.BeforeClass;
 import org.restlet.ext.servlet.ServerServlet;
 
+import java.nio.file.Path;
+import java.util.Properties;
 import java.util.SortedMap;
 import java.util.TreeMap;
 
@@ -42,11 +44,25 @@
    */
   @BeforeClass
   public static void init() throws Exception {
+
+    Path tempDir = createTempDir();
+    Path coresDir = tempDir.resolve("cores");
+
+    System.setProperty("coreRootDirectory", coresDir.toString());
+    System.setProperty("configSetBaseDir", TEST_HOME());
+
     final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
     final ServletHolder solrSchemaRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
     solrSchemaRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
     extraServlets.put(solrSchemaRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
 
+    Properties props = new Properties();
+    props.setProperty("name", DEFAULT_TEST_CORENAME);
+    props.setProperty("config", "solrconfig.xml");
+    props.setProperty("schema", "schema-rest.xml");
+    props.setProperty("configSet", "collection1");
+
+    writeCoreProperties(coresDir.resolve("core"), props, "SolrRestletTestBase");
     createJettyAndHarness(TEST_HOME(), "solrconfig.xml", "schema-rest.xml", "/solr", true, extraServlets);
   }
 }
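
The rewritten init() moves SolrRestletTestBase onto core discovery: coreRootDirectory points at the directory tree that gets scanned for core.properties files, configSetBaseDir at the config sets, and the writeCoreProperties(...) test helper persists the Properties built above as cores/core/core.properties. A rough self-contained sketch, assuming the helper stores the Properties verbatim (its body is not part of this hunk) and that DEFAULT_TEST_CORENAME resolves to "collection1":

    import java.io.IOException;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.Properties;

    // Sketch: materialize a core.properties equivalent to the one the test writes.
    public class CorePropertiesSketch {
      public static void main(String[] args) throws IOException {
        Path coreDir = Paths.get("cores", "core");
        Files.createDirectories(coreDir);
        Properties props = new Properties();
        props.setProperty("name", "collection1");      // assumed value of DEFAULT_TEST_CORENAME
        props.setProperty("config", "solrconfig.xml");
        props.setProperty("schema", "schema-rest.xml");
        props.setProperty("configSet", "collection1");
        try (Writer w = Files.newBufferedWriter(coreDir.resolve("core.properties"), StandardCharsets.UTF_8)) {
          props.store(w, "SolrRestletTestBase");
        }
        // The stored file contains key=value lines, e.g.:
        //   name=collection1
        //   config=solrconfig.xml
        //   schema=schema-rest.xml
        //   configSet=collection1
      }
    }
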
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
index 3c69feb..050fc082 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
@@ -73,6 +73,9 @@
       jetty = null;
     }
     client = null;
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
     restTestHarness = null;
   }
 
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java
index 004ee56..383e2b3 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestCopyFieldCollectionResource.java
@@ -23,7 +23,10 @@
   @Test
   public void testGetAllCopyFields() throws Exception {
     assertQ("/schema/copyfields?indent=on&wt=xml",
-            "/response/arr[@name='copyFields']/lst[    str[@name='source'][.='title']"
+        "/response/arr[@name='copyFields']/lst[    str[@name='source'][.='src_sub_no_ast_i']"
+            +"                                      and str[@name='dest'][.='title']]",
+
+        "/response/arr[@name='copyFields']/lst[    str[@name='source'][.='title']"
            +"                                      and str[@name='dest'][.='title_stemmed']"
            +"                                      and int[@name='maxChars'][.='200']]",
 
@@ -65,10 +68,6 @@
 
             "/response/arr[@name='copyFields']/lst[    str[@name='source'][.='src_sub_no_ast_i']"
            +"                                      and str[@name='sourceDynamicBase'][.='*_i']"
-           +"                                      and str[@name='dest'][.='title']]",
-
-            "/response/arr[@name='copyFields']/lst[    str[@name='source'][.='src_sub_no_ast_i']"
-           +"                                      and str[@name='sourceDynamicBase'][.='*_i']"
            +"                                      and str[@name='dest'][.='*_s']]",
 
             "/response/arr[@name='copyFields']/lst[    str[@name='source'][.='src_sub_no_ast_i']"
@@ -105,19 +104,19 @@
   @Test
   public void testJsonGetAllCopyFields() throws Exception {
     assertJQ("/schema/copyfields?indent=on&wt=json",
-             "/copyFields/[6]=={'source':'title','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
+             "/copyFields/[1]=={'source':'src_sub_no_ast_i','dest':'title'}",
+             "/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
 
-             "/copyFields/[7]=={'source':'*_i','dest':'title'}",
-             "/copyFields/[8]=={'source':'*_i','dest':'*_s'}",
-             "/copyFields/[9]=={'source':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
-             "/copyFields/[10]=={'source':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
+             "/copyFields/[8]=={'source':'*_i','dest':'title'}",
+             "/copyFields/[9]=={'source':'*_i','dest':'*_s'}",
+             "/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
+             "/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
 
-             "/copyFields/[11]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'title'}",
-             "/copyFields/[12]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_s'}",
-             "/copyFields/[13]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
-             "/copyFields/[14]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
+             "/copyFields/[12]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'title'}",
+             "/copyFields/[13]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_s'}",
+             "/copyFields/[14]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
+             "/copyFields/[15]=={'source':'*_src_sub_i','sourceDynamicBase':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}",
 
-             "/copyFields/[15]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'title'}",
              "/copyFields/[16]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'*_s'}",
              "/copyFields/[17]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'*_dest_sub_s','destDynamicBase':'*_s'}",
              "/copyFields/[18]=={'source':'src_sub_no_ast_i','sourceDynamicBase':'*_i','dest':'dest_sub_no_ast_s','destDynamicBase':'*_s'}");
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java
index 60a72bf..792d163 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaDynamicFieldResource.java
@@ -16,11 +16,6 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.regex.Pattern;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.util.RestTestBase;
 import org.eclipse.jetty.servlet.ServletHolder;
@@ -29,6 +24,11 @@
 import org.junit.Test;
 import org.restlet.ext.servlet.ServerServlet;
 
+import java.io.File;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.regex.Pattern;
+
 public class TestManagedSchemaDynamicFieldResource extends RestTestBase {
 
   private static File tmpSolrHome;
@@ -63,6 +63,9 @@
       jetty = null;
     }
     client = null;
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
     restTestHarness = null;
   }
 
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java
index 4fa6953..83eb1a8 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldResource.java
@@ -16,11 +16,6 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.regex.Pattern;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.util.RestTestBase;
 import org.eclipse.jetty.servlet.ServletHolder;
@@ -29,6 +24,11 @@
 import org.junit.Test;
 import org.restlet.ext.servlet.ServerServlet;
 
+import java.io.File;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.regex.Pattern;
+
 public class TestManagedSchemaFieldResource extends RestTestBase {
 
   private static File tmpSolrHome;
@@ -63,6 +63,9 @@
       jetty = null;
     }
     client = null;
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
     restTestHarness = null;
   }
   
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java
index 26fab12..a3aec9c 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestManagedSchemaFieldTypeResource.java
@@ -66,6 +66,11 @@
     jetty = null;
     System.clearProperty("managed.schema.mutable");
     System.clearProperty("enable.update.log");
+    
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
+    restTestHarness = null;
   }
   
   @Test
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java
index aed5d7c..fb59f1f 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSchemaResource.java
@@ -131,19 +131,19 @@
              "/schema/dynamicFields/[1]/name=='ignored_*'",
              "/schema/dynamicFields/[2]/name=='*_mfacet'",
                  
-             "/schema/copyFields/[6]=={'source':'title','dest':'dest_sub_no_ast_s'}",
+             "/schema/copyFields/[1]=={'source':'src_sub_no_ast_i','dest':'title'}",
 
-             "/schema/copyFields/[7]=={'source':'*_i','dest':'title'}",
-             "/schema/copyFields/[8]=={'source':'*_i','dest':'*_s'}",
-             "/schema/copyFields/[9]=={'source':'*_i','dest':'*_dest_sub_s'}",
-             "/schema/copyFields/[10]=={'source':'*_i','dest':'dest_sub_no_ast_s'}",
+             "/schema/copyFields/[7]=={'source':'title','dest':'dest_sub_no_ast_s'}",
+             "/schema/copyFields/[8]=={'source':'*_i','dest':'title'}",
+             "/schema/copyFields/[9]=={'source':'*_i','dest':'*_s'}",
+             "/schema/copyFields/[10]=={'source':'*_i','dest':'*_dest_sub_s'}",
+             "/schema/copyFields/[11]=={'source':'*_i','dest':'dest_sub_no_ast_s'}",
 
-             "/schema/copyFields/[11]=={'source':'*_src_sub_i','dest':'title'}",
-             "/schema/copyFields/[12]=={'source':'*_src_sub_i','dest':'*_s'}",
-             "/schema/copyFields/[13]=={'source':'*_src_sub_i','dest':'*_dest_sub_s'}",
-             "/schema/copyFields/[14]=={'source':'*_src_sub_i','dest':'dest_sub_no_ast_s'}",
+             "/schema/copyFields/[12]=={'source':'*_src_sub_i','dest':'title'}",
+             "/schema/copyFields/[13]=={'source':'*_src_sub_i','dest':'*_s'}",
+             "/schema/copyFields/[14]=={'source':'*_src_sub_i','dest':'*_dest_sub_s'}",
+             "/schema/copyFields/[15]=={'source':'*_src_sub_i','dest':'dest_sub_no_ast_s'}",
 
-             "/schema/copyFields/[15]=={'source':'src_sub_no_ast_i','dest':'title'}",
              "/schema/copyFields/[16]=={'source':'src_sub_no_ast_i','dest':'*_s'}",
              "/schema/copyFields/[17]=={'source':'src_sub_no_ast_i','dest':'*_dest_sub_s'}",
              "/schema/copyFields/[18]=={'source':'src_sub_no_ast_i','dest':'dest_sub_no_ast_s'}");
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java
index 3b29557..ad9ab29 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java
@@ -66,6 +66,11 @@
     jetty = null;
     System.clearProperty("managed.schema.mutable");
     System.clearProperty("enable.update.log");
+    
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
+    restTestHarness = null;
   }
 
 
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
index 2b20aa2..8aa40ad 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
@@ -63,6 +63,11 @@
     FileUtils.deleteDirectory(tmpSolrHome);
     System.clearProperty("managed.schema.mutable");
     System.clearProperty("enable.update.log");
+    
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
+    restTestHarness = null;
   }
   
   @Test
diff --git a/solr/core/src/test/org/apache/solr/schema/ModifyConfFileTest.java b/solr/core/src/test/org/apache/solr/schema/ModifyConfFileTest.java
deleted file mode 100644
index 43f39c6..0000000
--- a/solr/core/src/test/org/apache/solr/schema/ModifyConfFileTest.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.schema;
-
-import static org.junit.internal.matchers.StringContains.containsString;
-
-import java.io.File;
-import java.util.ArrayList;
-
-import org.apache.commons.codec.Charsets;
-import org.apache.commons.io.FileUtils;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.ContentStream;
-import org.apache.solr.common.util.ContentStreamBase;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.SolrCore;
-import org.apache.solr.request.LocalSolrQueryRequest;
-import org.apache.solr.request.SolrRequestHandler;
-import org.apache.solr.response.SolrQueryResponse;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.RuleChain;
-import org.junit.rules.TestRule;
-
-import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
-
-public class ModifyConfFileTest extends SolrTestCaseJ4 {
-  private File solrHomeDirectory = createTempDir().toFile();
-
-  @Rule
-  public TestRule solrTestRules = RuleChain.outerRule(new SystemPropertiesRestoreRule());
-
-  private CoreContainer init() throws Exception {
-    System.setProperty("solr.test.sys.prop1", "propone");
-    System.setProperty("solr.test.sys.prop2", "proptwo");
-
-    solrHomeDirectory = createTempDir().toFile();
-
-    copySolrHomeToTemp(solrHomeDirectory, "core1", true);
-    FileUtils.write(new File(new File(solrHomeDirectory, "core1"), "core.properties"), "", Charsets.UTF_8.toString());
-    final CoreContainer cores = new CoreContainer(solrHomeDirectory.getAbsolutePath());
-    cores.load();
-    return cores;
-  }
-
-  @Test
-  public void testConfigWrite() throws Exception {
-
-    final CoreContainer cc = init();
-    try (SolrCore core = cc.getCore("core1")) {
-      //final CoreAdminHandler admin = new CoreAdminHandler(cc);
-
-      SolrQueryResponse rsp = new SolrQueryResponse();
-      SolrRequestHandler handler = core.getRequestHandler("/admin/fileedit");
-
-      ModifiableSolrParams params = params("file","schema.xml", "op","write");
-      core.execute(handler, new LocalSolrQueryRequest(core, params), rsp);
-      assertEquals(rsp.getException().getMessage(), "Input stream list was null for admin file write operation.");
-
-      params = params("op", "write");
-      core.execute(handler, new LocalSolrQueryRequest(core, params), rsp);
-      assertEquals(rsp.getException().getMessage(), "No file name specified for write operation.");
-
-      ArrayList<ContentStream> streams = new ArrayList<>( 2 );
-      streams.add(new ContentStreamBase.StringStream("Testing rewrite of schema.xml file." ) );
-
-      params = params("op", "write", "file", "bogus.txt");
-      LocalSolrQueryRequest locReq = new LocalSolrQueryRequest(core, params);
-      locReq.setContentStreams(streams);
-      core.execute(handler, locReq, rsp);
-      assertEquals(rsp.getException().getMessage(), "Can not access: bogus.txt");
-
-      String top = SolrTestCaseJ4.TEST_HOME() + "/collection1/conf";
-      String badConf = FileUtils.readFileToString(new File(top, "solrconfig-minimal.xml"), "UTF-8").replace("</dataDir>", "");
-
-      params = params("op", "write", "file", "solrconfig.xml");
-      locReq = new LocalSolrQueryRequest(core, params);
-      streams.clear();
-      streams.add(new ContentStreamBase.StringStream(badConf));
-      locReq.setContentStreams(streams);
-      core.execute(handler, locReq, rsp);
-      assertThat("should have detected an error early!", rsp.getException().getMessage(), containsString("\"</dataDir>\""));
-
-      params = params("op", "test", "file", "schema.xml", "stream.body", "Testing rewrite of schema.xml file.");
-      locReq = new LocalSolrQueryRequest(core, params);
-      locReq.setContentStreams(streams);
-      core.execute(handler, locReq, rsp);
-
-      assertTrue("Schema should have caused core reload to fail!",
-          rsp.getException().getMessage().indexOf("SAXParseException") != -1);
-      String contents = FileUtils.readFileToString(new File(core.getCoreDescriptor().getInstanceDir(), "conf/schema.xml"), Charsets.UTF_8.toString());
-      assertFalse("Schema contents should NOT have changed!", contents.contains("Testing rewrite of schema.xml file."));
-
-      streams.add(new ContentStreamBase.StringStream("This should barf"));
-      locReq = new LocalSolrQueryRequest(core, params);
-      locReq.setContentStreams(streams);
-      core.execute(handler, locReq, rsp);
-      assertEquals(rsp.getException().getMessage(), "More than one input stream was found for admin file write operation.");
-
-      streams.clear();
-      streams.add(new ContentStreamBase.StringStream("Some bogus stuff for a test."));
-      params = params("op", "write", "file", "velocity/test.vm");
-      locReq = new LocalSolrQueryRequest(core, params);
-      locReq.setContentStreams(streams);
-      core.execute(handler, locReq, rsp);
-      contents = FileUtils.readFileToString(new File(core.getCoreDescriptor().getInstanceDir(),
-          "conf/velocity/test.vm"), Charsets.UTF_8.toString());
-      assertEquals("Schema contents should have changed!", "Some bogus stuff for a test.", contents);
-
-      streams.clear();
-      params = params();
-      locReq = new LocalSolrQueryRequest(core, params);
-
-      core.execute(core.getRequestHandler("/admin/file"), locReq, rsp);
-
-      NamedList<Object> res = rsp.getValues();
-
-      NamedList files = (NamedList)res.get("files");
-      assertNotNull("Should have gotten files back", files);
-      SimpleOrderedMap schema = (SimpleOrderedMap)files.get("schema.xml");
-      assertNotNull("Should have a schema returned", schema);
-      assertNull("Schema.xml should not be a directory", schema.get("directory"));
-
-      SimpleOrderedMap velocity = (SimpleOrderedMap)files.get("velocity");
-      assertNotNull("Should have velocity dir returned", velocity);
-
-      assertTrue("Velocity should be a directory", (boolean)velocity.get("directory"));
-
-    } finally {
-      cc.shutdown();
-    }
-
-  }
-}
diff --git a/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java
new file mode 100644
index 0000000..07a6f8f
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/schema/SpatialRPTFieldTypeTest.java
@@ -0,0 +1,275 @@
+package org.apache.solr.schema;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.nio.file.Files;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.solr.core.AbstractBadConfigTestBase;
+import org.junit.After;
+import org.junit.Before;
+
+public class SpatialRPTFieldTypeTest extends AbstractBadConfigTestBase {
+  
+  private static File tmpSolrHome;
+  private static File tmpConfDir;
+  
+  private static final String collection = "collection1";
+  private static final String confDir = collection + "/conf";
+  
+  @Before
+  private void initManagedSchemaCore() throws Exception {
+    tmpSolrHome = createTempDir().toFile();
+    tmpConfDir = new File(tmpSolrHome, confDir);
+    File testHomeConfDir = new File(TEST_HOME(), confDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-managed-schema.xml"), tmpConfDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig-basic.xml"), tmpConfDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "solrconfig.snippet.randomindexconfig.xml"), tmpConfDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-one-field-no-dynamic-field.xml"), tmpConfDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-one-field-no-dynamic-field-unique-key.xml"), tmpConfDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-minimal.xml"), tmpConfDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema_codec.xml"), tmpConfDir);
+    FileUtils.copyFileToDirectory(new File(testHomeConfDir, "schema-bm25.xml"), tmpConfDir);
+    
+    // initCore will trigger an upgrade to managed schema, since the solrconfig has
+    // <schemaFactory class="ManagedIndexSchemaFactory" ... />
+    System.setProperty("managed.schema.mutable", "false");
+    System.setProperty("enable.update.log", "false");
+    initCore("solrconfig-managed-schema.xml", "schema-minimal.xml", tmpSolrHome.getPath());
+  }
+  
+  @After
+  private void afterClass() throws Exception {
+    deleteCore();
+    System.clearProperty("managed.schema.mutable");
+    System.clearProperty("enable.update.log");
+  }
+  
+  final String INDEXED_COORDINATES = "25,82";
+  final String QUERY_COORDINATES = "24,81";
+  final String DISTANCE_DEGREES = "1.3520328";
+  final String DISTANCE_KILOMETERS = "150.33939";
+  final String DISTANCE_MILES = "93.416565";
+  
+  public void testUnitsDegrees() throws Exception { // test back compat behaviour
+    setupRPTField("degrees", null, "true");
+    
+    assertU(adoc("str", "X", "geo", INDEXED_COORDINATES));
+    assertU(commit());
+    String q;
+    
+    q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']");
+    
+    q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']");
+    
+    q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']");
+    
+    q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']");
+  }
+  
+  public void testUnitsNonDegrees() throws Exception {
+    try {
+      setupRPTField("kilometers", null, "true");
+      fail("Expected exception for deprecated units parameter.");
+    } catch (Exception ex) {
+      if(!ex.getMessage().startsWith("units parameter is deprecated"))
+        throw ex;
+    }
+  }
+  
+  public void testDistanceUnitsDegrees() throws Exception {
+    setupRPTField(null, "degrees", "true");
+    
+    assertU(adoc("str", "X", "geo", INDEXED_COORDINATES));
+    assertU(commit());
+    String q;
+    
+    q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']");
+    
+    q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']");
+    
+    q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']");
+    
+    q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']");
+  }
+  
+  public void testDistanceUnitsKilometers() throws Exception {
+    setupRPTField(null, "kilometers", "true");
+    
+    assertU(adoc("str", "X", "geo", INDEXED_COORDINATES));
+    assertU(commit());
+    String q;
+    
+    q = "geo:{!geofilt score=distance filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']");
+    
+    q = "geo:{!geofilt score=degrees filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_DEGREES+"']");
+    
+    q = "geo:{!geofilt score=kilometers filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_KILOMETERS+"']");
+    
+    q = "geo:{!geofilt score=miles filter=false sfield=geo pt="+QUERY_COORDINATES+" d=1000}";
+    assertQ(req("q", q, "fl", "*,score"), "//result/doc/float[@name='score'][.='"+DISTANCE_MILES+"']");
+  }
+  
+  public void testBothUnitsAndDistanceUnits() throws Exception { // combining the deprecated units with distanceUnits must be rejected
+    try {
+      setupRPTField("degrees", "kilometers", "true");  
+      fail("Expected exception for deprecated units parameter.");
+    } catch (Exception ex) {
+      if(!ex.getMessage().startsWith("units parameter is deprecated"))
+        throw ex;
+    }
+  }
+  
+  public void testJunkValuesForDistanceUnits() throws Exception {
+    try {
+      setupRPTField(null, "rose", "true");
+      fail("Expected exception for bad value of distanceUnits.");
+    } catch (Exception ex) {
+      if(!ex.getMessage().startsWith("Must specify distanceUnits as one of"))
+        throw ex;
+    }
+  }
+
+  public void testMaxDistErrConversion() throws Exception {
+    deleteCore();
+    File managedSchemaFile = new File(tmpConfDir, "managed-schema");
+    Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema
+    System.setProperty("managed.schema.mutable", "true");
+    initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath());
+    
+    String fieldName = "new_text_field";
+    assertNull("Field '" + fieldName + "' is present in the schema",
+        h.getCore().getLatestSchema().getFieldOrNull(fieldName));
+    
+    IndexSchema oldSchema = h.getCore().getLatestSchema();
+    
+    SpatialRecursivePrefixTreeFieldType rptFieldType = new SpatialRecursivePrefixTreeFieldType();
+    Map<String, String> rptMap = new HashMap<>();
+
+    rptFieldType.setTypeName("location_rpt");
+    rptMap.put("geo", "true");
+
+    // test km
+    rptMap.put("distanceUnits", "kilometers");
+    rptMap.put("maxDistErr", "0.001"); // 1 meter
+    rptFieldType.init(oldSchema, rptMap);
+    assertEquals(11, rptFieldType.grid.getMaxLevels());
+
+    // test miles
+    rptMap.put("distanceUnits", "miles");
+    rptMap.put("maxDistErr", "0.001");
+    rptFieldType.init(oldSchema, rptMap);
+    assertEquals(10, rptFieldType.grid.getMaxLevels());
+
+    // test degrees
+    rptMap.put("distanceUnits", "degrees");
+    rptMap.put("maxDistErr", "0.001");
+    rptFieldType.init(oldSchema, rptMap);
+    assertEquals(8, rptFieldType.grid.getMaxLevels());
+  }
+
+  public void testGeoDistanceFunctionWithBackCompat() throws Exception {
+    setupRPTField("degrees", null, "true");
+
+    assertU(adoc("str", "X", "geo", "1,2"));
+    assertU(commit());
+
+    // geodist() should return in km
+    assertJQ(req("defType","func",
+        "q","geodist(3,4)",
+        "sfield","geo",
+        "fl","score")
+        , 1e-5
+        ,"/response/docs/[0]/score==314.4033"
+    );
+  }
+
+  public void testGeoDistanceFunctionWithKilometers() throws Exception {
+    setupRPTField(null, "kilometers", "true");
+
+    assertU(adoc("str", "X", "geo", "1,2"));
+    assertU(commit());
+
+    assertJQ(req("defType","func",
+        "q","geodist(3,4)",
+        "sfield","geo",
+        "fl","score")
+        , 1e-5
+        ,"/response/docs/[0]/score==314.4033"
+    );
+  }
+
+  public void testGeoDistanceFunctionWithMiles() throws Exception {
+    setupRPTField(null, "miles", "true");
+
+    assertU(adoc("str", "X", "geo", "1,2"));
+    assertU(commit());
+
+    assertJQ(req("defType","func",
+        "q","geodist(3,4)",
+        "sfield","geo",
+        "fl","score")
+        , 1e-5
+        ,"/response/docs/[0]/score==195.36115"
+    );
+  }
+
+  private void setupRPTField(String units, String distanceUnits, String geo) throws Exception {
+    deleteCore();
+    File managedSchemaFile = new File(tmpConfDir, "managed-schema");
+    Files.delete(managedSchemaFile.toPath()); // Delete managed-schema so it won't block parsing a new schema
+    System.setProperty("managed.schema.mutable", "true");
+    initCore("solrconfig-managed-schema.xml", "schema-one-field-no-dynamic-field.xml", tmpSolrHome.getPath());
+
+    String fieldName = "new_text_field";
+    assertNull("Field '" + fieldName + "' is present in the schema",
+        h.getCore().getLatestSchema().getFieldOrNull(fieldName));
+    
+    IndexSchema oldSchema = h.getCore().getLatestSchema();
+
+    SpatialRecursivePrefixTreeFieldType rptFieldType = new SpatialRecursivePrefixTreeFieldType();
+    Map<String, String> rptMap = new HashMap<>();
+    if(units!=null)
+      rptMap.put("units", units);
+    if(distanceUnits!=null)
+      rptMap.put("distanceUnits", distanceUnits);
+    if(geo!=null)
+      rptMap.put("geo", geo);
+    rptFieldType.init(oldSchema, rptMap);
+    rptFieldType.setTypeName("location_rpt");
+    SchemaField newField = new SchemaField("geo", rptFieldType, SchemaField.STORED | SchemaField.INDEXED, null);
+    IndexSchema newSchema = oldSchema.addField(newField);
+
+    h.getCore().setLatestSchema(newSchema);
+
+    assertU(delQ("*:*"));
+  }
+}
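
A note on testMaxDistErrConversion() in the new file: maxDistErr is interpreted in the field's distanceUnits and converted to degrees before the prefix tree picks its depth, which is why the same 0.001 yields 11, 10, and 8 levels for kilometers, miles, and degrees respectively (a smaller angular error forces a deeper tree). A back-of-the-envelope sketch with an approximate km-per-degree constant (assumed, roughly spatial4j's value; the real conversion happens inside the field type's init()):

    // Sketch only, not Solr API: why maxDistErr=0.001 yields different tree
    // depths per distanceUnits.
    public class MaxDistErrSketch {
      static final double KM_PER_DEGREE = 111.1951; // mean earth degree length, approximate

      public static void main(String[] args) {
        double kmErrDeg  = 0.001 / KM_PER_DEGREE;            // 0.001 km = 1 m
        double miErrDeg  = 0.001 * 1.609344 / KM_PER_DEGREE; // 0.001 mi ~ 1.6 m
        double degErrDeg = 0.001;                            // already degrees
        System.out.printf("km:  %.3e deg%n", kmErrDeg);  // ~9.0e-06 -> deepest grid (11 levels)
        System.out.printf("mi:  %.3e deg%n", miErrDeg);  // ~1.4e-05 -> 10 levels
        System.out.printf("deg: %.3e deg%n", degErrDeg); // 1.0e-03  -> shallowest (8 levels)
      }
    }
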
diff --git a/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java b/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java
index 8924c3d..4e85d2d 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestBinaryField.java
@@ -14,11 +14,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.solr.schema;
 
+import com.google.common.base.Charsets;
 import org.apache.commons.io.FileUtils;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.beans.Field;
@@ -29,9 +32,14 @@
 import org.junit.BeforeClass;
 
 import java.io.File;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
 import java.nio.ByteBuffer;
+import java.nio.file.Files;
 import java.util.List;
+import java.util.Properties;
 
+@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
 public class TestBinaryField extends SolrJettyTestBase {
 
   @BeforeClass
@@ -57,94 +65,102 @@
     FileUtils.copyFile(new File(src_dir, "solrconfig.snippet.randomindexconfig.xml"), 
                        new File(confDir, "solrconfig.snippet.randomindexconfig.xml"));
 
+    try (Writer w = new OutputStreamWriter(Files.newOutputStream(collDir.toPath().resolve("core.properties")), Charsets.UTF_8)) {
+      Properties coreProps = new Properties();
+      coreProps.put("name", "collection1");
+      coreProps.store(w, "");
+    }
+
     createJetty(homeDir.getAbsolutePath(), null, null);
   }
 
 
   public void testSimple() throws Exception {
-    SolrClient client = getSolrClient();
-    byte[] buf = new byte[10];
-    for (int i = 0; i < 10; i++) {
-      buf[i] = (byte) i;
-    }
-    SolrInputDocument doc = null;
-    doc = new SolrInputDocument();
-    doc.addField("id", 1);
-    doc.addField("data", ByteBuffer.wrap(buf, 2, 5));
-    client.add(doc);
+    try (SolrClient client = getSolrClient()) {
+      byte[] buf = new byte[10];
+      for (int i = 0; i < 10; i++) {
+        buf[i] = (byte) i;
+      }
+      SolrInputDocument doc = null;
+      doc = new SolrInputDocument();
+      doc.addField("id", 1);
+      doc.addField("data", ByteBuffer.wrap(buf, 2, 5));
+      client.add(doc);
 
-    doc = new SolrInputDocument();
-    doc.addField("id", 2);
-    doc.addField("data", ByteBuffer.wrap(buf, 4, 3));
-    client.add(doc);
+      doc = new SolrInputDocument();
+      doc.addField("id", 2);
+      doc.addField("data", ByteBuffer.wrap(buf, 4, 3));
+      client.add(doc);
 
-    doc = new SolrInputDocument();
-    doc.addField("id", 3);
-    doc.addField("data", buf);
-    client.add(doc);
+      doc = new SolrInputDocument();
+      doc.addField("id", 3);
+      doc.addField("data", buf);
+      client.add(doc);
 
-    client.commit();
+      client.commit();
 
-    QueryResponse resp = client.query(new SolrQuery("*:*"));
-    SolrDocumentList res = resp.getResults();
-    List<Bean> beans = resp.getBeans(Bean.class);
-    assertEquals(3, res.size());
-    assertEquals(3, beans.size());
-    for (SolrDocument d : res) {
-      Integer id = (Integer) d.getFieldValue("id");
-      byte[] data = (byte[]) d.getFieldValue("data");
-      if (id == 1) {
-        assertEquals(5, data.length);
-        for (int i = 0; i < data.length; i++) {
-          byte b = data[i];
-          assertEquals((byte)(i + 2), b);
-        }
+      QueryResponse resp = client.query(new SolrQuery("*:*"));
+      SolrDocumentList res = resp.getResults();
+      List<Bean> beans = resp.getBeans(Bean.class);
+      assertEquals(3, res.size());
+      assertEquals(3, beans.size());
+      for (SolrDocument d : res) {
 
-      } else if (id == 2) {
-        assertEquals(3, data.length);
-        for (int i = 0; i < data.length; i++) {
-          byte b = data[i];
-          assertEquals((byte)(i + 4), b);
-        }
+        Integer id = (Integer) d.getFieldValue("id");
+        byte[] data = (byte[]) d.getFieldValue("data");
+        if (id == 1) {
+          assertEquals(5, data.length);
+          for (int i = 0; i < data.length; i++) {
+            byte b = data[i];
+            assertEquals((byte) (i + 2), b);
+          }
+
+        } else if (id == 2) {
+          assertEquals(3, data.length);
+          for (int i = 0; i < data.length; i++) {
+            byte b = data[i];
+            assertEquals((byte) (i + 4), b);
+          }
 
 
-      } else if (id == 3) {
-        assertEquals(10, data.length);
-        for (int i = 0; i < data.length; i++) {
-          byte b = data[i];
-          assertEquals((byte)i, b);
+        } else if (id == 3) {
+          assertEquals(10, data.length);
+          for (int i = 0; i < data.length; i++) {
+            byte b = data[i];
+            assertEquals((byte) i, b);
+          }
+
         }
 
       }
+      for (Bean d : beans) {
+        Integer id = d.id;
+        byte[] data = d.data;
+        if (id == 1) {
+          assertEquals(5, data.length);
+          for (int i = 0; i < data.length; i++) {
+            byte b = data[i];
+            assertEquals((byte) (i + 2), b);
+          }
 
-    }
-    for (Bean d : beans) {
-      Integer id = d.id;
-      byte[] data = d.data;
-      if (id == 1) {
-        assertEquals(5, data.length);
-        for (int i = 0; i < data.length; i++) {
-          byte b = data[i];
-          assertEquals((byte)(i + 2), b);
-        }
-
-      } else if (id == 2) {
-        assertEquals(3, data.length);
-        for (int i = 0; i < data.length; i++) {
-          byte b = data[i];
-          assertEquals((byte)(i + 4), b);
-        }
+        } else if (id == 2) {
+          assertEquals(3, data.length);
+          for (int i = 0; i < data.length; i++) {
+            byte b = data[i];
+            assertEquals((byte) (i + 4), b);
+          }
 
 
-      } else if (id == 3) {
-        assertEquals(10, data.length);
-        for (int i = 0; i < data.length; i++) {
-          byte b = data[i];
-          assertEquals((byte)i, b);
+        } else if (id == 3) {
+          assertEquals(10, data.length);
+          for (int i = 0; i < data.length; i++) {
+            byte b = data[i];
+            assertEquals((byte) i, b);
+          }
+
         }
 
       }
-
     }
 
   }
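
The length and offset assertions in the reworked testSimple() follow directly from ByteBuffer.wrap(buf, offset, length) semantics: the view spans buf[offset .. offset+length-1], so doc 1 stores bytes 2..6 and doc 2 stores bytes 4..6. In isolation:

    import java.nio.ByteBuffer;

    // Standalone sketch of the ByteBuffer semantics the assertions rely on:
    // wrap(buf, offset, length) exposes exactly buf[offset .. offset+length-1].
    public class WrapSemanticsDemo {
      public static void main(String[] args) {
        byte[] buf = new byte[10];
        for (int i = 0; i < buf.length; i++) buf[i] = (byte) i;

        ByteBuffer view = ByteBuffer.wrap(buf, 2, 5); // doc id=1 in the test
        byte[] copy = new byte[view.remaining()];     // remaining() == 5
        view.get(copy);
        // copy == {2, 3, 4, 5, 6}: hence assertEquals((byte)(i + 2), data[i])
        for (byte b : copy) System.out.print(b + " ");
        System.out.println();
      }
    }
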
diff --git a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java
index b1c8fec..b3ed82a 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestBulkSchemaConcurrent.java
@@ -18,20 +18,10 @@
  */
 
 
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.util.RESTfulServerProvider;
-import org.apache.solr.util.RestTestHarness;
-import org.junit.BeforeClass;
-import org.noggit.JSONParser;
-import org.noggit.ObjectBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static java.text.MessageFormat.format;
+import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getCopyFields;
+import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getObj;
 
-import java.io.IOException;
 import java.io.StringReader;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
@@ -41,9 +31,19 @@
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
-import static java.text.MessageFormat.format;
-import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getCopyFields;
-import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getObj;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.util.RESTfulServerProvider;
+import org.apache.solr.util.RestTestHarness;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.noggit.JSONParser;
+import org.noggit.ObjectBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestBulkSchemaConcurrent  extends AbstractFullDistribZkTestBase {
   static final Logger log =  LoggerFactory.getLogger(TestBulkSchemaConcurrent.class);
@@ -70,8 +70,17 @@
       restTestHarnesses.add(harness);
     }
   }
+
   @Override
-  public void doTest() throws Exception {
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    for (RestTestHarness r : restTestHarnesses) {
+      r.close();
+    }
+  }
+
+  @Test
+  public void test() throws Exception {
 
     final int threadCount = 5;
     setupHarnesses();
@@ -88,8 +97,6 @@
             ArrayList errs = new ArrayList();
             collectErrors.add(errs);
             invokeBulkCall(finalI,errs);
-          } catch (IOException e) {
-            e.printStackTrace();
           } catch (Exception e) {
             e.printStackTrace();
           }
@@ -105,8 +112,7 @@
     boolean success = true;
 
     for (List e : collectErrors) {
-      if(e== null) continue;
-      if(!e.isEmpty()){
+      if (e != null && !e.isEmpty()) {
         success = false;
         log.error(e.toString());
       }
@@ -166,27 +172,31 @@
     }
 
     //get another node
-    RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
-    long startTime = System.nanoTime();
-    boolean success = false;
-    long maxTimeoutMillis = 100000;
     Set<String> errmessages = new HashSet<>();
-    while ( ! success
-        && TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
-      errmessages.clear();
-      Map m = getObj(harness, aField, "fields");
-      if(m== null) errmessages.add(format("field {0} not created", aField));
-
-      m = getObj(harness, dynamicFldName, "dynamicFields");
-      if(m== null) errmessages.add(format("dynamic field {0} not created", dynamicFldName));
-
-      List l = getCopyFields(harness, "a1");
-      if(!checkCopyField(l,aField,dynamicCopyFldDest))
-        errmessages.add(format("CopyField source={0},dest={1} not created" , aField,dynamicCopyFldDest));
-
-      m = getObj(harness, "mystr", "fieldTypes");
-      if(m == null) errmessages.add(format("new type {}  not created" , newFieldTypeName));
-      Thread.sleep(10);
+    RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
+    try {
+      long startTime = System.nanoTime();
+      boolean success = false;
+      long maxTimeoutMillis = 100000;
+      while (!success
+          && TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
+        errmessages.clear();
+        Map m = getObj(harness, aField, "fields");
+        if (m == null) errmessages.add(format("field {0} not created", aField));
+        
+        m = getObj(harness, dynamicFldName, "dynamicFields");
+        if (m == null) errmessages.add(format("dynamic field {0} not created", dynamicFldName));
+        
+        List l = getCopyFields(harness, "a1");
+        if (!checkCopyField(l, aField, dynamicCopyFldDest)) errmessages
+            .add(format("CopyField source={0},dest={1} not created", aField, dynamicCopyFldDest));
+        
+        m = getObj(harness, "mystr", "fieldTypes");
+        if (m == null) errmessages.add(format("new type {}  not created", newFieldTypeName));
+        success = errmessages.isEmpty();
+        if (!success) Thread.sleep(10);
+      }
+    } finally {
+      harness.close();
     }
     if(!errmessages.isEmpty()){
       errs.addAll(errmessages);
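
The reworked invokeBulkCall(...) polls the randomly chosen node until the schema change becomes visible or roughly 100 seconds elapse, using System.nanoTime() (monotonic) rather than wall-clock time. The idiom in isolation, with a hypothetical check callback standing in for the getObj/getCopyFields probes:

    import java.util.concurrent.TimeUnit;

    // The polling idiom used above, extracted. Check and maxMillis are
    // hypothetical stand-ins for the schema probes and the 100s deadline.
    public class PollUntil {
      interface Check { boolean ok() throws Exception; }

      static boolean pollUntil(Check check, long maxMillis) throws Exception {
        final long start = System.nanoTime();
        while (TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start) < maxMillis) {
          if (check.ok()) return true; // condition met, stop early
          Thread.sleep(10);            // same back-off the test uses
        }
        return false;                  // deadline exhausted
      }
    }
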
diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java
index 3af0813..2254d1f 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchema.java
@@ -26,6 +26,7 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.zookeeper.KeeperException;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -47,9 +48,9 @@
   protected String getCloudSolrConfig() {
     return "solrconfig-managed-schema.xml";
   }
-      
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(CoreAdminParams.ACTION, CoreAdminParams.CoreAdminAction.STATUS.toString());
     QueryRequest request = new QueryRequest(params);
diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java
index d675161..7fa4b06 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java
@@ -16,6 +16,12 @@
  * limitations under the License.
  */
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -25,25 +31,17 @@
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.util.BaseTestHarness;
 import org.apache.solr.util.RESTfulServerProvider;
 import org.apache.solr.util.RestTestHarness;
+import org.apache.zookeeper.data.Stat;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.junit.BeforeClass;
+import org.junit.Test;
 import org.restlet.ext.servlet.ServerServlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.zookeeper.data.Stat;
-
-import org.junit.BeforeClass;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.SortedMap;
-import java.util.TreeMap;
-import java.util.concurrent.TimeUnit;
-
 public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestBase {
   private static final Logger log = LoggerFactory.getLogger(TestCloudManagedSchemaConcurrent.class);
   private static final String SUCCESS_XPATH = "/response/lst[@name='responseHeader']/int[@name='status'][.='0']";
@@ -56,10 +54,7 @@
 
   public TestCloudManagedSchemaConcurrent() {
     super();
-    fixShardCount = true;
-
     sliceCount = 4;
-    shardCount = 8;
   }
 
   @BeforeClass
@@ -69,6 +64,14 @@
   }
 
   @Override
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
+    for (RestTestHarness h : restTestHarnesses) {
+      h.close();
+    }
+  }
+
+  @Override
   protected String getCloudSolrConfig() {
     return "solrconfig-managed-schema.xml";
   }
@@ -239,10 +242,11 @@
 
     return expectedAddFieldTypes;
   }
-  
 
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 8)
+  public void test() throws Exception {
     verifyWaitForSchemaUpdateToPropagate();
     setupHarnesses();
     concurrentOperationsTest();
@@ -352,8 +356,11 @@
         return coreUrl.endsWith("/") ? coreUrl.substring(0, coreUrl.length()-1) : coreUrl;
       }
     });
-
-    addFieldTypePut(harness, "fooInt", 15);
+    try {
+      addFieldTypePut(harness, "fooInt", 15);
+    } finally {
+      harness.close();
+    }
 
     // go into ZK to get the version of the managed schema after the update
     SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient();
@@ -410,21 +417,24 @@
         return replicaUrl.endsWith("/") ? replicaUrl.substring(0, replicaUrl.length()-1) : replicaUrl;
       }
     });
-
-    long waitMs = waitSecs * 1000L;
-    if (waitMs > 0) Thread.sleep(waitMs); // wait a moment for the zk watcher to fire
-
     try {
-      testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion);
-    } catch (Exception exc) {
-      if (retry) {
-        // brief wait before retrying
-        Thread.sleep(waitMs > 0 ? waitMs : 2000L);
-
+      long waitMs = waitSecs * 1000L;
+      if (waitMs > 0) Thread.sleep(waitMs); // wait a moment for the zk watcher to fire
+  
+      try {
         testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion);
-      } else {
-        throw exc;
+      } catch (Exception exc) {
+        if (retry) {
+          // brief wait before retrying
+          Thread.sleep(waitMs > 0 ? waitMs : 2000L);
+  
+          testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion);
+        } else {
+          throw exc;
+        }
       }
+    } finally {
+      testHarness.close();
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java
index aac3b2f..1df5640c 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java
@@ -27,8 +27,9 @@
 import org.apache.solr.util.RESTfulServerProvider;
 import org.apache.solr.util.RestTestHarness;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.junit.After;
 import org.junit.BeforeClass;
-import org.junit.Before;
+import org.junit.Test;
 import org.restlet.ext.servlet.ServerServlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,23 +49,27 @@
   private static final Logger log = LoggerFactory.getLogger(TestCloudManagedSchemaConcurrent.class);
   private static final String SUCCESS_XPATH = "/response/lst[@name='responseHeader']/int[@name='status'][.='0']";
 
-  @Before
   @Override
-  public void setUp() throws Exception {
+  public void distribSetUp() throws Exception {
 
-    super.setUp();
+    super.distribSetUp();
 
     useJettyDataDir = false;
 
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
+  
+  @After
+  public void tearDown() throws Exception {
+    super.tearDown();
+    for (RestTestHarness h : restTestHarnesses) {
+      h.close();
+    }
+  }
 
   public TestCloudSchemaless() {
     schemaString = "schema-add-schema-fields-update-processor.xml";
-    fixShardCount = true;
-
     sliceCount = 4;
-    shardCount = 8;
   }
 
   @BeforeClass
@@ -113,8 +118,9 @@
     return expectedAddFields;
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 8)
+  public void test() throws Exception {
     setupHarnesses();
 
     // First, add a bunch of documents in a single update with the same new field.
diff --git a/solr/core/src/java/org/apache/solr/common/ResourceLoader.java b/solr/core/src/test/org/apache/solr/schema/ThrowErrorOnInitFieldType.java
similarity index 71%
rename from solr/core/src/java/org/apache/solr/common/ResourceLoader.java
rename to solr/core/src/test/org/apache/solr/schema/ThrowErrorOnInitFieldType.java
index e18663c..7745afb 100644
--- a/solr/core/src/java/org/apache/solr/common/ResourceLoader.java
+++ b/solr/core/src/test/org/apache/solr/schema/ThrowErrorOnInitFieldType.java
@@ -1,4 +1,4 @@
-package org.apache.solr.common;
+package org.apache.solr.schema;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -17,10 +17,14 @@
  * limitations under the License.
  */
 
+import java.util.Map;
+
 /**
- * @deprecated This interface has been kept for backwards compatibility and will
- * be removed in (5.0). Use {@link org.apache.lucene.analysis.util.ResourceLoader}
+ * Throws an {@link java.lang.Error} on init, for testing purposes.
  */
-@Deprecated
-public interface ResourceLoader extends org.apache.lucene.analysis.util.ResourceLoader {
+public class ThrowErrorOnInitFieldType extends TextField {
+
+  protected void init(IndexSchema schema, Map<String,String> args) {
+    throw new Error("Doing my job, throwing java.lang.Error");
+  }
 }
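
ThrowErrorOnInitFieldType exists so schema tests can verify that a java.lang.Error raised during field-type init propagates instead of being wrapped or swallowed. A hypothetical same-package usage sketch (the protected init() is visible there; the null schema argument is never touched because the method throws immediately):

    import java.util.Collections;

    // Hypothetical usage sketch, same package as the field type.
    class ThrowErrorOnInitFieldTypeDemo {
      static void demo() {
        ThrowErrorOnInitFieldType ft = new ThrowErrorOnInitFieldType();
        boolean threw = false;
        try {
          ft.init(null, Collections.<String,String>emptyMap());
        } catch (Error expected) {
          threw = true; // "Doing my job, throwing java.lang.Error"
        }
        if (!threw) throw new IllegalStateException("expected java.lang.Error");
      }
    }
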
diff --git a/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java b/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java
index 0ad6bb8..f62a8df 100644
--- a/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java
+++ b/solr/core/src/test/org/apache/solr/search/AnalyticsMergeStrategyTest.java
@@ -22,8 +22,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.junit.BeforeClass;
-
-import java.util.Arrays;
+import org.junit.Test;
 
 /**
  * Test for QueryComponent's distributed querying
@@ -33,8 +32,6 @@
 public class AnalyticsMergeStrategyTest extends BaseDistributedSearchTestCase {
 
   public AnalyticsMergeStrategyTest() {
-    fixShardCount = true;
-    shardCount = 3;
     stress = 0;
   }
 
@@ -43,8 +40,9 @@
     initCore("solrconfig-analytics-query.xml", "schema15.xml");
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     del("*:*");
 
     index_specific(0,"id","1", "sort_i", "5");
diff --git a/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java b/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java
index 6a9466f..ee733c4 100644
--- a/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java
+++ b/solr/core/src/test/org/apache/solr/search/MergeStrategyTest.java
@@ -21,13 +21,12 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.NamedList;
 import org.apache.solr.handler.component.MergeStrategy;
 import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.handler.component.ShardRequest;
 import org.junit.BeforeClass;
+import org.junit.Test;
 
 import java.util.Arrays;
 
@@ -39,8 +38,6 @@
 public class MergeStrategyTest extends BaseDistributedSearchTestCase {
 
   public MergeStrategyTest() {
-    fixShardCount = true;
-    shardCount = 3;
     stress = 0;
   }
 
@@ -49,8 +46,9 @@
     initCore("solrconfig-plugcollector.xml", "schema15.xml");
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     del("*:*");
 
     index_specific(0,"id","1", "sort_i", "5");
diff --git a/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java b/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java
index 482c02e..e63066f 100644
--- a/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java
+++ b/solr/core/src/test/org/apache/solr/search/QueryParsingTest.java
@@ -20,10 +20,9 @@
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.schema.SchemaField;
-import org.apache.solr.search.SortSpec;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.schema.SchemaField;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -79,7 +78,7 @@
     SortSpec spec;
     SolrQueryRequest req = req();
 
-    sort = QueryParsing.parseSort("score desc", req);
+    sort = QueryParsing.parseSortSpec("score desc", req).getSort();
     assertNull("sort", sort);//only 1 thing in the list, no Sort specified
 
     spec = QueryParsing.parseSortSpec("score desc", req);
@@ -89,7 +88,7 @@
     assertEquals(0, spec.getSchemaFields().size());
 
     // SOLR-4458 - using different case variations of asc and desc
-    sort = QueryParsing.parseSort("score aSc", req);
+    sort = QueryParsing.parseSortSpec("score aSc", req).getSort();
     SortField[] flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.SCORE);
     assertTrue(flds[0].getReverse());
@@ -102,7 +101,7 @@
     assertEquals(1, spec.getSchemaFields().size());
     assertNull(spec.getSchemaFields().get(0));
 
-    sort = QueryParsing.parseSort("weight dEsC", req);
+    sort = QueryParsing.parseSortSpec("weight dEsC", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.FLOAT);
     assertEquals(flds[0].getField(), "weight");
@@ -118,7 +117,7 @@
     assertNotNull(spec.getSchemaFields().get(0));
     assertEquals("weight", spec.getSchemaFields().get(0).getName());
 
-    sort = QueryParsing.parseSort("weight desc,bday ASC", req);
+    sort = QueryParsing.parseSortSpec("weight desc,bday ASC", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.FLOAT);
     assertEquals(flds[0].getField(), "weight");
@@ -127,7 +126,7 @@
     assertEquals(flds[1].getField(), "bday");
     assertEquals(flds[1].getReverse(), false);
     //order aliases
-    sort = QueryParsing.parseSort("weight top,bday asc", req);
+    sort = QueryParsing.parseSortSpec("weight top,bday asc", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.FLOAT);
     assertEquals(flds[0].getField(), "weight");
@@ -135,7 +134,7 @@
     assertEquals(flds[1].getType(), SortField.Type.LONG);
     assertEquals(flds[1].getField(), "bday");
     assertEquals(flds[1].getReverse(), false);
-    sort = QueryParsing.parseSort("weight top,bday bottom", req);
+    sort = QueryParsing.parseSortSpec("weight top,bday bottom", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.FLOAT);
     assertEquals(flds[0].getField(), "weight");
@@ -145,20 +144,20 @@
     assertEquals(flds[1].getReverse(), false);
 
     //test weird spacing
-    sort = QueryParsing.parseSort("weight         DESC,            bday         asc", req);
+    sort = QueryParsing.parseSortSpec("weight         DESC,            bday         asc", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.FLOAT);
     assertEquals(flds[0].getField(), "weight");
     assertEquals(flds[1].getField(), "bday");
     assertEquals(flds[1].getType(), SortField.Type.LONG);
     //handles trailing commas
-    sort = QueryParsing.parseSort("weight desc,", req);
+    sort = QueryParsing.parseSortSpec("weight desc,", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.FLOAT);
     assertEquals(flds[0].getField(), "weight");
 
     //test functions
-    sort = QueryParsing.parseSort("pow(weight, 2) desc", req);
+    sort = QueryParsing.parseSortSpec("pow(weight, 2) desc", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
     //Not thrilled about the fragility of string matching here, but...
@@ -166,12 +165,12 @@
     assertEquals(flds[0].getField(), "pow(float(weight),const(2))");
     
     //test functions (more deep)
-    sort = QueryParsing.parseSort("sum(product(r_f1,sum(d_f1,t_f1,1.0)),a_f1) asc", req);
+    sort = QueryParsing.parseSortSpec("sum(product(r_f1,sum(d_f1,t_f1,1.0)),a_f1) asc", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
     assertEquals(flds[0].getField(), "sum(product(float(r_f1),sum(float(d_f1),float(t_f1),const(1.0))),float(a_f1))");
 
-    sort = QueryParsing.parseSort("pow(weight,                 2.0)         desc", req);
+    sort = QueryParsing.parseSortSpec("pow(weight,                 2.0)         desc", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
     //Not thrilled about the fragility of string matching here, but...
@@ -202,19 +201,19 @@
     assertEquals("bday", schemaFlds.get(2).getName());
     
     //handles trailing commas
-    sort = QueryParsing.parseSort("weight desc,", req);
+    sort = QueryParsing.parseSortSpec("weight desc,", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.FLOAT);
     assertEquals(flds[0].getField(), "weight");
 
     //Test literals in functions
-    sort = QueryParsing.parseSort("strdist(foo_s1, \"junk\", jw) desc", req);
+    sort = QueryParsing.parseSortSpec("strdist(foo_s1, \"junk\", jw) desc", req).getSort();
     flds = sort.getSort();
     assertEquals(flds[0].getType(), SortField.Type.REWRITEABLE);
     //the value sources get wrapped, so the out field is different than the input
     assertEquals(flds[0].getField(), "strdist(str(foo_s1),literal(junk), dist=org.apache.lucene.search.spell.JaroWinklerDistance)");
 
-    sort = QueryParsing.parseSort("", req);
+    sort = QueryParsing.parseSortSpec("", req).getSort();
     assertNull(sort);
 
     spec = QueryParsing.parseSortSpec("", req);
@@ -231,40 +230,40 @@
 
     //test some bad vals
     try {
-      sort = QueryParsing.parseSort("weight, desc", req);
+      sort = QueryParsing.parseSortSpec("weight, desc", req).getSort();
       assertTrue(false);
     } catch (SolrException e) {
       //expected
     }
     try {
-      sort = QueryParsing.parseSort("w", req);
+      sort = QueryParsing.parseSortSpec("w", req).getSort();
       assertTrue(false);
     } catch (SolrException e) {
       //expected
     }
     try {
-      sort = QueryParsing.parseSort("weight desc, bday", req);
+      sort = QueryParsing.parseSortSpec("weight desc, bday", req).getSort();
       assertTrue(false);
     } catch (SolrException e) {
     }
 
     try {
       //bad number of commas
-      sort = QueryParsing.parseSort("pow(weight,,2) desc, bday asc", req);
+      sort = QueryParsing.parseSortSpec("pow(weight,,2) desc, bday asc", req).getSort();
       assertTrue(false);
     } catch (SolrException e) {
     }
 
     try {
       //bad function
-      sort = QueryParsing.parseSort("pow() desc, bday asc", req);
+      sort = QueryParsing.parseSortSpec("pow() desc, bday asc", req).getSort();
       assertTrue(false);
     } catch (SolrException e) {
     }
 
     try {
       //bad number of parens
-      sort = QueryParsing.parseSort("pow((weight,2) desc, bday asc", req);
+      sort = QueryParsing.parseSortSpec("pow((weight,2) desc, bday asc", req).getSort();
       assertTrue(false);
     } catch (SolrException e) {
     }
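Every call site in this test moves from the removed `QueryParsing.parseSort(String, SolrQueryRequest)` to `parseSortSpec(...)`, whose `SortSpec` result carries both the Lucene `Sort` and the schema fields it resolved. The idiom, sketched from the assertions above:

```java
// parseSortSpec returns a SortSpec; getSort() may be null when the spec is
// empty or just the default score-descending sort.
SolrQueryRequest req = req();
SortSpec spec = QueryParsing.parseSortSpec("weight desc, bday asc", req);
Sort sort = spec.getSort();                        // null => default relevance order
List<SchemaField> fields = spec.getSchemaFields(); // one entry per sort clause
```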
diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
index 791c6ee..0f0b0a3 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
@@ -17,6 +17,14 @@
 
 package org.apache.solr.search;
 
+import java.util.Collections;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.Iterator;
+
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
@@ -45,13 +53,102 @@
   }
 
   @Test
-  public void testCollapseQueries() throws Exception {
-    final String group = (random().nextBoolean() ? "group_s" : "group_s_dv");
-    
-    String[] doc = {"id","1", "term_s", "YYYY", group, "group1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"};
+  public void testStringCollapse() throws Exception {
+    List<String> types = new ArrayList<>();
+    types.add("group_s");
+    types.add("group_s_dv");
+    Collections.shuffle(types, random());
+    String group = types.get(0);
+    String hint = (random().nextBoolean() ? " hint="+CollapsingQParserPlugin.HINT_TOP_FC : "");
+    testCollapseQueries(group, hint, false);
+  }
+
+  @Test
+  public void testNumericCollapse() throws Exception {
+    List<String> types = new ArrayList<>();
+    types.add("group_i");
+    types.add("group_ti_dv");
+    types.add("group_f");
+    types.add("group_tf_dv");
+    Collections.shuffle(types, random());
+    String group = types.get(0);
+    String hint = "";
+    testCollapseQueries(group, hint, true);
+  }
+
+  @Test
+  public void testMergeBoost() throws Exception {
+
+    Set<Integer> boosted = new HashSet<>();
+    Set<Integer> results = new HashSet<>();
+
+    for(int i=0; i<200; i++) {
+      boosted.add(random().nextInt(1000));
+    }
+
+    for(int i=0; i<200; i++) {
+      results.add(random().nextInt(1000));
+    }
+
+    int[] boostedArray = new int[boosted.size()];
+    int[] resultsArray = new int[results.size()];
+
+    Iterator<Integer> boostIt = boosted.iterator();
+    int index = 0;
+    while(boostIt.hasNext()) {
+      boostedArray[index++] = boostIt.next();
+    }
+
+    Iterator<Integer> resultsIt = results.iterator();
+    index = 0;
+    while(resultsIt.hasNext()) {
+      resultsArray[index++] = resultsIt.next();
+    }
+
+    Arrays.sort(boostedArray);
+    Arrays.sort(resultsArray);
+
+    CollapsingQParserPlugin.MergeBoost mergeBoost = new CollapsingQParserPlugin.MergeBoost(boostedArray);
+
+    List<Integer> boostedResults = new ArrayList<>();
+
+    for(int i=0; i<resultsArray.length; i++) {
+      int result = resultsArray[i];
+      if(mergeBoost.boost(result)) {
+        boostedResults.add(result);
+      }
+    }
+
+    List<Integer> controlResults = new ArrayList<>();
+
+    for(int i=0; i<resultsArray.length; i++) {
+      int result = resultsArray[i];
+      if(Arrays.binarySearch(boostedArray, result) > -1) {
+        controlResults.add(result);
+      }
+    }
+
+    if(boostedResults.size() == controlResults.size()) {
+      for(int i=0; i<boostedResults.size(); i++) {
+        if(!boostedResults.get(i).equals(controlResults.get(i).intValue())) {
+          throw new Exception("boosted results do not match control results, boostedResults size:"+boostedResults.toString()+", controlResults size:"+controlResults.toString());
+        }
+      }
+    } else {
+      throw new Exception("boosted results do not match control results, boostedResults size:"+boostedResults.toString()+", controlResults size:"+controlResults.toString());
+    }
+  }
+
+  private void testCollapseQueries(String group, String hint, boolean numeric) throws Exception {
+
+    String[] doc = {"id","1", "term_s", "YYYY", group, "1", "test_ti", "5", "test_tl", "10", "test_tf", "2000"};
     assertU(adoc(doc));
     assertU(commit());
-    String[] doc1 = {"id","2", "term_s","YYYY", group, "group1", "test_ti", "50", "test_tl", "100", "test_tf", "200"};
+    String[] doc1 = {"id","2", "term_s","YYYY", group, "1", "test_ti", "50", "test_tl", "100", "test_tf", "200"};
     assertU(adoc(doc1));
 
 
@@ -63,19 +160,25 @@
     assertU(adoc(doc3));
 
 
-    String[] doc4 = {"id","5", "term_s", "YYYY", group, "group2", "test_ti", "4", "test_tl", "10", "test_tf", "2000"};
+    String[] doc4 = {"id","5", "term_s", "YYYY", group, "2", "test_ti", "4", "test_tl", "10", "test_tf", "2000"};
     assertU(adoc(doc4));
     assertU(commit());
-    String[] doc5 = {"id","6", "term_s","YYYY", group, "group2", "test_ti", "10", "test_tl", "100", "test_tf", "200"};
+    String[] doc5 = {"id","6", "term_s","YYYY", group, "2", "test_ti", "10", "test_tl", "100", "test_tf", "200"};
     assertU(adoc(doc5));
     assertU(commit());
 
+    String[] doc6 = {"id","7", "term_s", "YYYY", group, "1", "test_ti", "8", "test_tl", "50", "test_tf", "300"};
+    assertU(adoc(doc6));
+    assertU(commit());
 
 
     //Test collapse by score and following sort by score
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+""+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     assertQ(req(params, "indent", "on"), "*[count(//doc)=2]",
@@ -87,7 +190,7 @@
     // SOLR-5544 test ordering with empty sort param
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=expand min=test_tf}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=expand min=test_tf"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("sort","");
@@ -101,7 +204,7 @@
     // Test value source collapse criteria
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=field(test_ti)}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=field(test_ti)"+hint+"}");
     params.add("sort", "test_ti desc");
     assertQ(req(params), "*[count(//doc)=3]",
         "//result/doc[1]/float[@name='id'][.='4.0']",
@@ -112,7 +215,7 @@
     // Test value source collapse criteria with cscore function
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=cscore()}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=cscore()"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     assertQ(req(params), "*[count(//doc)=3]",
@@ -124,7 +227,7 @@
     // Test value source collapse criteria with compound cscore function
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=sum(cscore(),field(test_ti))}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse min=sum(cscore(),field(test_ti))"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     assertQ(req(params), "*[count(//doc)=3]",
@@ -137,7 +240,7 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "YYYY");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("qf", "term_s");
@@ -151,7 +254,7 @@
     //Test SOLR-5773 with score collapse criteria
     params = new ModifiableSolrParams();
     params.add("q", "YYYY");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse}");
+    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("qf", "term_s");
@@ -165,7 +268,7 @@
     //Test SOLR-5773 with max field collapse criteria
     params = new ModifiableSolrParams();
     params.add("q", "YYYY");
-    params.add("fq", "{!collapse field="+group+" min=test_ti nullPolicy=collapse}");
+    params.add("fq", "{!collapse field="+group+" min=test_ti nullPolicy=collapse"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("qf", "term_s");
@@ -180,7 +283,7 @@
     //Test SOLR-5773 elevating documents with null group
     params = new ModifiableSolrParams();
     params.add("q", "YYYY");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+""+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("qf", "term_s");
@@ -197,7 +300,7 @@
     //Test collapse by min int field and sort
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" min=test_ti}");
+    params.add("fq", "{!collapse field="+group+" min=test_ti"+hint+"}");
     params.add("sort", "id desc");
     assertQ(req(params), "*[count(//doc)=2]",
                            "//result/doc[1]/float[@name='id'][.='5.0']",
@@ -205,7 +308,7 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" min=test_ti}");
+    params.add("fq", "{!collapse field="+group+" min=test_ti"+hint+"}");
     params.add("sort", "id asc");
     assertQ(req(params), "*[count(//doc)=2]",
                          "//result/doc[1]/float[@name='id'][.='1.0']",
@@ -213,15 +316,17 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" min=test_ti}");
+    params.add("fq", "{!collapse field="+group+" min=test_ti"+hint+"}");
     params.add("sort", "test_tl asc,id desc");
     assertQ(req(params), "*[count(//doc)=2]",
         "//result/doc[1]/float[@name='id'][.='5.0']",
         "//result/doc[2]/float[@name='id'][.='1.0']");
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" min=test_ti}");
+    params.add("fq", "{!collapse field="+group+" min=test_ti"+hint+"}");
     params.add("sort", "score desc,id asc");
     params.add("defType", "edismax");
     params.add("bf", "field(id)");
@@ -235,39 +340,43 @@
     //Test collapse by max int field
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_ti}");
+    params.add("fq", "{!collapse field="+group+" max=test_ti"+hint+"}");
     params.add("sort", "test_ti asc");
     assertQ(req(params), "*[count(//doc)=2]",
                          "//result/doc[1]/float[@name='id'][.='6.0']",
                          "//result/doc[2]/float[@name='id'][.='2.0']"
         );
 
+    try {
+      //Test collapse by min long field
+      params = new ModifiableSolrParams();
+      params.add("q", "*:*");
+      params.add("fq", "{!collapse field="+group+" min=test_tl"+hint+"}");
+      params.add("sort", "test_ti desc");
+      assertQ(req(params), "*[count(//doc)=2]",
+          "//result/doc[1]/float[@name='id'][.='1.0']",
+          "//result/doc[2]/float[@name='id'][.='5.0']");
 
 
-    //Test collapse by min long field
-    params = new ModifiableSolrParams();
-    params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" min=test_tl}");
-    params.add("sort", "test_ti desc");
-    assertQ(req(params), "*[count(//doc)=2]",
-        "//result/doc[1]/float[@name='id'][.='1.0']",
-        "//result/doc[2]/float[@name='id'][.='5.0']");
-
-
-    //Test collapse by max long field
-    params = new ModifiableSolrParams();
-    params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tl}");
-    params.add("sort", "test_ti desc");
-    assertQ(req(params), "*[count(//doc)=2]",
-                         "//result/doc[1]/float[@name='id'][.='2.0']",
-                         "//result/doc[2]/float[@name='id'][.='6.0']");
+      //Test collapse by max long field
+      params = new ModifiableSolrParams();
+      params.add("q", "*:*");
+      params.add("fq", "{!collapse field="+group+" max=test_tl"+hint+"}");
+      params.add("sort", "test_ti desc");
+      assertQ(req(params), "*[count(//doc)=2]",
+                           "//result/doc[1]/float[@name='id'][.='2.0']",
+                           "//result/doc[2]/float[@name='id'][.='6.0']");
+    } catch (Exception e) {
+      if(!numeric) {
+        throw e;
+      }
+    }
 
 
     //Test collapse by min float field
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" min=test_tf}");
+    params.add("fq", "{!collapse field="+group+" min=test_tf"+hint+"}");
     params.add("sort", "test_ti desc");
     assertQ(req(params), "*[count(//doc)=2]",
                          "//result/doc[1]/float[@name='id'][.='2.0']",
@@ -279,7 +388,7 @@
     //Test collapse by min float field
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf}");
+    params.add("fq", "{!collapse field="+group+" max=test_tf"+hint+"}");
     params.add("sort", "test_ti asc");
     assertQ(req(params), "*[count(//doc)=2]",
                          "//result/doc[1]/float[@name='id'][.='5.0']",
@@ -288,7 +397,7 @@
     //Test collapse by min float field sort by score
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf}");
+    params.add("fq", "{!collapse field="+group+" max=test_tf"+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(id)");
     params.add("fl", "score, id");
@@ -304,7 +413,7 @@
     //Test nullPolicy expand
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf nullPolicy=expand}");
+    params.add("fq", "{!collapse field="+group+" max=test_tf nullPolicy=expand"+hint+"}");
     params.add("sort", "id desc");
     assertQ(req(params), "*[count(//doc)=4]",
         "//result/doc[1]/float[@name='id'][.='5.0']",
@@ -316,7 +425,7 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+" max=test_tf nullPolicy=collapse}");
+    params.add("fq", "{!collapse field="+group+" max=test_tf nullPolicy=collapse"+hint+"}");
     params.add("sort", "id desc");
     assertQ(req(params), "*[count(//doc)=3]",
         "//result/doc[1]/float[@name='id'][.='5.0']",
@@ -326,7 +435,7 @@
 
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("fq","{!tag=test_ti}id:5");
@@ -338,7 +447,7 @@
     // SOLR-5230 - ensure CollapsingFieldValueCollector.finish() is called
     params = new ModifiableSolrParams();
     params.add("q", "*:*");
-    params.add("fq", "{!collapse field="+group+"}");
+    params.add("fq", "{!collapse field="+group+hint+"}");
     params.add("group", "true");
     params.add("group.field", "id");
     assertQ(req(params), "*[count(//doc)=2]");
@@ -350,14 +459,15 @@
     assertU(commit());
     params = new ModifiableSolrParams();
     params.add("q", "YYYY");
-    params.add("fq", "{!collapse field="+group+" nullPolicy=collapse}");
+    params.add("fq", "{!collapse field="+group+hint+" nullPolicy=collapse}");
     params.add("defType", "edismax");
     params.add("bf", "field(test_ti)");
     params.add("qf", "term_s");
     params.add("qt", "/elevate");
-    assertQ(req(params), "*[count(//doc)=2]",
+    assertQ(req(params), "*[count(//doc)=3]",
                          "//result/doc[1]/float[@name='id'][.='3.0']",
-                         "//result/doc[2]/float[@name='id'][.='6.0']");
+                         "//result/doc[2]/float[@name='id'][.='6.0']",
+                         "//result/doc[3]/float[@name='id'][.='7.0']");
 
 
   }
@@ -385,4 +495,5 @@
     assertQ(req(params), "*[count(//doc)=0]");
   }
 
+
 }
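The collapse tests are now parameterized over the group field (string vs. numeric, with and without docValues) and an optional `hint` local parameter, so one body covers all the variants. A sketch of the filter queries being assembled, with values taken from the hunks above:

```java
// Sketch of the collapse filter query built throughout testCollapseQueries;
// the top-level FieldCache hint is randomly enabled for string fields only.
ModifiableSolrParams params = new ModifiableSolrParams();
params.add("q", "*:*");
String hint = " hint=" + CollapsingQParserPlugin.HINT_TOP_FC;
params.add("fq", "{!collapse field=group_s min=test_ti nullPolicy=expand" + hint + "}");
```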
diff --git a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
index 2e9a11a..dc2b75e 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
@@ -23,6 +23,8 @@
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.LeafCollector;
+import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
@@ -51,7 +53,6 @@
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.request.SolrQueryRequest;
-
 import org.junit.Ignore;
 
 import java.io.IOException;
@@ -410,7 +411,8 @@
           // :TODO: would be simpler to always serialize every position of SortField[]
           if (type==SortField.Type.SCORE || type==SortField.Type.DOC) continue;
 
-          FieldComparator comparator = null;
+          FieldComparator<?> comparator = null;
+          LeafFieldComparator leafComparator = null;
           Object[] vals = new Object[nDocs];
 
           int lastIdx = -1;
@@ -433,12 +435,12 @@
 
             if (comparator == null) {
               comparator = sortField.getComparator(1,0);
-              comparator = comparator.setNextReader(currentLeaf);
+              leafComparator = comparator.getLeafComparator(currentLeaf);
             }
 
             doc -= currentLeaf.docBase;  // adjust for what segment this is in
-            comparator.setScorer(new FakeScorer(doc, score));
-            comparator.copy(0, doc);
+            leafComparator.setScorer(new FakeScorer(doc, score));
+            leafComparator.copy(0, doc);
             Object val = comparator.value(0);
             if (null != ft) val = ft.marshalSortValue(val);
             vals[position] = val;
@@ -705,24 +707,28 @@
   class TestCollector extends TopDocsCollector {
 
     private List<ScoreDoc> list = new ArrayList();
-    private NumericDocValues values;
-    private int base;
 
     public TestCollector(PriorityQueue pq) {
       super(pq);
     }
 
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
-    }
+    @Override
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      final int base = context.docBase;
+      final NumericDocValues values = DocValues.getNumeric(context.reader(), "sort_i");
+      return new LeafCollector() {
+        
+        @Override
+        public void setScorer(Scorer scorer) throws IOException {}
+        
+        public boolean acceptsDocsOutOfOrder() {
+          return false;
+        }
 
-    public void doSetNextReader(LeafReaderContext context) throws IOException {
-      values = DocValues.getNumeric(context.reader(), "sort_i");
-      base = context.docBase;
-    }
-
-    public void collect(int doc) {
-      list.add(new ScoreDoc(doc+base, (float)values.get(doc)));
+        public void collect(int doc) {
+          list.add(new ScoreDoc(doc+base, (float)values.get(doc)));
+        }
+      };
     }
 
     public int topDocsSize() {
@@ -759,27 +765,27 @@
   class TestCollector1 extends TopDocsCollector {
 
     private List<ScoreDoc> list = new ArrayList();
-    private int base;
-    private Scorer scorer;
 
     public TestCollector1(PriorityQueue pq) {
       super(pq);
     }
 
-    public boolean acceptsDocsOutOfOrder() {
-      return false;
-    }
-
-    public void doSetNextReader(LeafReaderContext context) throws IOException {
-      base = context.docBase;
-    }
-
-    public void setScorer(Scorer scorer) {
-      this.scorer = scorer;
-    }
-
-    public void collect(int doc) throws IOException {
-      list.add(new ScoreDoc(doc+base, scorer.score()));
+    @Override
+    public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
+      final int base = context.docBase;
+      return new LeafCollector() {
+        
+        Scorer scorer;
+        
+        @Override
+        public void setScorer(Scorer scorer) throws IOException {
+          this.scorer = scorer;
+        }
+        
+        public void collect(int doc) throws IOException {
+          list.add(new ScoreDoc(doc+base, scorer.score()));
+        }
+      };
     }
 
     public int topDocsSize() {
@@ -813,7 +819,4 @@
     }
   }
 
-
-
-
 }
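Two Lucene 5.0 API migrations land in this file. Per-segment comparator state moves off `FieldComparator` itself (the removed `setNextReader`) onto a `LeafFieldComparator` obtained per segment, and collectors now return a per-segment `LeafCollector` from `getLeafCollector` instead of mutating fields in `doSetNextReader`. The comparator side, condensed from the hunk above (`FakeScorer` is the test's own helper):

```java
// Condensed sketch of the per-segment comparator protocol used above:
// setScorer/copy live on the leaf, value(slot) stays on the parent.
FieldComparator<?> comparator = sortField.getComparator(1, 0);
LeafFieldComparator leaf = comparator.getLeafComparator(currentLeaf);
doc -= currentLeaf.docBase;                  // convert to a segment-local docid
leaf.setScorer(new FakeScorer(doc, score));
leaf.copy(0, doc);                           // copy the doc's value into slot 0
Object val = comparator.value(0);            // read it back from the parent
```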
diff --git a/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java b/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java
index 65af599..a803352 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRecoveryHdfs.java
@@ -19,7 +19,6 @@
 
 import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
 
-import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -42,15 +41,14 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.cloud.hdfs.HdfsBasicDistributedZk2Test;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.solr.update.HdfsUpdateLog;
 import org.apache.solr.update.UpdateHandler;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
-import org.apache.solr.util.IOUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
@@ -84,12 +82,10 @@
     try {
       URI uri = new URI(hdfsUri);
       fs = FileSystem.newInstance(uri, new Configuration());
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    } catch (URISyntaxException e) {
+    } catch (IOException | URISyntaxException e) {
       throw new RuntimeException(e);
     }
-    
+
     System.setProperty("solr.ulog.dir", hdfsUri + "/solr/shard1");
     
     initCore("solrconfig-tlog.xml","schema15.xml");
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java
index 12c5725..c086b7a 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java
@@ -17,25 +17,22 @@
  * limitations under the License.
  */
 
+import java.text.ParseException;
+import java.util.Arrays;
+
 import com.carrotsearch.randomizedtesting.RandomizedTest;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 import com.spatial4j.core.context.SpatialContext;
 import com.spatial4j.core.distance.DistanceUtils;
 import com.spatial4j.core.shape.Point;
 import com.spatial4j.core.shape.Rectangle;
-import com.spatial4j.core.shape.impl.RectangleImpl;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.schema.AbstractSpatialFieldType;
-import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.util.SpatialUtils;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.text.ParseException;
-import java.util.Arrays;
-
 /**
  * Test Solr 4's new spatial capabilities from the new Lucene spatial module. Don't thoroughly test it here because
  * Lucene spatial has its own tests.  Some of these tests were ported from Solr 3 spatial tests.
@@ -72,13 +69,13 @@
   public void testBadShapeParse400() {
     assertQEx(null, req(
         "fl", "id," + fieldName, "q", "*:*", "rows", "1000",
-        "fq", "{!field f="+fieldName+"}Intersects(NonexistentShape(89.9,-130 d=9))"), 400);
+        "fq", "{!field f=" + fieldName + "}Intersects(NonexistentShape(89.9,-130 d=9))"), 400);
     assertQEx(null, req(
         "fl", "id," + fieldName, "q", "*:*", "rows", "1000",
-        "fq", "{!field f="+fieldName+"}Intersects(NonexistentShape(89.9,-130 d=9"), 400);//missing parens
+        "fq", "{!field f=" + fieldName + "}Intersects(NonexistentShape(89.9,-130 d=9"), 400);//missing parens
     assertQEx(null, req(
         "fl", "id," + fieldName, "q", "*:*", "rows", "1000",
-        "fq", "{!field f="+fieldName+"}Intersectssss"), 400);
+        "fq", "{!field f=" + fieldName + "}Intersectssss"), 400);
 
     ignoreException("NonexistentShape");
     try {
@@ -110,19 +107,20 @@
   @Test
   public void testIntersectFilter() throws Exception {
     setupDocs();
+    
     //Try some edge cases
-    checkHits(fieldName, "1,1", 175, 3, 5, 6, 7);
-    checkHits(fieldName, "0,179.8", 200, 2, 8, 9);
-    checkHits(fieldName, "89.8, 50", 200, 2, 10, 11);//this goes over the north pole
-    checkHits(fieldName, "-89.8, 50", 200, 2, 12, 13);//this goes over the south pole
+    checkHits(fieldName, "1,1", 175, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 5, 6, 7);
+    checkHits(fieldName, "0,179.8", 200, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 8, 9);
+    checkHits(fieldName, "89.8, 50", 200, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 10, 11);//this goes over the north pole
+    checkHits(fieldName, "-89.8, 50", 200, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 12, 13);//this goes over the south pole
     //try some normal cases
-    checkHits(fieldName, "33.0,-80.0", 300, 2);
+    checkHits(fieldName, "33.0,-80.0", 300, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2);
     //large distance
-    checkHits(fieldName, "1,1", 5000, 3, 5, 6, 7);
+    checkHits(fieldName, "1,1", 5000, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 5, 6, 7);
     //Because we are generating a box based on the west/east longitudes and the south/north latitudes, which then
     //translates to a range query, which is slightly more inclusive.  Thus, even though 0.0 is 15.725 kms away,
     //it will be included, b/c of the box calculation.
-    checkHits(fieldName, false, "0.1,0.1", 15, 2, 5, 6);
+    checkHits(fieldName, false, "0.1,0.1", 15, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 5, 6);
 
     //try some more
     clearIndex();
@@ -133,18 +131,18 @@
     assertU(adoc("id", "17", fieldName, "44.043900,-95.436643"));
     assertU(commit());
 
-    checkHits(fieldName, "0,0", 1000, 1, 14);
-    checkHits(fieldName, "0,0", 2000, 2, 14, 15);
-    checkHits(fieldName, false, "0,0", 3000, 3, 14, 15, 16);
-    checkHits(fieldName, "0,0", 3001, 3, 14, 15, 16);
-    checkHits(fieldName, "0,0", 3000.1, 3, 14, 15, 16);
+    checkHits(fieldName, "0,0", 1000, DistanceUtils.EARTH_MEAN_RADIUS_KM, 1, 14);
+    checkHits(fieldName, "0,0", 2000, DistanceUtils.EARTH_MEAN_RADIUS_KM, 2, 14, 15);
+    checkHits(fieldName, false, "0,0", 3000, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 14, 15, 16);
+    checkHits(fieldName, "0,0", 3001, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 14, 15, 16);
+    checkHits(fieldName, "0,0", 3000.1, DistanceUtils.EARTH_MEAN_RADIUS_KM, 3, 14, 15, 16);
 
     //really fine grained distance and reflects some of the vagaries of how we are calculating the box
-    checkHits(fieldName, "43.517030,-96.789603", 109, 0);
+    checkHits(fieldName, "43.517030,-96.789603", 109, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0);
 
     //falls outside of the real distance, but inside the bounding box
-    checkHits(fieldName, true,  "43.517030,-96.789603", 110, 0);
-    checkHits(fieldName, false, "43.517030,-96.789603", 110, 1, 17);
+    checkHits(fieldName, true,  "43.517030,-96.789603", 110, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0);
+    checkHits(fieldName, false, "43.517030,-96.789603", 110, DistanceUtils.EARTH_MEAN_RADIUS_KM, 1, 17);
   }
 
   @Test
@@ -157,21 +155,21 @@
     assertU(commit());
 
     assertQ(req(
-        "fl", "id," + fieldName, "q", "*:*", "rows", "1000",
-        "fq", "{!bbox sfield="+fieldName+" pt="+IN+" d=9}"),
+            "fl", "id," + fieldName, "q", "*:*", "rows", "1000",
+            "fq", "{!bbox sfield=" + fieldName + " pt=" + IN + " d=9}"),
         "//result/doc/*[@name='" + fieldName + "']//text()='" + OUT + "'");
   }
 
   @Test
   public void checkQueryEmptyIndex() throws ParseException {
-    checkHits(fieldName, "0,0", 100, 0);//doesn't error
+    checkHits(fieldName, "0,0", 100, DistanceUtils.EARTH_MEAN_RADIUS_KM, 0);//doesn't error
   }
 
-  private void checkHits(String fieldName, String pt, double distKM, int count, int ... docIds) throws ParseException {
-    checkHits(fieldName, true, pt, distKM, count, docIds);
+  private void checkHits(String fieldName, String pt, double distKM, double sphereRadius, int count, int ... docIds) throws ParseException {
+    checkHits(fieldName, true, pt, distKM, sphereRadius, count, docIds);
   }
 
-  private void checkHits(String fieldName, boolean exact, String ptStr, double distKM, int count, int ... docIds) throws ParseException {
+  private void checkHits(String fieldName, boolean exact, String ptStr, double distKM, double sphereRadius, int count, int ... docIds) throws ParseException {
     if (exact && fieldName.equalsIgnoreCase("bbox")) {
       return; // bbox field only supports rectangular query
     }
@@ -217,7 +215,7 @@
     {
       assertQ(req(
           "fl", "id", "q", "*:*", "rows", "1000",
-          "fq", "{!" + (exact ? "geofilt" : "bbox") + " sfield=" + fieldName + " pt='" + ptStr + "' d=" + distKM + "}"),
+          "fq", "{!" + (exact ? "geofilt" : "bbox") + " sfield=" + fieldName + " pt='" + ptStr + "' d=" + distKM + " sphere_radius=" + sphereRadius + "}"),
           tests);
     }
 
@@ -332,7 +330,7 @@
           "sfield=" + fieldName + " "
           + (score != null ? "score="+score : "") + " "
           + (filter != null ? "filter="+filter : "") + " "
-          + "pt=" + lat + "," + lon + " d=" + (dDEG * DistanceUtils.DEG_TO_KM) + "}";
+          + "pt=" + lat + "," + lon + " d=" + (dDEG /* DistanceUtils.DEG_TO_KM*/) + "}";
     } else {
       return "{! "
           + (score != null ? "score="+score : "") + " "
@@ -361,37 +359,6 @@
   }
 
   @Test
-  public void solr4OldShapeSyntax() throws Exception {
-    assumeFalse("Mostly just valid for prefix-tree", fieldName.equals("pointvector"));
-
-    //we also test that the old syntax is parsed in worldBounds in the schema
-    {
-      IndexSchema schema = h.getCore().getLatestSchema();
-      AbstractSpatialFieldType type = (AbstractSpatialFieldType) schema.getFieldTypeByName("stqpt_u_oldworldbounds");
-      SpatialContext ctx = type.getStrategy("foo").getSpatialContext();
-      assertEquals(new RectangleImpl(0, 1000, 0, 1000, ctx), ctx.getWorldBounds());
-    }
-
-    //syntax supported in Solr 4 but not beyond
-    //   See Spatial4j LegacyShapeReadWriterFormat
-    String rect = "-74.093 41.042 -69.347 44.558";//minX minY maxX maxY
-    String circ = "Circle(4.56,1.23 d=0.0710)";
-
-    //show we can index this (without an error)
-    assertU(adoc("id", "rect", fieldName, rect));
-    if (!fieldName.equals("bbox")) {
-      assertU(adoc("id", "circ", fieldName, circ));
-      assertU(commit());
-    }
-
-    //only testing no error
-    assertJQ(req("q", "{!field f=" + fieldName + "}Intersects(" + rect + ")"));
-    if (!fieldName.equals("bbox")) {
-      assertJQ(req("q", "{!field f=" + fieldName + "}Intersects(" + circ + ")"));
-    }
-  }
-
-  @Test
   public void testBadScoreParam() throws Exception {
     assertQEx("expect friendly error message",
         "none",
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrJ.java b/solr/core/src/test/org/apache/solr/search/TestSolrJ.java
index 4aca50f..de98974 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrJ.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrJ.java
@@ -163,25 +163,22 @@
 
 
   public void doCommitPerf() throws Exception {
-    HttpSolrClient client = new HttpSolrClient("http://127.0.0.1:8983/solr");
 
-    long start = System.currentTimeMillis();
+    try (HttpSolrClient client = new HttpSolrClient("http://127.0.0.1:8983/solr")) {
 
-    for (int i=0; i<10000; i++) {
-      SolrInputDocument doc = new SolrInputDocument();
-      doc.addField("id", Integer.toString(i % 13));
-      client.add(doc);
-      client.commit(true, true, true);
+      long start = System.currentTimeMillis();
+
+      for (int i = 0; i < 10000; i++) {
+        SolrInputDocument doc = new SolrInputDocument();
+        doc.addField("id", Integer.toString(i % 13));
+        client.add(doc);
+        client.commit(true, true, true);
+      }
+
+      long end = System.currentTimeMillis();
+      System.out.println("TIME: " + (end-start));
     }
 
-    long end = System.currentTimeMillis();
-
-    client.shutdown();
-
-    System.out.println("TIME: " + (end-start));
   }
 
-
-
-
 }
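`HttpSolrClient` is closeable in this branch (the same change that renames `shutdown()` to `close()` in MockStreamingSolrClients below), so the benchmark switches to try-with-resources and the client is released even when `add` or `commit` throws. The pattern in isolation:

```java
// try-with-resources closes the client's connection pool on any exit path;
// the old code had to remember to call shutdown() explicitly.
try (HttpSolrClient client = new HttpSolrClient("http://127.0.0.1:8983/solr")) {
  SolrInputDocument doc = new SolrInputDocument();
  doc.addField("id", "1");
  client.add(doc);
  client.commit();
}
```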
diff --git a/solr/core/src/test/org/apache/solr/search/TestSort.java b/solr/core/src/test/org/apache/solr/search/TestSort.java
index 8107ad9..01077ba 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSort.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSort.java
@@ -270,7 +270,7 @@
         boolean trackScores = r.nextBoolean();
         boolean trackMaxScores = r.nextBoolean();
         boolean scoreInOrder = r.nextBoolean();
-        final TopFieldCollector topCollector = TopFieldCollector.create(sort, top, true, trackScores, trackMaxScores, scoreInOrder);
+        final TopFieldCollector topCollector = TopFieldCollector.create(sort, top, true, trackScores, trackMaxScores);
 
         final List<MyDoc> collectedDocs = new ArrayList<>();
         // delegate and collect docs ourselves
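`TopFieldCollector.create` drops its trailing docs-scored-in-order flag (out-of-order scoring went away with the leaf-collector API), leaving five arguments; note the still-declared `scoreInOrder` local keeps drawing from the random stream even though it is no longer passed. The new call shape, mirroring the hunk above:

```java
// The five-argument factory after the signature change:
// create(sort, numHits, fillFields, trackDocScores, trackMaxScore)
TopFieldCollector topCollector =
    TopFieldCollector.create(sort, 10, true, /*trackScores*/ false, /*trackMaxScore*/ false);
```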
diff --git a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
index 4c58030..51cd8ab 100644
--- a/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
+++ b/solr/core/src/test/org/apache/solr/search/mlt/CloudMLTQParserTest.java
@@ -23,6 +23,7 @@
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -35,8 +36,6 @@
   static Logger log = LoggerFactory.getLogger(CloudMLTQParserTest.class);
   
   public CloudMLTQParserTest() {
-    fixShardCount = true;
-    shardCount = 2;
     sliceCount = 2;
     
     configString = "solrconfig.xml";
@@ -47,8 +46,10 @@
   protected String getCloudSolrConfig() {
     return configString;
   }
-  
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 2)
+  public void test() throws Exception {
     
     waitForRecoveriesToFinish(false);
 
diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java b/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java
index 048c1a5..3ecd2de 100644
--- a/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java
+++ b/solr/core/src/test/org/apache/solr/search/stats/TestBaseStatsCache.java
@@ -30,13 +30,13 @@
   protected abstract String getStatsCacheClassName();
 
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("solr.statsCache", getStatsCacheClassName());
   }
 
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("solr.statsCache");
   }
   
diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java
index 74ba1a1..b93b897 100644
--- a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java
+++ b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java
@@ -22,23 +22,24 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.junit.Test;
 
 public class TestDefaultStatsCache extends BaseDistributedSearchTestCase {
   private int docId = 0;
   
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     System.setProperty("solr.statsCache", LocalStatsCache.class.getName());
   }
   
-  public void tearDown() throws Exception {
-    super.tearDown();
+  public void distribTearDown() throws Exception {
+    super.distribTearDown();
     System.clearProperty("solr.statsCache");
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  public void test() throws Exception {
     del("*:*");
     for (int i = 0; i < clients.size(); i++) {
       int shard = i + 1;
diff --git a/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java b/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java
index dcd0904..08e3baa 100644
--- a/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java
+++ b/solr/core/src/test/org/apache/solr/servlet/SolrRequestParserTest.java
@@ -22,7 +22,9 @@
 import static org.easymock.EasyMock.expect;
 import static org.easymock.EasyMock.replay;
 
+import java.io.BufferedInputStream;
 import java.io.ByteArrayInputStream;
+import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.SocketTimeoutException;
 import java.net.URL;
@@ -34,6 +36,7 @@
 import java.util.Map;
 import java.util.Vector;
 
+import javax.servlet.ReadListener;
 import javax.servlet.ServletInputStream;
 import javax.servlet.http.HttpServletRequest;
 
@@ -225,10 +228,7 @@
       expect(request.getContentType()).andReturn( contentType ).anyTimes();
       expect(request.getQueryString()).andReturn(getParams).anyTimes();
       expect(request.getContentLength()).andReturn(postBytes.length).anyTimes();
-      expect(request.getInputStream()).andReturn(new ServletInputStream() {
-        private final ByteArrayInputStream in = new ByteArrayInputStream(postBytes);
-        @Override public int read() { return in.read(); }
-      });
+      expect(request.getInputStream()).andReturn(new ByteServletInputStream(postBytes));
       replay(request);
       
       MultipartRequestParser multipart = new MultipartRequestParser( 2048 );
@@ -243,6 +243,39 @@
       assertArrayEquals( "contentType: "+contentType, new String[]{"foo","bar"}, p.getParams("dup") );
     }
   }
+
+  static class ByteServletInputStream extends ServletInputStream  {
+    final BufferedInputStream in;
+    final int len;
+    int readCount = 0;
+
+    public ByteServletInputStream(byte[] data) {
+      this.len = data.length;
+      this.in = new BufferedInputStream(new ByteArrayInputStream(data));
+    }
+
+    @Override
+    public boolean isFinished() {
+      return readCount == len;
+    }
+
+    @Override
+    public boolean isReady() {
+      return true;
+    }
+
+    @Override
+    public void setReadListener(ReadListener readListener) {
+      throw new IllegalStateException("Not supported");
+    }
+
+    @Override
+    public int read() throws IOException {
+      int read = in.read();
+      if (read != -1) {
+        readCount++; // count bytes read, not their values, so isFinished() is accurate
+      }
+      return read;
+    }
+  }
   
   @Test
   public void testStandardParseParamsAndFillStreamsISO88591() throws Exception
@@ -257,10 +290,7 @@
     expect(request.getContentType()).andReturn( contentType ).anyTimes();
     expect(request.getQueryString()).andReturn(getParams).anyTimes();
     expect(request.getContentLength()).andReturn(postBytes.length).anyTimes();
-    expect(request.getInputStream()).andReturn(new ServletInputStream() {
-      private final ByteArrayInputStream in = new ByteArrayInputStream(postBytes);
-      @Override public int read() { return in.read(); }
-    });
+    expect(request.getInputStream()).andReturn(new ByteServletInputStream(postBytes));
     replay(request);
     
     MultipartRequestParser multipart = new MultipartRequestParser( 2048 );
@@ -292,10 +322,7 @@
     // we dont pass a content-length to let the security mechanism limit it:
     expect(request.getContentLength()).andReturn(-1).anyTimes();
     expect(request.getQueryString()).andReturn(null).anyTimes();
-    expect(request.getInputStream()).andReturn(new ServletInputStream() {
-      private final ByteArrayInputStream in = new ByteArrayInputStream(large.toString().getBytes(StandardCharsets.US_ASCII));
-      @Override public int read() { return in.read(); }
-    });
+    expect(request.getInputStream()).andReturn(new ByteServletInputStream(large.toString().getBytes(StandardCharsets.US_ASCII)));
     replay(request);
     
     FormDataRequestParser formdata = new FormDataRequestParser( limitKBytes );    
@@ -319,6 +346,21 @@
     // we emulate Jetty that returns empty stream when parameters were parsed before:
     expect(request.getInputStream()).andReturn(new ServletInputStream() {
       @Override public int read() { return -1; }
+
+      @Override
+      public boolean isFinished() {
+        return true;
+      }
+
+      @Override
+      public boolean isReady() {
+        return true;
+      }
+
+      @Override
+      public void setReadListener(ReadListener readListener) {
+
+      }
     });
     replay(request);
     
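The anonymous `ServletInputStream` mocks grow three stubs because the Servlet 3.1 class declares `isFinished`/`isReady`/`setReadListener` abstract, which is also why the reusable `ByteServletInputStream` helper above exists. A minimal always-empty stream, mirroring the hunk above:

```java
// Minimal Servlet 3.1 ServletInputStream: even an always-empty stream must
// stub the non-blocking-IO methods.
ServletInputStream empty = new ServletInputStream() {
  @Override public int read() { return -1; }                       // always at EOF
  @Override public boolean isFinished() { return true; }
  @Override public boolean isReady() { return true; }
  @Override public void setReadListener(ReadListener readListener) {}
};
```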
diff --git a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java
index bc5e75c..9024337 100644
--- a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java
+++ b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java
@@ -21,7 +21,6 @@
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicLong;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.lucene.util.LuceneTestCase;
 import org.junit.Test;
 
@@ -34,7 +33,7 @@
     int slabSize = blockSize * 4096;
     long totalMemory = 2 * slabSize;
     
-    BlockCache blockCache = new BlockCache(new Metrics(new Configuration()), true,totalMemory,slabSize,blockSize);
+    BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize, blockSize);
     byte[] buffer = new byte[1024];
     Random random = random();
     byte[] newData = new byte[blockSize];
@@ -87,8 +86,7 @@
     int slabSize = blockSize * 1024;
     long totalMemory = 2 * slabSize;
 
-    BlockCache blockCache = new BlockCache(new Metrics(new Configuration()),
-        true, totalMemory, slabSize);
+    BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize);
     BlockCacheKey blockCacheKey = new BlockCacheKey();
     blockCacheKey.setBlock(0);
     blockCacheKey.setFile(0);
diff --git a/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java b/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java
new file mode 100644
index 0000000..6751414
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/store/blockcache/BufferStoreTest.java
@@ -0,0 +1,93 @@
+package org.apache.solr.store.blockcache;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.math.BigDecimal;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.common.util.NamedList;
+import org.junit.Before;
+import org.junit.Test;
+
+public class BufferStoreTest extends LuceneTestCase {
+  private final static int blockSize = 1024;
+
+  private Metrics metrics;
+
+  private Store store;
+
+  @Before
+  public void setup() {
+    metrics = new Metrics();
+    BufferStore.initNewBuffer(blockSize, blockSize, metrics);
+    store = BufferStore.instance(blockSize);
+  }
+
+  @Test
+  public void testBufferTakePut() {
+    byte[] b1 = store.takeBuffer(blockSize);
+
+    assertGaugeMetricsChanged(false, false);
+
+    byte[] b2 = store.takeBuffer(blockSize);
+    byte[] b3 = store.takeBuffer(blockSize);
+
+    assertRawMetricCounts(2, 0);
+    assertGaugeMetricsChanged(true, false);
+
+    store.putBuffer(b1);
+
+    assertGaugeMetricsChanged(false, false);
+
+    store.putBuffer(b2);
+    store.putBuffer(b3);
+
+    assertRawMetricCounts(0, 2);
+    assertGaugeMetricsChanged(false, true);
+  }
+
+  private void assertRawMetricCounts(int allocated, int lost) {
+    assertEquals("Buffer allocation count is wrong.", allocated,
+        metrics.shardBuffercacheAllocate.get());
+    assertEquals("Lost buffer count is wrong", lost,
+        metrics.shardBuffercacheLost.get());
+  }
+
+  /**
+   * Stateful method to verify whether the amount of buffers allocated and lost
+   * since the last call has changed.
+   *
+   * @param allocated
+   *          whether buffers should have been allocated since the last call
+   * @param lost
+   *          whether buffers should have been lost since the last call
+   */
+  private void assertGaugeMetricsChanged(boolean allocated, boolean lost) {
+    NamedList<Number> stats = metrics.getStatistics();
+
+    assertEquals("Buffer allocation metric not updating correctly.",
+        allocated, isMetricPositive(stats, "buffercache.allocations"));
+    assertEquals("Buffer lost metric not updating correctly.",
+        lost, isMetricPositive(stats, "buffercache.lost"));
+  }
+
+  private boolean isMetricPositive(NamedList<Number> stats, String metric) {
+    return new BigDecimal(stats.get(metric).toString()).compareTo(BigDecimal.ZERO) > 0;
+  }
+
+}
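The new test pins down the `BufferStore` contract: a fixed-size buffer pool keyed by block size, whose take/put traffic is tracked through `Metrics`. Its API, condensed from the test itself:

```java
// Condensed from BufferStoreTest above: initialize a pool for 1 KB buffers,
// then recycle buffers through take/put.
Metrics metrics = new Metrics();
BufferStore.initNewBuffer(1024, 1024, metrics);
Store store = BufferStore.instance(1024);
byte[] buf = store.takeBuffer(1024); // reuse from the pool, or allocate
store.putBuffer(buf);                // return it for later reuse
```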
diff --git a/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java b/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java
index 264b7c2..7786578 100644
--- a/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java
+++ b/solr/core/src/test/org/apache/solr/update/MockStreamingSolrClients.java
@@ -85,9 +85,8 @@
       return solrClient.request(request);
     }
 
-
     @Override
-    public void shutdown() {}
+    public void close() {}
     
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
index 17c9b47..afabcd9 100644
--- a/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
+++ b/solr/core/src/test/org/apache/solr/update/PeerSyncTest.java
@@ -25,6 +25,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -41,18 +42,16 @@
     params(DISTRIB_UPDATE_PARAM, FROM_LEADER);
   
   public PeerSyncTest() {
-    fixShardCount = true;
-    shardCount = 3;
     stress = 0;
 
     // TODO: a better way to do this?
     configString = "solrconfig-tlog.xml";
     schemaString = "schema.xml";
   }
-  
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 3)
+  public void test() throws Exception {
     handle.clear();
     handle.put("timestamp", SKIPVAL);
     handle.put("score", SKIPVAL);
diff --git a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
index 6b18632..dc923e6 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrCmdDistributorTest.java
@@ -19,8 +19,8 @@
 
 import org.apache.lucene.index.LogDocMergePolicy;
 import org.apache.solr.BaseDistributedSearchTestCase;
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -36,6 +36,7 @@
 import org.apache.solr.core.ConfigSolr;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoresLocator;
+import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrEventListener;
 import org.apache.solr.search.SolrIndexSearcher;
@@ -47,11 +48,13 @@
 import org.apache.solr.update.SolrCmdDistributor.StdNode;
 import org.apache.solr.update.processor.DistributedUpdateProcessor;
 import org.junit.BeforeClass;
+import org.junit.Test;
 import org.xml.sax.SAXException;
 
 import javax.xml.parsers.ParserConfigurationException;
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -70,7 +73,7 @@
   private UpdateShardHandler updateShardHandler;
   
   public SolrCmdDistributorTest() throws ParserConfigurationException, IOException, SAXException {
-    updateShardHandler = new UpdateShardHandler(new ConfigSolr() {
+    updateShardHandler = new UpdateShardHandler(new ConfigSolr(null, null) {
 
       @Override
       public CoresLocator getCoresLocator() {
@@ -78,17 +81,16 @@
       }
 
       @Override
-      protected String getShardHandlerFactoryConfigPath() {
+      public PluginInfo getShardHandlerFactoryPluginInfo() {
         return null;
       }
 
       @Override
-      public boolean isPersistent() {
-        return false;
-      }});
+      protected String getProperty(CfgProp key) {
+        return null;
+      }
+    });
     
-    fixShardCount = true;
-    shardCount = 4;
     stress = 0;
   }
 
@@ -105,6 +107,10 @@
   // we don't get helpful override behavior due to the method being static
   @Override
   protected void createServers(int numShards) throws Exception {
+
+    System.setProperty("configSetBaseDir", TEST_HOME());
+    System.setProperty("coreRootDirectory", testDir.toPath().resolve("control").toString());
+    writeCoreProperties(testDir.toPath().resolve("control/cores"), DEFAULT_TEST_CORENAME);
     controlJetty = createJetty(new File(getSolrHome()), testDir + "/control/data", null, getSolrConfigFile(), getSchemaFile());
 
     controlClient = createNewSolrClient(controlJetty.getLocalPort());
@@ -113,6 +119,10 @@
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < numShards; i++) {
       if (sb.length() > 0) sb.append(',');
+      String shardname = "shard" + i;
+      Path coresPath = testDir.toPath().resolve(shardname).resolve("cores");
+      writeCoreProperties(coresPath, DEFAULT_TEST_CORENAME);
+      System.setProperty("coreRootDirectory", testDir.toPath().resolve(shardname).toString());
       JettySolrRunner j = createJetty(new File(getSolrHome()),
           testDir + "/shard" + i + "/data", null, getSolrConfigFile(),
           getSchemaFile());
@@ -125,9 +135,10 @@
 
     shards = sb.toString();
   }
-  
-  @Override
-  public void doTest() throws Exception {
+
+  @Test
+  @ShardsFixed(num = 4)
+  public void test() throws Exception {
     del("*:*");
     
     SolrCmdDistributor cmdDistrib = new SolrCmdDistributor(updateShardHandler);
@@ -309,7 +320,7 @@
     
     cmdDistrib.finish();
 
-    assertEquals(shardCount, commits.get());
+    assertEquals(getShardCount(), commits.get());
     
     for (SolrClient c : clients) {
       NamedList<Object> resp = c.request(new LukeRequest());
@@ -506,14 +517,9 @@
   }
   
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
-  }
-  
-  @Override
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     updateShardHandler.close();
-    super.tearDown();
+    super.distribTearDown();
   }
 
   private void testDistribOpenSearcher() {
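Likewise, per-test lifecycle hooks move from setUp()/tearDown() to distribSetUp()/distribTearDown(). A sketch of the expected override pattern (the resource being closed is illustrative):

    @Override
    public void distribSetUp() throws Exception {
      super.distribSetUp();
      // per-test initialization that previously lived in setUp()
    }

    @Override
    public void distribTearDown() throws Exception {
      updateShardHandler.close(); // release per-test resources first
      super.distribTearDown();    // then let the base class tear down the shards
    }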
diff --git a/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java b/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java
index a29cf37..d5019f7 100644
--- a/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java
+++ b/solr/core/src/test/org/apache/solr/update/SolrIndexConfigTest.java
@@ -17,6 +17,10 @@
  * limitations under the License.
  */
 
+import java.io.File;
+import java.io.IOException;
+import javax.xml.parsers.ParserConfigurationException;
+
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.SimpleMergedSegmentWarmer;
@@ -26,13 +30,10 @@
 import org.apache.solr.core.TestMergePolicyConfig;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.IndexSchemaFactory;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.xml.sax.SAXException;
 
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.File;
-import java.io.IOException;
-
 /**
  * Testcase for {@link SolrIndexConfig}
  *
@@ -40,13 +41,19 @@
  */
 public class SolrIndexConfigTest extends SolrTestCaseJ4 {
 
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig.xml","schema.xml");
+  }
+
   @Test
   public void testFailingSolrIndexConfigCreation() {
     try {
       SolrConfig solrConfig = new SolrConfig("bad-mp-solrconfig.xml");
       SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null, null);
       IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig);
-      solrIndexConfig.toIndexWriterConfig(indexSchema);
+      h.getCore().setLatestSchema(indexSchema);
+      solrIndexConfig.toIndexWriterConfig(h.getCore());
       fail("a mergePolicy should have an empty constructor in order to be instantiated in Solr thus this should fail ");
     } catch (Exception e) {
       // it failed as expected
@@ -61,8 +68,9 @@
         null);
     assertNotNull(solrIndexConfig);
     IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig);
-
-    IndexWriterConfig iwc = solrIndexConfig.toIndexWriterConfig(indexSchema);
+    
+    h.getCore().setLatestSchema(indexSchema);
+    IndexWriterConfig iwc = solrIndexConfig.toIndexWriterConfig(h.getCore());
 
     assertNotNull("null mp", iwc.getMergePolicy());
     assertTrue("mp is not TMP", iwc.getMergePolicy() instanceof TieredMergePolicy);
@@ -87,7 +95,8 @@
     assertEquals(SimpleMergedSegmentWarmer.class.getName(),
         solrIndexConfig.mergedSegmentWarmerInfo.className);
     IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig);
-    IndexWriterConfig iwc = solrIndexConfig.toIndexWriterConfig(indexSchema);
+    h.getCore().setLatestSchema(indexSchema);
+    IndexWriterConfig iwc = solrIndexConfig.toIndexWriterConfig(h.getCore());
     assertEquals(SimpleMergedSegmentWarmer.class, iwc.getMergedSegmentWarmer().getClass());
   }
 
diff --git a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
index 5ac91e4..77d3c04 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/FieldMutatingUpdateProcessorTest.java
@@ -823,6 +823,7 @@
                        f("editors", "John W. Campbell"),
                        f("store1_price", 87),
                        f("store2_price", 78),
+                       f("store3_price", (Object) null),
                        f("list_price", 1000)));
     assertNotNull(d);
     assertEquals("misc",d.getFieldValue("category"));
diff --git a/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java
index dde06e4..e941412 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/SignatureUpdateProcessorFactoryTest.java
@@ -17,12 +17,7 @@
 
 package org.apache.solr.update.processor;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.lucene.util.Constants;
-
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -30,20 +25,23 @@
 import org.apache.solr.common.params.MultiMapSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
-import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.ContentStreamBase;
+import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.handler.BinaryUpdateRequestHandler;
 import org.apache.solr.handler.UpdateRequestHandler;
+import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequestBase;
-import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * 
  */
diff --git a/solr/core/src/java/org/apache/solr/util/IOUtils.java b/solr/core/src/test/org/apache/solr/util/DistanceUnitsTest.java
similarity index 65%
copy from solr/core/src/java/org/apache/solr/util/IOUtils.java
copy to solr/core/src/test/org/apache/solr/util/DistanceUnitsTest.java
index e7b82ea..6b2b5a4 100644
--- a/solr/core/src/java/org/apache/solr/util/IOUtils.java
+++ b/solr/core/src/test/org/apache/solr/util/DistanceUnitsTest.java
@@ -1,10 +1,7 @@
 package org.apache.solr.util;
 
-import java.io.Closeable;
-
-import org.apache.solr.core.HdfsDirectoryFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import com.spatial4j.core.distance.DistanceUtils;
+import org.apache.lucene.util.LuceneTestCase;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -23,16 +20,10 @@
  * limitations under the License.
  */
 
-public class IOUtils {
-  public static Logger LOG = LoggerFactory.getLogger(IOUtils.class);
-  
-  public static void closeQuietly(Closeable closeable) {
-    try {
-      if (closeable != null) {
-        closeable.close();
-      }
-    } catch (Exception e) {
-      LOG.error("Error while closing", e);
-    }
+public class DistanceUnitsTest extends LuceneTestCase {
+
+  public void testAddNewUnits() throws Exception {
+    DistanceUnits.addUnits("lightyears", 6.73430542e-12, 9.4605284e12 * DistanceUtils.KM_TO_DEG);
+    assertTrue(DistanceUnits.getSupportedUnits().contains("lightyears"));
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/util/MockConfigSolr.java b/solr/core/src/test/org/apache/solr/util/MockConfigSolr.java
index 6b67940..506c22c 100644
--- a/solr/core/src/test/org/apache/solr/util/MockConfigSolr.java
+++ b/solr/core/src/test/org/apache/solr/util/MockConfigSolr.java
@@ -19,25 +19,30 @@
 
 import org.apache.solr.core.ConfigSolr;
 import org.apache.solr.core.CoresLocator;
+import org.apache.solr.core.PluginInfo;
 
 /**
  *
  */
 public class MockConfigSolr extends ConfigSolr {
 
+  public MockConfigSolr() {
+    super(null, null);
+  }
+
   @Override
   public CoresLocator getCoresLocator() {
     return null;
   }
 
   @Override
-  protected String getShardHandlerFactoryConfigPath() {
+  public PluginInfo getShardHandlerFactoryPluginInfo() {
     return null;
   }
 
   @Override
-  public boolean isPersistent() {
-    return false;
+  protected String getProperty(CfgProp key) {
+    return null;
   }
 
 }
diff --git a/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java b/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java
index 94d988a..3d736df 100644
--- a/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java
+++ b/solr/core/src/test/org/apache/solr/util/MockCoreContainer.java
@@ -1,7 +1,5 @@
 package org.apache.solr.util;
 
-import org.apache.solr.core.CoreContainer;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -19,6 +17,22 @@
  * limitations under the License.
  */
 
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.CoreDescriptor;
+
+
 public class MockCoreContainer extends CoreContainer {
+  public static class MockCoreDescriptor extends CoreDescriptor {
+    public MockCoreDescriptor() {
+      super(new MockCoreContainer(), "mock", "path");
+    }
+  }
   
+  public MockCoreContainer() {
+    super(new Object());
+  }
+  
+  public String getCoreRootDirectory() {
+    return "coreroot";
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/util/SimplePostToolTest.java b/solr/core/src/test/org/apache/solr/util/SimplePostToolTest.java
index 7bdbac8..68e537e 100644
--- a/solr/core/src/test/org/apache/solr/util/SimplePostToolTest.java
+++ b/solr/core/src/test/org/apache/solr/util/SimplePostToolTest.java
@@ -113,7 +113,7 @@
   @Test
   public void testTypeSupported() {
     assertTrue(t_web.typeSupported("application/pdf"));
-    assertTrue(t_web.typeSupported("text/xml"));
+    assertTrue(t_web.typeSupported("application/xml"));
     assertFalse(t_web.typeSupported("text/foo"));
 
     t_web.fileTypes = "doc,xls,ppt";
diff --git a/solr/core/src/test/org/apache/solr/util/TestObjectReleaseTracker.java b/solr/core/src/test/org/apache/solr/util/TestObjectReleaseTracker.java
new file mode 100644
index 0000000..ce53621
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/util/TestObjectReleaseTracker.java
@@ -0,0 +1,62 @@
+package org.apache.solr.util;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.TestRuleLimitSysouts.Limit;
+import org.apache.solr.common.util.ObjectReleaseTracker;
+import org.junit.Test;
+
+
+@Limit(bytes=150000) // raise the limit, as this test writes to stderr
+public class TestObjectReleaseTracker extends LuceneTestCase {
+  
+  @Test
+  public void testObjectReleaseTracker() {
+    ObjectReleaseTracker.track(new Object());
+    ObjectReleaseTracker.release(new Object());
+    assertFalse(ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
+    assertTrue(ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
+    Object obj = new Object();
+    ObjectReleaseTracker.track(obj);
+    ObjectReleaseTracker.release(obj);
+    assertTrue(ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
+    
+    Object obj1 = new Object();
+    ObjectReleaseTracker.track(obj1);
+    Object obj2 = new Object();
+    ObjectReleaseTracker.track(obj2);
+    Object obj3 = new Object();
+    ObjectReleaseTracker.track(obj3);
+    
+    ObjectReleaseTracker.release(obj1);
+    ObjectReleaseTracker.release(obj2);
+    ObjectReleaseTracker.release(obj3);
+    assertTrue(ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
+    
+    ObjectReleaseTracker.track(obj1);
+    ObjectReleaseTracker.track(obj2);
+    ObjectReleaseTracker.track(obj3);
+    
+    ObjectReleaseTracker.release(obj1);
+    ObjectReleaseTracker.release(obj2);
+    // ObjectReleaseTracker.release(obj3);
+    assertFalse(ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
+    assertTrue(ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
+  }
+}
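The contract exercised above: every track(obj) must be balanced by a release(obj) on the same instance, and clearObjectTrackerAndCheckEmpty() reports whether anything leaked while resetting the tracker either way. A sketch of how a resource class might participate (the class itself is hypothetical):

    import org.apache.solr.common.util.ObjectReleaseTracker;

    class TrackedResource implements AutoCloseable {
      TrackedResource() {
        ObjectReleaseTracker.track(this);   // register on creation
      }

      @Override
      public void close() {
        ObjectReleaseTracker.release(this); // must be the exact same instance
      }
    }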
diff --git a/solr/example/README.txt b/solr/example/README.txt
index fd7cb7d..abea8ee 100644
--- a/solr/example/README.txt
+++ b/solr/example/README.txt
@@ -40,7 +40,7 @@
 
 To add documents to the index, use bin/post, for example:
 
-     bin/post techproducts example/exampledocs/*.xml
+     bin/post -c techproducts example/exampledocs/*.xml
 
 (where "techproducts" is the Solr core name)
 
@@ -48,7 +48,7 @@
 
  * example/solr/README.txt
    For more information about the "Solr Home" and Solr specific configuration
- * http://lucene.apache.org/solr/tutorial.html
+ * http://lucene.apache.org/solr/quickstart.html
    For a Tutorial using this example configuration
  * http://wiki.apache.org/solr/SolrResources 
    For a list of other tutorials and introductory articles.
diff --git a/solr/example/example-DIH/solr/db/conf/schema.xml b/solr/example/example-DIH/solr/db/conf/schema.xml
index d0611fb..d407c5d 100755
--- a/solr/example/example-DIH/solr/db/conf/schema.xml
+++ b/solr/example/example-DIH/solr/db/conf/schema.xml
@@ -697,7 +697,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
         Parameters:
diff --git a/solr/example/example-DIH/solr/db/conf/solrconfig.xml b/solr/example/example-DIH/solr/db/conf/solrconfig.xml
index f06001f..2b32375 100755
--- a/solr/example/example-DIH/solr/db/conf/solrconfig.xml
+++ b/solr/example/example-DIH/solr/db/conf/solrconfig.xml
@@ -276,15 +276,6 @@
     <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
-         instead of from the Directory. Hosts in a master/slave setup
-         should have this set to false while those in a SolrCloud
-         cluster need to be set to true. Default: true
-      -->
-    <!-- 
-    <nrtMode>true</nrtMode>
-      -->
-
     <!-- Commit Deletion Policy
          Custom deletion policies can be specified here. The class must
          implement org.apache.lucene.index.IndexDeletionPolicy.
@@ -1736,8 +1727,9 @@
   <!--
      Custom response writers can be declared as needed...
     -->
-    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy"/>
-  
+  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+    <str name="template.base.dir">${velocity.template.base.dir:}</str>
+  </queryResponseWriter>
 
   <!-- XSLT response writer transforms the XML output by any xslt file found
        in Solr's conf/xslt directory.  Changes to xslt files are checked for
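The new ${velocity.template.base.dir:} value uses Solr's standard ${name:default} property substitution with an empty default, so templates resolve from the velocity jar unless the property is supplied at startup, for example:

    bin/solr start -Dvelocity.template.base.dir=/path/to/templates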
diff --git a/solr/example/example-DIH/solr/db/core.properties b/solr/example/example-DIH/solr/db/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/example/example-DIH/solr/db/core.properties
diff --git a/solr/example/example-DIH/solr/mail/conf/schema.xml b/solr/example/example-DIH/solr/mail/conf/schema.xml
index f7fc006..d5c3b82 100755
--- a/solr/example/example-DIH/solr/mail/conf/schema.xml
+++ b/solr/example/example-DIH/solr/mail/conf/schema.xml
@@ -616,7 +616,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
         Parameters:
diff --git a/solr/example/example-DIH/solr/mail/conf/solrconfig.xml b/solr/example/example-DIH/solr/mail/conf/solrconfig.xml
index cc502ff..44c1a37 100755
--- a/solr/example/example-DIH/solr/mail/conf/solrconfig.xml
+++ b/solr/example/example-DIH/solr/mail/conf/solrconfig.xml
@@ -279,15 +279,6 @@
     <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
-         instead of from the Directory. Hosts in a master/slave setup
-         should have this set to false while those in a SolrCloud
-         cluster need to be set to true. Default: true
-      -->
-    <!-- 
-    <nrtMode>true</nrtMode>
-      -->
-
     <!-- Commit Deletion Policy
          Custom deletion policies can be specified here. The class must
          implement org.apache.lucene.index.IndexDeletionPolicy.
@@ -1707,8 +1698,9 @@
   <!--
      Custom response writers can be declared as needed...
     -->
-    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy"/>
-  
+  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+    <str name="template.base.dir">${velocity.template.base.dir:}</str>
+  </queryResponseWriter>
 
   <!-- XSLT response writer transforms the XML output by any xslt file found
        in Solr's conf/xslt directory.  Changes to xslt files are checked for
diff --git a/solr/example/example-DIH/solr/mail/core.properties b/solr/example/example-DIH/solr/mail/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/example/example-DIH/solr/mail/core.properties
diff --git a/solr/example/example-DIH/solr/rss/conf/schema.xml b/solr/example/example-DIH/solr/rss/conf/schema.xml
index 874eedf..9d6c8d6 100755
--- a/solr/example/example-DIH/solr/rss/conf/schema.xml
+++ b/solr/example/example-DIH/solr/rss/conf/schema.xml
@@ -647,7 +647,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
         Parameters:
diff --git a/solr/example/example-DIH/solr/rss/conf/solrconfig.xml b/solr/example/example-DIH/solr/rss/conf/solrconfig.xml
index c73dc59..cba64b8 100755
--- a/solr/example/example-DIH/solr/rss/conf/solrconfig.xml
+++ b/solr/example/example-DIH/solr/rss/conf/solrconfig.xml
@@ -276,15 +276,6 @@
     <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
-         instead of from the Directory. Hosts in a master/slave setup
-         should have this set to false while those in a SolrCloud
-         cluster need to be set to true. Default: true
-      -->
-    <!-- 
-    <nrtMode>true</nrtMode>
-      -->
-
     <!-- Commit Deletion Policy
          Custom deletion policies can be specified here. The class must
          implement org.apache.lucene.index.IndexDeletionPolicy.
@@ -1704,8 +1695,9 @@
   <!--
      Custom response writers can be declared as needed...
     -->
-    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy"/>
-  
+  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+    <str name="template.base.dir">${velocity.template.base.dir:}</str>
+  </queryResponseWriter>
 
   <!-- XSLT response writer transforms the XML output by any xslt file found
        in Solr's conf/xslt directory.  Changes to xslt files are checked for
diff --git a/solr/example/example-DIH/solr/rss/core.properties b/solr/example/example-DIH/solr/rss/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/example/example-DIH/solr/rss/core.properties
diff --git a/solr/example/example-DIH/solr/solr.xml b/solr/example/example-DIH/solr/solr.xml
index a9d924b..191e51f 100644
--- a/solr/example/example-DIH/solr/solr.xml
+++ b/solr/example/example-DIH/solr/solr.xml
@@ -1,10 +1,2 @@
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<solr sharedLib="lib" persistent="true">
-	<cores adminPath="/admin/cores" hostPort="${jetty.port:8983}" hostContext="${hostContext:solr}">
-		<core default="true" instanceDir="db" name="db"/>
-		<core default="false" instanceDir="rss" name="rss"/>
-		<core default="false" instanceDir="mail" name="mail"/>
-		<core default="false" instanceDir="tika" name="tika"/>
-    <core default="false" instanceDir="solr" name="solr"/>
-	</cores>
-</solr>
+<solr></solr>
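With the move to core discovery, solr.xml no longer enumerates cores: Solr finds each core by the presence of a core.properties file in its instance directory, and the empty core.properties files added in this patch are sufficient (the directory name becomes the core name). Explicit settings remain possible, for example:

    # example-DIH/solr/db/core.properties -- optional; an empty file works too
    name=db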
diff --git a/solr/example/example-DIH/solr/solr/conf/schema.xml b/solr/example/example-DIH/solr/solr/conf/schema.xml
index 88eca72..6e6530d 100755
--- a/solr/example/example-DIH/solr/solr/conf/schema.xml
+++ b/solr/example/example-DIH/solr/solr/conf/schema.xml
@@ -697,7 +697,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
         Parameters:
diff --git a/solr/example/example-DIH/solr/solr/conf/solrconfig.xml b/solr/example/example-DIH/solr/solr/conf/solrconfig.xml
index 5a6ae8c..e640a4c 100755
--- a/solr/example/example-DIH/solr/solr/conf/solrconfig.xml
+++ b/solr/example/example-DIH/solr/solr/conf/solrconfig.xml
@@ -276,15 +276,6 @@
     <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
-         instead of from the Directory. Hosts in a master/slave setup
-         should have this set to false while those in a SolrCloud
-         cluster need to be set to true. Default: true
-      -->
-    <!-- 
-    <nrtMode>true</nrtMode>
-      -->
-
     <!-- Commit Deletion Policy
          Custom deletion policies can be specified here. The class must
          implement org.apache.lucene.index.IndexDeletionPolicy.
@@ -1735,8 +1726,9 @@
   <!--
      Custom response writers can be declared as needed...
     -->
-    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy"/>
-  
+  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+    <str name="template.base.dir">${velocity.template.base.dir:}</str>
+  </queryResponseWriter>
 
   <!-- XSLT response writer transforms the XML output by any xslt file found
        in Solr's conf/xslt directory.  Changes to xslt files are checked for
diff --git a/solr/example/example-DIH/solr/solr/core.properties b/solr/example/example-DIH/solr/solr/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/example/example-DIH/solr/solr/core.properties
diff --git a/solr/example/example-DIH/solr/tika/conf/schema.xml b/solr/example/example-DIH/solr/tika/conf/schema.xml
index 2e24a48..f4fe0b9 100755
--- a/solr/example/example-DIH/solr/tika/conf/schema.xml
+++ b/solr/example/example-DIH/solr/tika/conf/schema.xml
@@ -528,7 +528,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
 
 
diff --git a/solr/example/example-DIH/solr/tika/conf/solrconfig.xml b/solr/example/example-DIH/solr/tika/conf/solrconfig.xml
index 4f85cc1..5240cf0 100755
--- a/solr/example/example-DIH/solr/tika/conf/solrconfig.xml
+++ b/solr/example/example-DIH/solr/tika/conf/solrconfig.xml
@@ -277,15 +277,6 @@
     <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
-         instead of from the Directory. Hosts in a master/slave setup
-         should have this set to false while those in a SolrCloud
-         cluster need to be set to true. Default: true
-      -->
-    <!-- 
-    <nrtMode>true</nrtMode>
-      -->
-
     <!-- Commit Deletion Policy
          Custom deletion policies can be specified here. The class must
          implement org.apache.lucene.index.IndexDeletionPolicy.
@@ -1713,8 +1704,9 @@
   <!--
      Custom response writers can be declared as needed...
     -->
-    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy"/>
-  
+  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+    <str name="template.base.dir">${velocity.template.base.dir:}</str>
+  </queryResponseWriter>
 
   <!-- XSLT response writer transforms the XML output by any xslt file found
        in Solr's conf/xslt directory.  Changes to xslt files are checked for
diff --git a/solr/example/example-DIH/solr/tika/core.properties b/solr/example/example-DIH/solr/tika/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/example/example-DIH/solr/tika/core.properties
diff --git a/solr/example/exampledocs/utf8-example.xml b/solr/example/exampledocs/utf8-example.xml
index e1334d4..ee300a6 100644
--- a/solr/example/exampledocs/utf8-example.xml
+++ b/solr/example/exampledocs/utf8-example.xml
@@ -17,7 +17,7 @@
 -->
 
 <!-- 
-  After posting this to SOLR with post.sh, searching for "êâîôû" from 
+  After posting this to Solr with bin/post, searching for "êâîôû" from
   the solr/admin/ search page must return this document.
  -->
 
diff --git a/solr/example/files/README.txt b/solr/example/files/README.txt
new file mode 100644
index 0000000..fd4dad0
--- /dev/null
+++ b/solr/example/files/README.txt
@@ -0,0 +1,16 @@
+bin/solr stop
+rm -Rf server/solr/files/
+
+# templates extracted with:
+#    unzip  -j dist/solr-velocity-*.jar velocity/* -x *.properties -d example/files/templates/
+bin/solr start -Dvelocity.template.base.dir=<absolute path to example/files/templates>
+# TODO: make it so an install dir relative path can be used somehow?
+bin/solr create_core -c files
+bin/post -c files ~/Documents
+curl http://localhost:8983/solr/files/config/params -H 'Content-type:application/json'  -d '{
+"update" : {
+  "facets": {
+    "facet.field":"content_type"
+    }
+  }
+}'
\ No newline at end of file
diff --git a/solr/example/films/README.txt b/solr/example/films/README.txt
index 129a603..74a29a1 100644
--- a/solr/example/films/README.txt
+++ b/solr/example/films/README.txt
@@ -3,7 +3,7 @@
 
 The data is fetched from Freebase and the data license is present in the films-LICENSE.txt file.
 
-This data consists of the following fields -
+This data consists of the following fields:
  * "id" - unique identifier for the movie
  * "name" - Name of the movie
  * "directed_by" - The person(s) who directed the making of the film
@@ -14,53 +14,123 @@
    * Start Solr:
        bin/solr start
 
-   * Create a "films" core
-       bin/solr create_core -n films -c data_driven_schema_configs
+   * Create a "films" core:
+       bin/solr create -c films
 
-   * Update the schema (by default it will guess the field types based on the date as it is indexed):
-curl http://localhost:8983/solr/films/schema/fields -X POST -H 'Content-type:application/json' --data-binary '
-[
-    {
-        "name":"genre",
-        "type":"string",
-        "stored":true,
-        "multiValued":true
-    },
-    {
-        "name":"directed_by",
-        "type":"string",
-        "stored":true,
-        "multiValued":true
-    },
-    {
+   * Set the schema on a couple of fields that Solr would otherwise guess differently than we'd like:
+curl http://localhost:8983/solr/films/schema -X POST -H 'Content-type:application/json' --data-binary '{
+    "add-field" : {
         "name":"name",
         "type":"text_general",
         "stored":true
     },
-    {
+    "add-field" : {
         "name":"initial_release_date",
         "type":"tdate",
         "stored":true
     }
-]'
+}'
 
    * Now let's index the data, using one of these three commands:
 
-     - JSON: bin/post films example/exampledocs/films.json
-     - XML: bin/post films example/exampledocs/films.xml
-     - CSV: bin/post films example/exampledocs/films.csv params=f.genre.split=true&f.directed_by.split=true&f.genre.separator=|&f.directed_by.separator=|
+     - JSON: bin/post -c films example/films/films.json
+     - XML: bin/post -c films example/films/films.xml
+     - CSV: bin/post \
+                  -c films \
+                  example/films/films.csv \
+                  -params "f.genre.split=true&f.directed_by.split=true&f.genre.separator=|&f.directed_by.separator=|"
 
-   * Let's get searching.
+   * Let's get searching!
      - Search for 'Batman':
        http://localhost:8983/solr/films/query?q=name:batman
 
+       * If you get an error about the name field not existing, you haven't yet indexed the data.
+       * If you don't get an error but zero results, chances are the _name_ field schema type override wasn't set
+         before the data was first indexed (it ended up as a "string" type, requiring an exact match, even on case).
+         It's easiest to simply reset the environment and try again, ensuring that each step executes successfully.
+
      - Show me all 'Super hero' movies:
        http://localhost:8983/solr/films/query?q=*:*&fq=genre:%22Superhero%20movie%22
 
-     - Let's see the distribution of genres across all the movies. See the facet section for the counts:
+     - Let's see the distribution of genres across all the movies. See the facet section of the response for the counts:
        http://localhost:8983/solr/films/query?q=*:*&facet=true&facet.field=genre
 
+     - Browse the indexed films in a traditional browser search interface:
+       http://localhost:8983/solr/films/browse
+
+       Now browse including the genre field as a facet:
+       http://localhost:8983/solr/films/browse?facet.field=genre
+
+       If you want a facet for /browse to stick around for every request, add the facet.field to the "facets"
+       param set (which the /browse handler is already configured to use):
+curl http://localhost:8983/solr/films/config/params -H 'Content-type:application/json'  -d '{
+"update" : {
+  "facets": {
+    "facet.field":"genre"
+    }
+  }
+}'
+
+        And now http://localhost:8983/solr/films/browse will display the _genre_ facet automatically.
+
 Exploring the data further - 
 
-  * Increase the MAX_ITERATIONS value, put in your freebase API_KEY and run the exampledocs_generator.py script using Python 3.
-    Now re-index Solr with the new data.
\ No newline at end of file
+  * Increase the MAX_ITERATIONS value, put in your Freebase API_KEY, and run the film_data_generator.py script using
+    Python 3. Then re-index Solr with the new data.
+
+FAQ:
+  Why override the schema of the _name_ and _initial_release_date_ fields?
+
+     Without overriding those field types, the _name_ field would have been guessed as a multi-valued string field type
+     and _initial_release_date_ would have been guessed as a multi-valued tdate type.  For this particular data set it
+     makes more sense for the movie name to be a single-valued, general full-text searchable field, and for the
+     release date to be single-valued as well.
+
+  How do I clear and reset my environment?
+
+      See the script below.
+
+  Is there an easy-to-copy/paste script that does all of the above?
+
+    Here ya go << END_OF_SCRIPT
+
+bin/solr stop
+rm server/logs/*.log
+rm -Rf server/solr/films/
+bin/solr start
+bin/solr create -c films
+curl http://localhost:8983/solr/films/schema -X POST -H 'Content-type:application/json' --data-binary '{
+    "add-field" : {
+        "name":"name",
+        "type":"text_general",
+        "stored":true
+    },
+    "add-field" : {
+        "name":"initial_release_date",
+        "type":"tdate",
+        "stored":true
+    }
+}'
+bin/post -c films example/films/films.json
+curl http://localhost:8983/solr/films/config/params -H 'Content-type:application/json'  -d '{
+"update" : {
+  "facets": {
+    "facet.field":"genre"
+    }
+  }
+}'
+
+# END_OF_SCRIPT
+
+Additional fun -
+
+Add highlighting:
+curl http://localhost:8983/solr/films/config/params -H 'Content-type:application/json'  -d '{
+"set" : {
+  "browse": {
+    "hl":"on",
+    "hl.fl":"name"
+    }
+  }
+}'
+Try http://localhost:8983/solr/films/browse?q=batman now, and you'll see "batman" highlighted in the results.
diff --git a/solr/example/films/film_data_generator.py b/solr/example/films/film_data_generator.py
index e09edd6..f23224f 100644
--- a/solr/example/films/film_data_generator.py
+++ b/solr/example/films/film_data_generator.py
@@ -104,10 +104,6 @@
 
 if __name__ == "__main__":
   filmlist = []
-  #Adding 1 entry manually to play nice with schemaless mode
-  firstFilm = {'directed_by': ['Wes Anderson'], 'initial_release_date': '2014-03-28', 'genre': ['Comedy'],
-   'name': 'The Grand Budapest Hotel', 'id': '/en/001'}
-  filmlist.append(firstFilm)
   cursor = do_query(filmlist)
   i=0
   while(cursor):
diff --git a/solr/example/films/films.csv b/solr/example/films/films.csv
index c0422b1..82fe40d 100644
--- a/solr/example/films/films.csv
+++ b/solr/example/films/films.csv
@@ -1,5 +1,4 @@
 name,directed_by,genre,type,id,initial_release_date
-The Grand Budapest Hotel,Wes Anderson,Comedy,,/en/001,2014-03-28
 .45,Gary Lennon,Black comedy|Thriller|Psychological thriller|Indie film|Action Film|Crime Thriller|Crime Fiction|Drama,,/en/45_2006,2006-11-30
 9,Shane Acker,Computer Animation|Animation|Apocalyptic and post-apocalyptic fiction|Science Fiction|Short Film|Thriller|Fantasy,,/en/9_2005,2005-04-21
 69,Lee Sang-il,Japanese Movies|Drama,,/en/69_2004,2004-07-10
@@ -100,7 +99,7 @@
 Adventures Into Digital Comics,Sébastien Dumesnil,Documentary film,,/en/adventures_into_digital_comics,
 Ae Fond Kiss...,Ken Loach,Romance Film|Drama,,/en/ae_fond_kiss,2004-02-13
 Aetbaar,Vikram Bhatt,Thriller|Romance Film|Mystery|Horror|Musical|Bollywood|World cinema|Drama|Musical Drama,,/en/aetbaar,2004-01-23
-Aethiree,K. S. Ravikumar,Comedy|Tamil cinema|World cinema,,/en/aethiree,2004-04-23
+Aethirree,K. S. Ravikumar,Comedy|Tamil cinema|World cinema,,/en/aethiree,2004-04-23
 After Innocence,Jessica Sanders,Documentary film|Crime Fiction|Political cinema|Culture &amp; Society|Law &amp; Crime|Biographical film,,/en/after_innocence,
 After the Sunset,Brett Ratner,Crime Fiction|Action/Adventure|Action Film|Crime Thriller|Heist film|Caper story|Crime Comedy|Comedy,,/en/after_the_sunset,2004-11-10
 Aftermath,Thomas Farone,Crime Fiction|Thriller,,/en/aftermath_2007,2013-03-01
diff --git a/solr/example/films/films.json b/solr/example/films/films.json
index 886c5a9..5436b76 100644
--- a/solr/example/films/films.json
+++ b/solr/example/films/films.json
@@ -1,18 +1,9 @@
 [
   {
-    "genre": [
-      "Comedy"
-    ],
-    "initial_release_date": "2014-03-28",
-    "directed_by": [
-      "Wes Anderson"
-    ],
-    "name": "The Grand Budapest Hotel",
-    "id": "/en/001"
-  },
-  {
     "id": "/en/45_2006",
-    "name": ".45",
+    "directed_by": [
+      "Gary Lennon"
+    ],
     "initial_release_date": "2006-11-30",
     "genre": [
       "Black comedy",
@@ -24,13 +15,13 @@
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Gary Lennon"
-    ]
+    "name": ".45"
   },
   {
     "id": "/en/9_2005",
-    "name": "9",
+    "directed_by": [
+      "Shane Acker"
+    ],
     "initial_release_date": "2005-04-21",
     "genre": [
       "Computer Animation",
@@ -41,25 +32,25 @@
       "Thriller",
       "Fantasy"
     ],
-    "directed_by": [
-      "Shane Acker"
-    ]
+    "name": "9"
   },
   {
     "id": "/en/69_2004",
-    "name": "69",
+    "directed_by": [
+      "Lee Sang-il"
+    ],
     "initial_release_date": "2004-07-10",
     "genre": [
       "Japanese Movies",
       "Drama"
     ],
-    "directed_by": [
-      "Lee Sang-il"
-    ]
+    "name": "69"
   },
   {
     "id": "/en/300_2007",
-    "name": "300",
+    "directed_by": [
+      "Zack Snyder"
+    ],
     "initial_release_date": "2006-12-09",
     "genre": [
       "Epic film",
@@ -71,13 +62,13 @@
       "Superhero movie",
       "Historical Epic"
     ],
-    "directed_by": [
-      "Zack Snyder"
-    ]
+    "name": "300"
   },
   {
     "id": "/en/2046_2004",
-    "name": "2046",
+    "directed_by": [
+      "Wong Kar-wai"
+    ],
     "initial_release_date": "2004-05-20",
     "genre": [
       "Romance Film",
@@ -85,36 +76,36 @@
       "Science Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Wong Kar-wai"
-    ]
+    "name": "2046"
   },
   {
     "id": "/en/quien_es_el_senor_lopez",
-    "name": "\u00bfQui\u00e9n es el se\u00f1or L\u00f3pez?",
+    "directed_by": [
+      "Luis Mandoki"
+    ],
     "genre": [
       "Documentary film"
     ],
-    "directed_by": [
-      "Luis Mandoki"
-    ]
+    "name": "\u00bfQui\u00e9n es el se\u00f1or L\u00f3pez?"
   },
   {
     "id": "/en/weird_al_yankovic_the_ultimate_video_collection",
-    "name": "\"Weird Al\" Yankovic: The Ultimate Video Collection",
+    "directed_by": [
+      "Jay Levey",
+      "\"Weird Al\" Yankovic"
+    ],
     "initial_release_date": "2003-11-04",
     "genre": [
       "Music video",
       "Parody"
     ],
-    "directed_by": [
-      "Jay Levey",
-      "\"Weird Al\" Yankovic"
-    ]
+    "name": "\"Weird Al\" Yankovic: The Ultimate Video Collection"
   },
   {
     "id": "/en/15_park_avenue",
-    "name": "15 Park Avenue",
+    "directed_by": [
+      "Aparna Sen"
+    ],
     "initial_release_date": "2005-10-27",
     "genre": [
       "Art film",
@@ -123,37 +114,37 @@
       "Drama",
       "Musical Drama"
     ],
-    "directed_by": [
-      "Aparna Sen"
-    ]
+    "name": "15 Park Avenue"
   },
   {
     "id": "/en/2_fast_2_furious",
-    "name": "2 Fast 2 Furious",
+    "directed_by": [
+      "John Singleton"
+    ],
     "initial_release_date": "2003-06-03",
     "genre": [
       "Thriller",
       "Action Film",
       "Crime Fiction"
     ],
-    "directed_by": [
-      "John Singleton"
-    ]
+    "name": "2 Fast 2 Furious"
   },
   {
     "id": "/en/7g_rainbow_colony",
-    "name": "7G Rainbow Colony",
+    "directed_by": [
+      "Selvaraghavan"
+    ],
     "initial_release_date": "2004-10-15",
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Selvaraghavan"
-    ]
+    "name": "7G Rainbow Colony"
   },
   {
     "id": "/en/3-iron",
-    "name": "3-Iron",
+    "directed_by": [
+      "Kim Ki-duk"
+    ],
     "initial_release_date": "2004-09-07",
     "genre": [
       "Crime Fiction",
@@ -162,13 +153,13 @@
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Kim Ki-duk"
-    ]
+    "name": "3-Iron"
   },
   {
     "id": "/en/10_5_apocalypse",
-    "name": "10.5: Apocalypse",
+    "directed_by": [
+      "John Lafia"
+    ],
     "initial_release_date": "2006-03-18",
     "genre": [
       "Disaster Film",
@@ -177,13 +168,13 @@
       "Action/Adventure",
       "Action Film"
     ],
-    "directed_by": [
-      "John Lafia"
-    ]
+    "name": "10.5: Apocalypse"
   },
   {
     "id": "/en/8_mile",
-    "name": "8 Mile",
+    "directed_by": [
+      "Curtis Hanson"
+    ],
     "initial_release_date": "2002-09-08",
     "genre": [
       "Musical",
@@ -191,13 +182,13 @@
       "Drama",
       "Musical Drama"
     ],
-    "directed_by": [
-      "Curtis Hanson"
-    ]
+    "name": "8 Mile"
   },
   {
     "id": "/en/100_girls",
-    "name": "100 Girls",
+    "directed_by": [
+      "Michael Davis"
+    ],
     "initial_release_date": "2001-09-25",
     "genre": [
       "Romantic comedy",
@@ -206,13 +197,13 @@
       "Teen film",
       "Comedy"
     ],
-    "directed_by": [
-      "Michael Davis"
-    ]
+    "name": "100 Girls"
   },
   {
     "id": "/en/40_days_and_40_nights",
-    "name": "40 Days and 40 Nights",
+    "directed_by": [
+      "Michael Lehmann"
+    ],
     "initial_release_date": "2002-03-01",
     "genre": [
       "Romance Film",
@@ -221,20 +212,10 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Michael Lehmann"
-    ]
+    "name": "40 Days and 40 Nights"
   },
   {
     "id": "/en/50_cent_the_new_breed",
-    "name": "50 Cent: The New Breed",
-    "initial_release_date": "2003-04-15",
-    "genre": [
-      "Documentary film",
-      "Music",
-      "Concert film",
-      "Biographical film"
-    ],
     "directed_by": [
       "Don Robinson",
       "Damon Johnson",
@@ -244,11 +225,21 @@
       "John Quigley",
       "Jessy Terrero",
       "Noa Shaw"
-    ]
+    ],
+    "initial_release_date": "2003-04-15",
+    "genre": [
+      "Documentary film",
+      "Music",
+      "Concert film",
+      "Biographical film"
+    ],
+    "name": "50 Cent: The New Breed"
   },
   {
     "id": "/en/3_the_dale_earnhardt_story",
-    "name": "3: The Dale Earnhardt Story",
+    "directed_by": [
+      "Russell Mulcahy"
+    ],
     "initial_release_date": "2004-12-11",
     "genre": [
       "Sports",
@@ -256,13 +247,13 @@
       "Biographical film",
       "Drama"
     ],
-    "directed_by": [
-      "Russell Mulcahy"
-    ]
+    "name": "3: The Dale Earnhardt Story"
   },
   {
     "id": "/en/61__2001",
-    "name": "61*",
+    "directed_by": [
+      "Billy Crystal"
+    ],
     "initial_release_date": "2001-04-28",
     "genre": [
       "Sports",
@@ -271,13 +262,13 @@
       "Television film",
       "Drama"
     ],
-    "directed_by": [
-      "Billy Crystal"
-    ]
+    "name": "61*"
   },
   {
     "id": "/en/24_hour_party_people",
-    "name": "24 Hour Party People",
+    "directed_by": [
+      "Michael Winterbottom"
+    ],
     "initial_release_date": "2002-02-13",
     "genre": [
       "Biographical film",
@@ -286,13 +277,13 @@
       "Music",
       "Drama"
     ],
-    "directed_by": [
-      "Michael Winterbottom"
-    ]
+    "name": "24 Hour Party People"
   },
   {
     "id": "/en/10th_wolf",
-    "name": "10th &amp; Wolf",
+    "directed_by": [
+      "Robert Moresco"
+    ],
     "initial_release_date": "2006-08-18",
     "genre": [
       "Mystery",
@@ -302,51 +293,51 @@
       "Gangster Film",
       "Drama"
     ],
-    "directed_by": [
-      "Robert Moresco"
-    ]
+    "name": "10th &amp; Wolf"
   },
   {
     "id": "/en/25th_hour",
-    "name": "25th Hour",
+    "directed_by": [
+      "Spike Lee"
+    ],
     "initial_release_date": "2002-12-16",
     "genre": [
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Spike Lee"
-    ]
+    "name": "25th Hour"
   },
   {
     "id": "/en/7_seconds_2005",
-    "name": "7 Seconds",
+    "directed_by": [
+      "Simon Fellows"
+    ],
     "initial_release_date": "2005-06-28",
     "genre": [
       "Thriller",
       "Action Film",
       "Crime Fiction"
     ],
-    "directed_by": [
-      "Simon Fellows"
-    ]
+    "name": "7 Seconds"
   },
   {
     "id": "/en/28_days_later",
-    "name": "28 Days Later",
+    "directed_by": [
+      "Danny Boyle"
+    ],
     "initial_release_date": "2002-11-01",
     "genre": [
       "Science Fiction",
       "Horror",
       "Thriller"
     ],
-    "directed_by": [
-      "Danny Boyle"
-    ]
+    "name": "28 Days Later"
   },
   {
     "id": "/en/21_grams",
-    "name": "21 Grams",
+    "directed_by": [
+      "Alejandro Gonz\u00e1lez I\u00f1\u00e1rritu"
+    ],
     "initial_release_date": "2003-09-05",
     "genre": [
       "Thriller",
@@ -354,13 +345,13 @@
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Alejandro Gonz\u00e1lez I\u00f1\u00e1rritu"
-    ]
+    "name": "21 Grams"
   },
   {
     "id": "/en/9th_company",
-    "name": "The 9th Company",
+    "directed_by": [
+      "Fedor Bondarchuk"
+    ],
     "initial_release_date": "2005-09-29",
     "genre": [
       "War film",
@@ -368,38 +359,38 @@
       "Historical fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Fedor Bondarchuk"
-    ]
+    "name": "The 9th Company"
   },
   {
     "id": "/en/102_dalmatians",
-    "name": "102 Dalmatians",
+    "directed_by": [
+      "Kevin Lima"
+    ],
     "initial_release_date": "2000-11-22",
     "genre": [
       "Family",
       "Adventure Film",
       "Comedy"
     ],
-    "directed_by": [
-      "Kevin Lima"
-    ]
+    "name": "102 Dalmatians"
   },
   {
     "id": "/en/16_years_of_alcohol",
-    "name": "16 Years of Alcohol",
+    "directed_by": [
+      "Richard Jobson"
+    ],
     "initial_release_date": "2003-08-14",
     "genre": [
       "Indie film",
       "Drama"
     ],
-    "directed_by": [
-      "Richard Jobson"
-    ]
+    "name": "16 Years of Alcohol"
   },
   {
     "id": "/en/12b",
-    "name": "12B",
+    "directed_by": [
+      "Jeeva"
+    ],
     "initial_release_date": "2001-09-28",
     "genre": [
       "Romance Film",
@@ -408,13 +399,13 @@
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Jeeva"
-    ]
+    "name": "12B"
   },
   {
     "id": "/en/2009_lost_memories",
-    "name": "2009 Lost Memories",
+    "directed_by": [
+      "Lee Si-myung"
+    ],
     "initial_release_date": "2002-02-01",
     "genre": [
       "Thriller",
@@ -423,13 +414,13 @@
       "Mystery",
       "Drama"
     ],
-    "directed_by": [
-      "Lee Si-myung"
-    ]
+    "name": "2009 Lost Memories"
   },
   {
     "id": "/en/16_blocks",
-    "name": "16 Blocks",
+    "directed_by": [
+      "Richard Donner"
+    ],
     "initial_release_date": "2006-03-01",
     "genre": [
       "Thriller",
@@ -437,13 +428,13 @@
       "Action Film",
       "Drama"
     ],
-    "directed_by": [
-      "Richard Donner"
-    ]
+    "name": "16 Blocks"
   },
   {
     "id": "/en/15_minutes",
-    "name": "15 Minutes",
+    "directed_by": [
+      "John Herzfeld"
+    ],
     "initial_release_date": "2001-03-01",
     "genre": [
       "Thriller",
@@ -452,26 +443,26 @@
       "Crime Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "John Herzfeld"
-    ]
+    "name": "15 Minutes"
   },
   {
     "id": "/en/50_first_dates",
-    "name": "50 First Dates",
+    "directed_by": [
+      "Peter Segal"
+    ],
     "initial_release_date": "2004-02-13",
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy"
     ],
-    "directed_by": [
-      "Peter Segal"
-    ]
+    "name": "50 First Dates"
   },
   {
     "id": "/en/9_songs",
-    "name": "9 Songs",
+    "directed_by": [
+      "Michael Winterbottom"
+    ],
     "initial_release_date": "2004-05-16",
     "genre": [
       "Erotica",
@@ -481,25 +472,25 @@
       "Musical Drama",
       "Drama"
     ],
-    "directed_by": [
-      "Michael Winterbottom"
-    ]
+    "name": "9 Songs"
   },
   {
     "id": "/en/20_fingers_2004",
-    "name": "20 Fingers",
+    "directed_by": [
+      "Mania Akbari"
+    ],
     "initial_release_date": "2004-09-01",
     "genre": [
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Mania Akbari"
-    ]
+    "name": "20 Fingers"
   },
   {
     "id": "/en/3_needles",
-    "name": "3 Needles",
+    "directed_by": [
+      "Thom Fitzgerald"
+    ],
     "initial_release_date": "2006-12-01",
     "genre": [
       "Indie film",
@@ -507,13 +498,13 @@
       "Chinese Movies",
       "Drama"
     ],
-    "directed_by": [
-      "Thom Fitzgerald"
-    ]
+    "name": "3 Needles"
   },
   {
     "id": "/en/28_days_2000",
-    "name": "28 Days",
+    "directed_by": [
+      "Betty Thomas"
+    ],
     "initial_release_date": "2000-02-08",
     "genre": [
       "Comedy-drama",
@@ -521,13 +512,14 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Betty Thomas"
-    ]
+    "name": "28 Days"
   },
   {
     "id": "/en/36_china_town",
-    "name": "36 China Town",
+    "directed_by": [
+      "Abbas Burmawalla",
+      "Mustan Burmawalla"
+    ],
     "initial_release_date": "2006-04-21",
     "genre": [
       "Thriller",
@@ -538,14 +530,13 @@
       "Bollywood",
       "Musical comedy"
     ],
-    "directed_by": [
-      "Abbas Burmawalla",
-      "Mustan Burmawalla"
-    ]
+    "name": "36 China Town"
   },
   {
     "id": "/en/7_mujeres_1_homosexual_y_carlos",
-    "name": "7 mujeres, 1 homosexual y Carlos",
+    "directed_by": [
+      "Rene Bueno"
+    ],
     "initial_release_date": "2004-06-01",
     "genre": [
       "Romantic comedy",
@@ -556,13 +547,13 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Rene Bueno"
-    ]
+    "name": "7 mujeres, 1 homosexual y Carlos"
   },
   {
     "id": "/en/88_minutes",
-    "name": "88 Minutes",
+    "directed_by": [
+      "Jon Avnet"
+    ],
     "initial_release_date": "2007-02-14",
     "genre": [
       "Thriller",
@@ -570,26 +561,26 @@
       "Mystery",
       "Drama"
     ],
-    "directed_by": [
-      "Jon Avnet"
-    ]
+    "name": "88 Minutes"
   },
   {
     "id": "/en/500_years_later",
-    "name": "500 Years Later",
+    "directed_by": [
+      "Owen 'Alik Shahadah"
+    ],
     "initial_release_date": "2005-10-11",
     "genre": [
       "Indie film",
       "Documentary film",
       "History"
     ],
-    "directed_by": [
-      "Owen 'Alik Shahadah"
-    ]
+    "name": "500 Years Later"
   },
   {
     "id": "/en/50_ways_of_saying_fabulous",
-    "name": "50 Ways of Saying Fabulous",
+    "directed_by": [
+      "Stewart Main"
+    ],
     "genre": [
       "LGBT",
       "Indie film",
@@ -599,13 +590,13 @@
       "Coming of age",
       "Drama"
     ],
-    "directed_by": [
-      "Stewart Main"
-    ]
+    "name": "50 Ways of Saying Fabulous"
   },
   {
     "id": "/en/5x2",
-    "name": "5x2",
+    "directed_by": [
+      "Fran\u00e7ois Ozon"
+    ],
     "initial_release_date": "2004-09-01",
     "genre": [
       "Romance Film",
@@ -614,26 +605,26 @@
       "Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Fran\u00e7ois Ozon"
-    ]
+    "name": "5x2"
   },
   {
     "id": "/en/28_weeks_later",
-    "name": "28 Weeks Later",
+    "directed_by": [
+      "Juan Carlos Fresnadillo"
+    ],
     "initial_release_date": "2007-04-26",
     "genre": [
       "Science Fiction",
       "Horror",
       "Thriller"
     ],
-    "directed_by": [
-      "Juan Carlos Fresnadillo"
-    ]
+    "name": "28 Weeks Later"
   },
   {
     "id": "/en/10_5",
-    "name": "10.5",
+    "directed_by": [
+      "John Lafia"
+    ],
     "initial_release_date": "2004-05-02",
     "genre": [
       "Disaster Film",
@@ -641,13 +632,13 @@
       "Action/Adventure",
       "Drama"
     ],
-    "directed_by": [
-      "John Lafia"
-    ]
+    "name": "10.5"
   },
   {
     "id": "/en/13_going_on_30",
-    "name": "13 Going on 30",
+    "directed_by": [
+      "Gary Winick"
+    ],
     "initial_release_date": "2004-04-14",
     "genre": [
       "Romantic comedy",
@@ -657,13 +648,13 @@
       "Fantasy Comedy",
       "Comedy"
     ],
-    "directed_by": [
-      "Gary Winick"
-    ]
+    "name": "13 Going on 30"
   },
   {
     "id": "/en/2ldk",
-    "name": "2LDK",
+    "directed_by": [
+      "Yukihiko Tsutsumi"
+    ],
     "initial_release_date": "2004-05-13",
     "genre": [
       "LGBT",
@@ -674,26 +665,26 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Yukihiko Tsutsumi"
-    ]
+    "name": "2LDK"
   },
   {
     "id": "/en/7_phere",
-    "name": "7\u00bd Phere",
+    "directed_by": [
+      "Ishaan Trivedi"
+    ],
     "initial_release_date": "2005-07-29",
     "genre": [
       "Bollywood",
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Ishaan Trivedi"
-    ]
+    "name": "7\u00bd Phere"
   },
   {
     "id": "/en/a_beautiful_mind",
-    "name": "A Beautiful Mind",
+    "directed_by": [
+      "Ron Howard"
+    ],
     "initial_release_date": "2001-12-13",
     "genre": [
       "Biographical film",
@@ -704,13 +695,13 @@
       "Documentary film",
       "Drama"
     ],
-    "directed_by": [
-      "Ron Howard"
-    ]
+    "name": "A Beautiful Mind"
   },
   {
     "id": "/en/a_cinderella_story",
-    "name": "A Cinderella Story",
+    "directed_by": [
+      "Mark Rosman"
+    ],
     "initial_release_date": "2004-07-10",
     "genre": [
       "Teen film",
@@ -719,13 +710,13 @@
       "Family",
       "Comedy"
     ],
-    "directed_by": [
-      "Mark Rosman"
-    ]
+    "name": "A Cinderella Story"
   },
   {
     "id": "/en/a_cock_and_bull_story",
-    "name": "A Cock and Bull Story",
+    "directed_by": [
+      "Michael Winterbottom"
+    ],
     "initial_release_date": "2005-07-17",
     "genre": [
       "Mockumentary",
@@ -733,25 +724,25 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Michael Winterbottom"
-    ]
+    "name": "A Cock and Bull Story"
   },
   {
     "id": "/en/a_common_thread",
-    "name": "A Common Thread",
+    "directed_by": [
+      "\u00c9l\u00e9onore Faucher"
+    ],
     "initial_release_date": "2004-05-14",
     "genre": [
       "Romance Film",
       "Drama"
     ],
-    "directed_by": [
-      "\u00c9l\u00e9onore Faucher"
-    ]
+    "name": "A Common Thread"
   },
   {
     "id": "/en/a_dirty_shame",
-    "name": "A Dirty Shame",
+    "directed_by": [
+      "John Waters"
+    ],
     "initial_release_date": "2004-09-12",
     "genre": [
       "Sex comedy",
@@ -762,24 +753,24 @@
       "Gross-out film",
       "Comedy"
     ],
-    "directed_by": [
-      "John Waters"
-    ]
+    "name": "A Dirty Shame"
   },
   {
     "id": "/en/a_duo_occasion",
-    "name": "A Duo Occasion",
+    "directed_by": [
+      "Pierre Lamoureux"
+    ],
     "initial_release_date": "2005-11-22",
     "genre": [
       "Music video"
     ],
-    "directed_by": [
-      "Pierre Lamoureux"
-    ]
+    "name": "A Duo Occasion"
   },
   {
     "id": "/en/a_good_year",
-    "name": "A Good Year",
+    "directed_by": [
+      "Ridley Scott"
+    ],
     "initial_release_date": "2006-09-09",
     "genre": [
       "Romantic comedy",
@@ -791,13 +782,13 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Ridley Scott"
-    ]
+    "name": "A Good Year"
   },
   {
     "id": "/en/a_history_of_violence_2005",
-    "name": "A History of Violence",
+    "directed_by": [
+      "David Cronenberg"
+    ],
     "initial_release_date": "2005-05-16",
     "genre": [
       "Thriller",
@@ -805,13 +796,13 @@
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "David Cronenberg"
-    ]
+    "name": "A History of Violence"
   },
   {
     "id": "/en/ett_hal_i_mitt_hjarta",
-    "name": "A Hole in My Heart",
+    "directed_by": [
+      "Lukas Moodysson"
+    ],
     "initial_release_date": "2004-09-10",
     "genre": [
       "Horror",
@@ -819,13 +810,13 @@
       "Social problem film",
       "Drama"
     ],
-    "directed_by": [
-      "Lukas Moodysson"
-    ]
+    "name": "A Hole in My Heart"
   },
   {
     "id": "/en/a_knights_tale",
-    "name": "A Knight's Tale",
+    "directed_by": [
+      "Brian Helgeland"
+    ],
     "initial_release_date": "2001-03-08",
     "genre": [
       "Romantic comedy",
@@ -837,13 +828,14 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Brian Helgeland"
-    ]
+    "name": "A Knight's Tale"
   },
   {
     "id": "/en/a_league_of_ordinary_gentlemen",
-    "name": "A League of Ordinary Gentlemen",
+    "directed_by": [
+      "Christopher Browne",
+      "Alexander H. Browne"
+    ],
     "initial_release_date": "2006-03-21",
     "genre": [
       "Documentary film",
@@ -851,14 +843,13 @@
       "Culture &amp; Society",
       "Biographical film"
     ],
-    "directed_by": [
-      "Christopher Browne",
-      "Alexander H. Browne"
-    ]
+    "name": "A League of Ordinary Gentlemen"
   },
   {
     "id": "/en/a_little_trip_to_heaven",
-    "name": "A Little Trip to Heaven",
+    "directed_by": [
+      "Baltasar Korm\u00e1kur"
+    ],
     "initial_release_date": "2005-12-26",
     "genre": [
       "Thriller",
@@ -870,13 +861,13 @@
       "Ensemble Film",
       "Drama"
     ],
-    "directed_by": [
-      "Baltasar Korm\u00e1kur"
-    ]
+    "name": "A Little Trip to Heaven"
   },
   {
     "id": "/en/a_lot_like_love",
-    "name": "A Lot like Love",
+    "directed_by": [
+      "Nigel Cole"
+    ],
     "initial_release_date": "2005-04-21",
     "genre": [
       "Romantic comedy",
@@ -885,39 +876,39 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Nigel Cole"
-    ]
+    "name": "A Lot like Love"
   },
   {
     "id": "/en/a_love_song_for_bobby_long",
-    "name": "A Love Song for Bobby Long",
+    "directed_by": [
+      "Shainee Gabel"
+    ],
     "initial_release_date": "2004-09-02",
     "genre": [
       "Film adaptation",
       "Melodrama",
       "Drama"
     ],
-    "directed_by": [
-      "Shainee Gabel"
-    ]
+    "name": "A Love Song for Bobby Long"
   },
   {
     "id": "/en/a_man_a_real_one",
-    "name": "A Man, a Real One",
+    "directed_by": [
+      "Arnaud Larrieu",
+      "Jean-Marie Larrieu"
+    ],
     "initial_release_date": "2003-05-28",
     "genre": [
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Arnaud Larrieu",
-      "Jean-Marie Larrieu"
-    ]
+    "name": "A Man, a Real One"
   },
   {
     "id": "/en/a_midsummer_nights_rave",
-    "name": "A Midsummer Night's Rave",
+    "directed_by": [
+      "Gil Cates Jr."
+    ],
     "genre": [
       "Romance Film",
       "Romantic comedy",
@@ -925,13 +916,13 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Gil Cates Jr."
-    ]
+    "name": "A Midsummer Night's Rave"
   },
   {
     "id": "/en/a_mighty_wind",
-    "name": "A Mighty Wind",
+    "directed_by": [
+      "Christopher Guest"
+    ],
     "initial_release_date": "2003-03-12",
     "genre": [
       "Mockumentary",
@@ -940,48 +931,48 @@
       "Musical comedy",
       "Comedy"
     ],
-    "directed_by": [
-      "Christopher Guest"
-    ]
+    "name": "A Mighty Wind"
   },
   {
     "id": "/en/a_perfect_day",
-    "name": "A Perfect Day",
+    "directed_by": [
+      "Khalil Joreige",
+      "Joana Hadjithomas"
+    ],
     "genre": [
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Khalil Joreige",
-      "Joana Hadjithomas"
-    ]
+    "name": "A Perfect Day"
   },
   {
     "id": "/en/a_prairie_home_companion_2006",
-    "name": "A Prairie Home Companion",
+    "directed_by": [
+      "Robert Altman"
+    ],
     "initial_release_date": "2006-02-12",
     "genre": [
       "Musical comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Robert Altman"
-    ]
+    "name": "A Prairie Home Companion"
   },
   {
     "id": "/en/a_ring_of_endless_light_2002",
-    "name": "A Ring of Endless Light",
+    "directed_by": [
+      "Greg Beeman"
+    ],
     "initial_release_date": "2002-08-23",
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Greg Beeman"
-    ]
+    "name": "A Ring of Endless Light"
   },
   {
     "id": "/en/a_scanner_darkly_2006",
-    "name": "A Scanner Darkly",
+    "directed_by": [
+      "Richard Linklater"
+    ],
     "initial_release_date": "2006-07-07",
     "genre": [
       "Science Fiction",
@@ -992,13 +983,13 @@
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Richard Linklater"
-    ]
+    "name": "A Scanner Darkly"
   },
   {
     "id": "/en/a_short_film_about_john_bolton",
-    "name": "A Short Film About John Bolton",
+    "directed_by": [
+      "Neil Gaiman"
+    ],
     "genre": [
       "Documentary film",
       "Short Film",
@@ -1009,25 +1000,25 @@
       "Comedy",
       "Biographical film"
     ],
-    "directed_by": [
-      "Neil Gaiman"
-    ]
+    "name": "A Short Film About John Bolton"
   },
   {
     "id": "/en/a_shot_in_the_west",
-    "name": "A Shot in the West",
+    "directed_by": [
+      "Bob Kelly"
+    ],
     "initial_release_date": "2006-07-16",
     "genre": [
       "Western",
       "Short Film"
     ],
-    "directed_by": [
-      "Bob Kelly"
-    ]
+    "name": "A Shot in the West"
   },
   {
     "id": "/en/a_sound_of_thunder_2005",
-    "name": "A Sound of Thunder",
+    "directed_by": [
+      "Peter Hyams"
+    ],
     "initial_release_date": "2005-05-15",
     "genre": [
       "Science Fiction",
@@ -1037,50 +1028,50 @@
       "Apocalyptic and post-apocalyptic fiction",
       "Time travel"
     ],
-    "directed_by": [
-      "Peter Hyams"
-    ]
+    "name": "A Sound of Thunder"
   },
   {
     "id": "/en/a_state_of_mind",
-    "name": "A State of Mind",
+    "directed_by": [
+      "Daniel Gordon"
+    ],
     "initial_release_date": "2005-08-10",
     "genre": [
       "Documentary film",
       "Political cinema",
       "Sports"
     ],
-    "directed_by": [
-      "Daniel Gordon"
-    ]
+    "name": "A State of Mind"
   },
   {
     "id": "/en/a_time_for_drunken_horses",
-    "name": "A Time for Drunken Horses",
+    "directed_by": [
+      "Bahman Ghobadi"
+    ],
     "genre": [
       "World cinema",
       "War film",
       "Drama"
     ],
-    "directed_by": [
-      "Bahman Ghobadi"
-    ]
+    "name": "A Time for Drunken Horses"
   },
   {
     "id": "/en/a_ton_image",
-    "name": "\u00c0 ton image",
+    "directed_by": [
+      "Aruna Villiers"
+    ],
     "initial_release_date": "2004-05-26",
     "genre": [
       "Thriller",
       "Science Fiction"
     ],
-    "directed_by": [
-      "Aruna Villiers"
-    ]
+    "name": "\u00c0 ton image"
   },
   {
     "id": "/en/a_very_long_engagement",
-    "name": "A Very Long Engagement",
+    "directed_by": [
+      "Jean-Pierre Jeunet"
+    ],
     "initial_release_date": "2004-10-27",
     "genre": [
       "War film",
@@ -1088,36 +1079,36 @@
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Jean-Pierre Jeunet"
-    ]
+    "name": "A Very Long Engagement"
   },
   {
     "id": "/en/a_view_from_the_eiffel_tower",
-    "name": "A View from Eiffel Tower",
+    "directed_by": [
+      "Nikola Vuk\u010devi\u0107"
+    ],
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Nikola Vuk\u010devi\u0107"
-    ]
+    "name": "A View from Eiffel Tower"
   },
   {
     "id": "/en/a_walk_to_remember",
-    "name": "A Walk to Remember",
+    "directed_by": [
+      "Adam Shankman"
+    ],
     "initial_release_date": "2002-01-23",
     "genre": [
       "Coming of age",
       "Romance Film",
       "Drama"
     ],
-    "directed_by": [
-      "Adam Shankman"
-    ]
+    "name": "A Walk to Remember"
   },
   {
     "id": "/en/a_i",
-    "name": "A.I. Artificial Intelligence",
+    "directed_by": [
+      "Steven Spielberg"
+    ],
     "initial_release_date": "2001-06-26",
     "genre": [
       "Science Fiction",
@@ -1125,13 +1116,13 @@
       "Adventure Film",
       "Drama"
     ],
-    "directed_by": [
-      "Steven Spielberg"
-    ]
+    "name": "A.I. Artificial Intelligence"
   },
   {
     "id": "/en/a_k_a_tommy_chong",
-    "name": "a/k/a Tommy Chong",
+    "directed_by": [
+      "Josh Gilbert"
+    ],
     "initial_release_date": "2006-06-14",
     "genre": [
       "Documentary film",
@@ -1139,26 +1130,26 @@
       "Law &amp; Crime",
       "Biographical film"
     ],
-    "directed_by": [
-      "Josh Gilbert"
-    ]
+    "name": "a/k/a Tommy Chong"
   },
   {
     "id": "/en/aalvar",
-    "name": "Aalvar",
+    "directed_by": [
+      "Chella"
+    ],
     "initial_release_date": "2007-01-12",
     "genre": [
       "Action Film",
       "Tamil cinema",
       "World cinema"
     ],
-    "directed_by": [
-      "Chella"
-    ]
+    "name": "Aalvar"
   },
   {
     "id": "/en/aap_ki_khatir",
-    "name": "Aap Ki Khatir",
+    "directed_by": [
+      "Dharmesh Darshan"
+    ],
     "initial_release_date": "2006-08-25",
     "genre": [
       "Romance Film",
@@ -1166,13 +1157,13 @@
       "Bollywood",
       "Drama"
     ],
-    "directed_by": [
-      "Dharmesh Darshan"
-    ]
+    "name": "Aap Ki Khatir"
   },
   {
     "id": "/en/aaru_2005",
-    "name": "Aaru",
+    "directed_by": [
+      "Hari"
+    ],
     "initial_release_date": "2005-12-09",
     "genre": [
       "Thriller",
@@ -1181,26 +1172,26 @@
       "Tamil cinema",
       "World cinema"
     ],
-    "directed_by": [
-      "Hari"
-    ]
+    "name": "Aaru"
   },
   {
     "id": "/en/aata",
-    "name": "Aata",
+    "directed_by": [
+      "V.N. Aditya"
+    ],
     "initial_release_date": "2007-05-09",
     "genre": [
       "Romance Film",
       "Tollywood",
       "World cinema"
     ],
-    "directed_by": [
-      "V.N. Aditya"
-    ]
+    "name": "Aata"
   },
   {
     "id": "/en/aathi",
-    "name": "Aadhi",
+    "directed_by": [
+      "Ramana"
+    ],
     "initial_release_date": "2006-01-14",
     "genre": [
       "Thriller",
@@ -1212,13 +1203,13 @@
       "Drama",
       "Musical Drama"
     ],
-    "directed_by": [
-      "Ramana"
-    ]
+    "name": "Aadhi"
   },
   {
     "id": "/en/aayitha_ezhuthu",
-    "name": "Aaytha Ezhuthu",
+    "directed_by": [
+      "Mani Ratnam"
+    ],
     "initial_release_date": "2004-05-21",
     "genre": [
       "Thriller",
@@ -1227,13 +1218,13 @@
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Mani Ratnam"
-    ]
+    "name": "Aaytha Ezhuthu"
   },
   {
     "id": "/en/abandon_2002",
-    "name": "Abandon",
+    "directed_by": [
+      "Stephen Gaghan"
+    ],
     "initial_release_date": "2002-10-18",
     "genre": [
       "Mystery",
@@ -1242,41 +1233,41 @@
       "Suspense",
       "Drama"
     ],
-    "directed_by": [
-      "Stephen Gaghan"
-    ]
+    "name": "Abandon"
   },
   {
     "id": "/en/abduction_the_megumi_yokota_story",
-    "name": "Abduction: The Megumi Yokota Story",
+    "directed_by": [
+      "Patty Kim",
+      "Chris Sheridan"
+    ],
     "genre": [
       "Documentary film",
       "Political cinema",
       "Culture &amp; Society",
       "Law &amp; Crime"
     ],
-    "directed_by": [
-      "Patty Kim",
-      "Chris Sheridan"
-    ]
+    "name": "Abduction: The Megumi Yokota Story"
   },
   {
     "id": "/en/about_a_boy_2002",
-    "name": "About a Boy",
+    "directed_by": [
+      "Chris Weitz",
+      "Paul Weitz"
+    ],
     "initial_release_date": "2002-04-26",
     "genre": [
       "Romance Film",
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Chris Weitz",
-      "Paul Weitz"
-    ]
+    "name": "About a Boy"
   },
   {
     "id": "/en/about_schmidt",
-    "name": "About Schmidt",
+    "directed_by": [
+      "Alexander Payne"
+    ],
     "initial_release_date": "2002-05-22",
     "genre": [
       "Black comedy",
@@ -1287,38 +1278,38 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Alexander Payne"
-    ]
+    "name": "About Schmidt"
   },
   {
     "id": "/en/accepted",
-    "name": "Accepted",
+    "directed_by": [
+      "Steve Pink"
+    ],
     "initial_release_date": "2006-08-18",
     "genre": [
       "Teen film",
       "Comedy"
     ],
-    "directed_by": [
-      "Steve Pink"
-    ]
+    "name": "Accepted"
   },
   {
     "id": "/en/across_the_hall",
-    "name": "Across the Hall",
+    "directed_by": [
+      "Alex Merkin",
+      "Alex Merkin"
+    ],
     "genre": [
       "Short Film",
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Alex Merkin",
-      "Alex Merkin"
-    ]
+    "name": "Across the Hall"
   },
   {
     "id": "/en/adam_steve",
-    "name": "Adam &amp; Steve",
+    "directed_by": [
+      "Craig Chester"
+    ],
     "initial_release_date": "2005-04-24",
     "genre": [
       "Romance Film",
@@ -1330,13 +1321,13 @@
       "Gay Interest",
       "Comedy"
     ],
-    "directed_by": [
-      "Craig Chester"
-    ]
+    "name": "Adam &amp; Steve"
   },
   {
     "id": "/en/adam_resurrected",
-    "name": "Adam Resurrected",
+    "directed_by": [
+      "Paul Schrader"
+    ],
     "initial_release_date": "2008-08-30",
     "genre": [
       "Historical period drama",
@@ -1344,81 +1335,81 @@
       "War film",
       "Drama"
     ],
-    "directed_by": [
-      "Paul Schrader"
-    ]
+    "name": "Adam Resurrected"
   },
   {
     "id": "/en/adaptation_2002",
-    "name": "Adaptation",
+    "directed_by": [
+      "Spike Jonze"
+    ],
     "initial_release_date": "2002-12-06",
     "genre": [
       "Crime Fiction",
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Spike Jonze"
-    ]
+    "name": "Adaptation"
   },
   {
     "id": "/en/address_unknown",
-    "name": "Address Unknown",
+    "directed_by": [
+      "Kim Ki-duk"
+    ],
     "initial_release_date": "2001-06-02",
     "genre": [
       "War film",
       "Drama"
     ],
-    "directed_by": [
-      "Kim Ki-duk"
-    ]
+    "name": "Address Unknown"
   },
   {
     "id": "/en/adrenaline_rush_2002",
-    "name": "Adrenaline Rush",
+    "directed_by": [
+      "Marc Fafard"
+    ],
     "initial_release_date": "2002-10-18",
     "genre": [
       "Documentary film",
       "Short Film"
     ],
-    "directed_by": [
-      "Marc Fafard"
-    ]
+    "name": "Adrenaline Rush"
   },
   {
     "id": "/en/essential_keys_to_better_bowling_2006",
-    "name": "Essential Keys To Better Bowling",
+    "directed_by": [],
     "genre": [
       "Documentary film",
       "Sports"
     ],
-    "directed_by": []
+    "name": "Essential Keys To Better Bowling"
   },
   {
     "id": "/en/adventures_into_digital_comics",
-    "name": "Adventures Into Digital Comics",
+    "directed_by": [
+      "S\u00e9bastien Dumesnil"
+    ],
     "genre": [
       "Documentary film"
     ],
-    "directed_by": [
-      "S\u00e9bastien Dumesnil"
-    ]
+    "name": "Adventures Into Digital Comics"
   },
   {
     "id": "/en/ae_fond_kiss",
-    "name": "Ae Fond Kiss...",
+    "directed_by": [
+      "Ken Loach"
+    ],
     "initial_release_date": "2004-02-13",
     "genre": [
       "Romance Film",
       "Drama"
     ],
-    "directed_by": [
-      "Ken Loach"
-    ]
+    "name": "Ae Fond Kiss..."
   },
   {
     "id": "/en/aetbaar",
-    "name": "Aetbaar",
+    "directed_by": [
+      "Vikram Bhatt"
+    ],
     "initial_release_date": "2004-01-23",
     "genre": [
       "Thriller",
@@ -1431,13 +1422,10 @@
       "Drama",
       "Musical Drama"
     ],
-    "directed_by": [
-      "Vikram Bhatt"
-    ]
+    "name": "Aetbaar"
   },
   {
     "id": "/en/aethiree",
-    "name": "Aethiree",
     "initial_release_date": "2004-04-23",
     "genre": [
       "Comedy",
@@ -1446,11 +1434,11 @@
     ],
     "directed_by": [
       "K. S. Ravikumar"
-    ]
+    ],
+    "name": "Aethirree"
   },
   {
     "id": "/en/after_innocence",
-    "name": "After Innocence",
     "genre": [
       "Documentary film",
       "Crime Fiction",
@@ -1461,11 +1449,11 @@
     ],
     "directed_by": [
       "Jessica Sanders"
-    ]
+    ],
+    "name": "After Innocence"
   },
   {
     "id": "/en/after_the_sunset",
-    "name": "After the Sunset",
     "initial_release_date": "2004-11-10",
     "genre": [
       "Crime Fiction",
@@ -1479,11 +1467,11 @@
     ],
     "directed_by": [
       "Brett Ratner"
-    ]
+    ],
+    "name": "After the Sunset"
   },
   {
     "id": "/en/aftermath_2007",
-    "name": "Aftermath",
     "initial_release_date": "2013-03-01",
     "genre": [
       "Crime Fiction",
@@ -1491,11 +1479,11 @@
     ],
     "directed_by": [
       "Thomas Farone"
-    ]
+    ],
+    "name": "Aftermath"
   },
   {
     "id": "/en/against_the_ropes",
-    "name": "Against the Ropes",
     "initial_release_date": "2004-02-20",
     "genre": [
       "Biographical film",
@@ -1504,11 +1492,11 @@
     ],
     "directed_by": [
       "Charles S. Dutton"
-    ]
+    ],
+    "name": "Against the Ropes"
   },
   {
     "id": "/en/agent_cody_banks_2_destination_london",
-    "name": "Agent Cody Banks 2: Destination London",
     "initial_release_date": "2004-03-12",
     "genre": [
       "Adventure Film",
@@ -1522,21 +1510,21 @@
     ],
     "directed_by": [
       "Kevin Allen"
-    ]
+    ],
+    "name": "Agent Cody Banks 2: Destination London"
   },
   {
     "id": "/en/agent_one-half",
-    "name": "Agent One-Half",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Brian Bero"
-    ]
+    ],
+    "name": "Agent One-Half"
   },
   {
     "id": "/en/agnes_and_his_brothers",
-    "name": "Agnes and His Brothers",
     "initial_release_date": "2004-09-05",
     "genre": [
       "Drama",
@@ -1544,11 +1532,11 @@
     ],
     "directed_by": [
       "Oskar Roehler"
-    ]
+    ],
+    "name": "Agnes and His Brothers"
   },
   {
     "id": "/en/aideista_parhain",
-    "name": "Mother of Mine",
     "initial_release_date": "2005-08-25",
     "genre": [
       "War film",
@@ -1556,11 +1544,11 @@
     ],
     "directed_by": [
       "Klaus H\u00e4r\u00f6"
-    ]
+    ],
+    "name": "Mother of Mine"
   },
   {
     "id": "/en/aileen_life_and_death_of_a_serial_killer",
-    "name": "Aileen: Life and Death of a Serial Killer",
     "initial_release_date": "2003-05-10",
     "genre": [
       "Documentary film",
@@ -1570,11 +1558,11 @@
     "directed_by": [
       "Nick Broomfield",
       "Joan Churchill"
-    ]
+    ],
+    "name": "Aileen: Life and Death of a Serial Killer"
   },
   {
     "id": "/en/air_2005",
-    "name": "Air",
     "initial_release_date": "2005-02-05",
     "genre": [
       "Fantasy",
@@ -1585,11 +1573,11 @@
     ],
     "directed_by": [
       "Osamu Dezaki"
-    ]
+    ],
+    "name": "Air"
   },
   {
     "id": "/en/air_bud_seventh_inning_fetch",
-    "name": "Air Bud: Seventh Inning Fetch",
     "initial_release_date": "2002-02-21",
     "genre": [
       "Family",
@@ -1599,11 +1587,11 @@
     ],
     "directed_by": [
       "Robert Vince"
-    ]
+    ],
+    "name": "Air Bud: Seventh Inning Fetch"
   },
   {
     "id": "/en/air_bud_spikes_back",
-    "name": "Air Bud: Spikes Back",
     "initial_release_date": "2003-06-24",
     "genre": [
       "Family",
@@ -1612,11 +1600,11 @@
     ],
     "directed_by": [
       "Mike Southon"
-    ]
+    ],
+    "name": "Air Bud: Spikes Back"
   },
   {
     "id": "/en/air_buddies",
-    "name": "Air Buddies",
     "initial_release_date": "2006-12-10",
     "genre": [
       "Family",
@@ -1627,11 +1615,11 @@
     ],
     "directed_by": [
       "Robert Vince"
-    ]
+    ],
+    "name": "Air Buddies"
   },
   {
     "id": "/en/aitraaz",
-    "name": "Aitraaz",
     "initial_release_date": "2004-11-12",
     "genre": [
       "Trial drama",
@@ -1643,11 +1631,11 @@
     "directed_by": [
       "Abbas Burmawalla",
       "Mustan Burmawalla"
-    ]
+    ],
+    "name": "Aitraaz"
   },
   {
     "id": "/en/aka_2002",
-    "name": "AKA",
     "initial_release_date": "2002-01-19",
     "genre": [
       "LGBT",
@@ -1657,11 +1645,11 @@
     ],
     "directed_by": [
       "Duncan Roy"
-    ]
+    ],
+    "name": "AKA"
   },
   {
     "id": "/en/aakasha_gopuram",
-    "name": "Aakasha Gopuram",
     "initial_release_date": "2008-08-22",
     "genre": [
       "Romance Film",
@@ -1671,11 +1659,11 @@
     ],
     "directed_by": [
       "K.P.Kumaran"
-    ]
+    ],
+    "name": "Aakasha Gopuram"
   },
   {
     "id": "/en/akbar-jodha",
-    "name": "Jodhaa Akbar",
     "initial_release_date": "2008-02-13",
     "genre": [
       "Biographical film",
@@ -1690,22 +1678,22 @@
     ],
     "directed_by": [
       "Ashutosh Gowariker"
-    ]
+    ],
+    "name": "Jodhaa Akbar"
   },
   {
     "id": "/en/akeelah_and_the_bee",
-    "name": "Akeelah and the Bee",
     "initial_release_date": "2006-03-16",
     "genre": [
       "Drama"
     ],
     "directed_by": [
       "Doug Atchison"
-    ]
+    ],
+    "name": "Akeelah and the Bee"
   },
   {
     "id": "/en/aks",
-    "name": "The Reflection",
     "initial_release_date": "2001-07-13",
     "genre": [
       "Horror",
@@ -1716,11 +1704,11 @@
     ],
     "directed_by": [
       "Rakeysh Omprakash Mehra"
-    ]
+    ],
+    "name": "The Reflection"
   },
   {
     "id": "/en/aksar",
-    "name": "Aksar",
     "initial_release_date": "2006-02-03",
     "genre": [
       "Romance Film",
@@ -1730,11 +1718,11 @@
     ],
     "directed_by": [
       "Anant Mahadevan"
-    ]
+    ],
+    "name": "Aksar"
   },
   {
     "id": "/en/al_franken_god_spoke",
-    "name": "Al Franken: God Spoke",
     "initial_release_date": "2006-09-13",
     "genre": [
       "Mockumentary",
@@ -1746,11 +1734,11 @@
     "directed_by": [
       "Nick Doob",
       "Chris Hegedus"
-    ]
+    ],
+    "name": "Al Franken: God Spoke"
   },
   {
     "id": "/en/alag",
-    "name": "Different",
     "initial_release_date": "2006-06-16",
     "genre": [
       "Thriller",
@@ -1760,11 +1748,11 @@
     ],
     "directed_by": [
       "Ashu Trikha"
-    ]
+    ],
+    "name": "Different"
   },
   {
     "id": "/en/alai",
-    "name": "Wave",
     "initial_release_date": "2003-09-10",
     "genre": [
       "Romance Film",
@@ -1775,11 +1763,11 @@
     ],
     "directed_by": [
       "Vikram Kumar"
-    ]
+    ],
+    "name": "Wave"
   },
   {
     "id": "/en/alaipayuthey",
-    "name": "Waves",
     "initial_release_date": "2000-04-14",
     "genre": [
       "Musical",
@@ -1789,11 +1777,11 @@
     ],
     "directed_by": [
       "Mani Ratnam"
-    ]
+    ],
+    "name": "Waves"
   },
   {
     "id": "/en/alatriste",
-    "name": "Alatriste",
     "initial_release_date": "2006-09-01",
     "genre": [
       "Thriller",
@@ -1805,11 +1793,11 @@
     ],
     "directed_by": [
       "Agust\u00edn D\u00edaz Yanes"
-    ]
+    ],
+    "name": "Alatriste"
   },
   {
     "id": "/en/alex_emma",
-    "name": "Alex &amp; Emma",
     "initial_release_date": "2003-06-20",
     "genre": [
       "Romantic comedy",
@@ -1818,11 +1806,11 @@
     ],
     "directed_by": [
       "Rob Reiner"
-    ]
+    ],
+    "name": "Alex &amp; Emma"
   },
   {
     "id": "/en/alexander_2004",
-    "name": "Alexander",
     "initial_release_date": "2004-11-16",
     "genre": [
       "War film",
@@ -1837,11 +1825,11 @@
       "Oliver Stone",
       "Wilhelm Sasnal",
       "Anka Sasnal"
-    ]
+    ],
+    "name": "Alexander"
   },
   {
     "id": "/en/alexandras_project",
-    "name": "Alexandra's Project",
     "genre": [
       "Thriller",
       "Suspense",
@@ -1852,11 +1840,11 @@
     ],
     "directed_by": [
       "Rolf de Heer"
-    ]
+    ],
+    "name": "Alexandra's Project"
   },
   {
     "id": "/en/alfie_2004",
-    "name": "Alfie",
     "initial_release_date": "2004-10-22",
     "genre": [
       "Sex comedy",
@@ -1869,11 +1857,11 @@
     ],
     "directed_by": [
       "Charles Shyer"
-    ]
+    ],
+    "name": "Alfie"
   },
   {
     "id": "/en/ali_2001",
-    "name": "Ali",
     "initial_release_date": "2001-12-11",
     "genre": [
       "Biographical film",
@@ -1884,11 +1872,11 @@
     ],
     "directed_by": [
       "Michael Mann"
-    ]
+    ],
+    "name": "Ali"
   },
   {
     "id": "/en/ali_g_indahouse",
-    "name": "Ali G Indahouse",
     "initial_release_date": "2002-03-22",
     "genre": [
       "Stoner film",
@@ -1899,11 +1887,11 @@
     ],
     "directed_by": [
       "Mark Mylod"
-    ]
+    ],
+    "name": "Ali G Indahouse"
   },
   {
     "id": "/en/alien_autopsy_2006",
-    "name": "Alien Autopsy",
     "initial_release_date": "2006-04-07",
     "genre": [
       "Science Fiction",
@@ -1912,11 +1900,11 @@
     ],
     "directed_by": [
       "Jonny Campbell"
-    ]
+    ],
+    "name": "Alien Autopsy"
   },
   {
     "id": "/en/avp_alien_vs_predator",
-    "name": "Alien vs. Predator",
     "initial_release_date": "2004-08-12",
     "genre": [
       "Science Fiction",
@@ -1928,11 +1916,11 @@
     ],
     "directed_by": [
       "Paul W. S. Anderson"
-    ]
+    ],
+    "name": "Alien vs. Predator"
   },
   {
     "id": "/en/avpr_aliens_vs_predator_requiem",
-    "name": "AVPR: Aliens vs Predator - Requiem",
     "initial_release_date": "2007-12-25",
     "genre": [
       "Science Fiction",
@@ -1945,11 +1933,11 @@
     "directed_by": [
       "Colin Strause",
       "Greg Strause"
-    ]
+    ],
+    "name": "AVPR: Aliens vs Predator - Requiem"
   },
   {
     "id": "/en/aliens_of_the_deep",
-    "name": "Aliens of the Deep",
     "initial_release_date": "2005-01-28",
     "genre": [
       "Documentary film",
@@ -1961,11 +1949,11 @@
       "James Cameron",
       "Steven Quale",
       "Steven Quale"
-    ]
+    ],
+    "name": "Aliens of the Deep"
   },
   {
     "id": "/en/alive_2002",
-    "name": "Alive",
     "initial_release_date": "2002-09-12",
     "genre": [
       "Science Fiction",
@@ -1978,11 +1966,11 @@
     ],
     "directed_by": [
       "Ryuhei Kitamura"
-    ]
+    ],
+    "name": "Alive"
   },
   {
     "id": "/en/all_about_lily_chou-chou",
-    "name": "All About Lily Chou-Chou",
     "initial_release_date": "2001-09-07",
     "genre": [
       "Crime Fiction",
@@ -1995,11 +1983,11 @@
     ],
     "directed_by": [
       "Shunji Iwai"
-    ]
+    ],
+    "name": "All About Lily Chou-Chou"
   },
   {
     "id": "/en/all_about_the_benjamins",
-    "name": "All About the Benjamins",
     "initial_release_date": "2002-03-08",
     "genre": [
       "Action Film",
@@ -2009,11 +1997,11 @@
     ],
     "directed_by": [
       "Kevin Bray"
-    ]
+    ],
+    "name": "All About the Benjamins"
   },
   {
     "id": "/en/all_i_want_2002",
-    "name": "All I Want",
     "initial_release_date": "2002-09-10",
     "genre": [
       "Romantic comedy",
@@ -2023,11 +2011,11 @@
     ],
     "directed_by": [
       "Jeffrey Porter"
-    ]
+    ],
+    "name": "All I Want"
   },
   {
     "id": "/en/all_over_the_guy",
-    "name": "All Over the Guy",
     "genre": [
       "Indie film",
       "LGBT",
@@ -2040,11 +2028,11 @@
     ],
     "directed_by": [
       "Julie Davis"
-    ]
+    ],
+    "name": "All Over the Guy"
   },
   {
     "id": "/en/all_souls_day_2005",
-    "name": "All Souls Day",
     "initial_release_date": "2005-01-25",
     "genre": [
       "Horror",
@@ -2054,11 +2042,11 @@
     "directed_by": [
       "Jeremy Kasten",
       "Mark A. Altman"
-    ]
+    ],
+    "name": "All Souls Day"
   },
   {
     "id": "/en/all_the_kings_men_2006",
-    "name": "All the King's Men",
     "initial_release_date": "2006-09-10",
     "genre": [
       "Political drama",
@@ -2066,11 +2054,11 @@
     ],
     "directed_by": [
       "Steven Zaillian"
-    ]
+    ],
+    "name": "All the King's Men"
   },
   {
     "id": "/en/all_the_real_girls",
-    "name": "All the Real Girls",
     "initial_release_date": "2003-01-19",
     "genre": [
       "Romance Film",
@@ -2080,11 +2068,11 @@
     ],
     "directed_by": [
       "David Gordon Green"
-    ]
+    ],
+    "name": "All the Real Girls"
   },
   {
     "id": "/en/allari_bullodu",
-    "name": "Allari Bullodu",
     "genre": [
       "Comedy",
       "Romance Film",
@@ -2093,11 +2081,11 @@
     ],
     "directed_by": [
       "Kovelamudi Raghavendra Rao"
-    ]
+    ],
+    "name": "Allari Bullodu"
   },
   {
     "id": "/en/allari_pidugu",
-    "name": "Allari Pidugu",
     "initial_release_date": "2005-10-05",
     "genre": [
       "Drama",
@@ -2106,22 +2094,22 @@
     ],
     "directed_by": [
       "Jayant Paranji"
-    ]
+    ],
+    "name": "Allari Pidugu"
   },
   {
     "id": "/en/alles_auf_zucker",
-    "name": "Alles auf Zucker!",
     "initial_release_date": "2004-12-31",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Dani Levy"
-    ]
+    ],
+    "name": "Alles auf Zucker!"
   },
   {
     "id": "/en/alley_cats_strike",
-    "name": "Alley Cats Strike!",
     "initial_release_date": "2000-03-18",
     "genre": [
       "Family",
@@ -2129,11 +2117,11 @@
     ],
     "directed_by": [
       "Rod Daniel"
-    ]
+    ],
+    "name": "Alley Cats Strike!"
   },
   {
     "id": "/en/almost_famous",
-    "name": "Almost Famous",
     "initial_release_date": "2000-09-08",
     "genre": [
       "Musical",
@@ -2147,11 +2135,11 @@
     ],
     "directed_by": [
       "Cameron Crowe"
-    ]
+    ],
+    "name": "Almost Famous"
   },
   {
     "id": "/en/almost_round_three",
-    "name": "Almost: Round Three",
     "initial_release_date": "2004-11-10",
     "genre": [
       "Sports"
@@ -2159,21 +2147,21 @@
     "directed_by": [
       "Matt Hill",
       "Matt Hill"
-    ]
+    ],
+    "name": "Almost: Round Three"
   },
   {
     "id": "/en/alone_and_restless",
-    "name": "Alone and Restless",
     "genre": [
       "Drama"
     ],
     "directed_by": [
       "Michael Thomas Dunn"
-    ]
+    ],
+    "name": "Alone and Restless"
   },
   {
     "id": "/en/alone_in_the_dark",
-    "name": "Alone in the Dark",
     "initial_release_date": "2005-01-28",
     "genre": [
       "Science Fiction",
@@ -2185,11 +2173,11 @@
     ],
     "directed_by": [
       "Uwe Boll"
-    ]
+    ],
+    "name": "Alone in the Dark"
   },
   {
     "id": "/en/along_came_polly",
-    "name": "Along Came Polly",
     "initial_release_date": "2004-01-12",
     "genre": [
       "Romantic comedy",
@@ -2200,11 +2188,11 @@
     ],
     "directed_by": [
       "John Hamburg"
-    ]
+    ],
+    "name": "Along Came Polly"
   },
   {
     "id": "/en/alpha_dog",
-    "name": "Alpha Dog",
     "initial_release_date": "2006-01-27",
     "genre": [
       "Crime Fiction",
@@ -2213,11 +2201,11 @@
     ],
     "directed_by": [
       "Nick Cassavetes"
-    ]
+    ],
+    "name": "Alpha Dog"
   },
   {
     "id": "/en/amelie",
-    "name": "Am\u00e9lie",
     "initial_release_date": "2001-04-25",
     "genre": [
       "Romance Film",
@@ -2225,11 +2213,11 @@
     ],
     "directed_by": [
       "Jean-Pierre Jeunet"
-    ]
+    ],
+    "name": "Am\u00e9lie"
   },
   {
     "id": "/en/america_freedom_to_fascism",
-    "name": "America: Freedom to Fascism",
     "initial_release_date": "2006-07-28",
     "genre": [
       "Documentary film",
@@ -2238,11 +2226,11 @@
     ],
     "directed_by": [
       "Aaron Russo"
-    ]
+    ],
+    "name": "America: Freedom to Fascism"
   },
   {
     "id": "/en/americas_sweethearts",
-    "name": "America's Sweethearts",
     "initial_release_date": "2001-07-17",
     "genre": [
       "Romantic comedy",
@@ -2251,11 +2239,11 @@
     ],
     "directed_by": [
       "Joe Roth"
-    ]
+    ],
+    "name": "America's Sweethearts"
   },
   {
     "id": "/en/american_cowslip",
-    "name": "American Cowslip",
     "initial_release_date": "2009-07-24",
     "genre": [
       "Black comedy",
@@ -2264,11 +2252,11 @@
     ],
     "directed_by": [
       "Mark David"
-    ]
+    ],
+    "name": "American Cowslip"
   },
   {
     "id": "/en/american_desi",
-    "name": "American Desi",
     "genre": [
       "Indie film",
       "Romance Film",
@@ -2279,11 +2267,11 @@
     ],
     "directed_by": [
       "Piyush Dinker Pandya"
-    ]
+    ],
+    "name": "American Desi"
   },
   {
     "id": "/en/american_dog",
-    "name": "Bolt",
     "initial_release_date": "2008-11-17",
     "genre": [
       "Family",
@@ -2294,11 +2282,11 @@
     "directed_by": [
       "Chris Williams",
       "Byron Howard"
-    ]
+    ],
+    "name": "Bolt"
   },
   {
     "id": "/en/american_dreamz",
-    "name": "American Dreamz",
     "initial_release_date": "2006-04-21",
     "genre": [
       "Political cinema",
@@ -2309,11 +2297,11 @@
     ],
     "directed_by": [
       "Paul Weitz"
-    ]
+    ],
+    "name": "American Dreamz"
   },
   {
     "id": "/en/american_gangster",
-    "name": "American Gangster",
     "initial_release_date": "2007-10-19",
     "genre": [
       "Crime Fiction",
@@ -2328,11 +2316,11 @@
     ],
     "directed_by": [
       "Ridley Scott"
-    ]
+    ],
+    "name": "American Gangster"
   },
   {
     "id": "/en/american_gun",
-    "name": "American Gun",
     "initial_release_date": "2005-09-15",
     "genre": [
       "Indie film",
@@ -2340,11 +2328,11 @@
     ],
     "directed_by": [
       "Aric Avelino"
-    ]
+    ],
+    "name": "American Gun"
   },
   {
     "id": "/en/american_hardcore_2006",
-    "name": "American Hardcore",
     "initial_release_date": "2006-03-11",
     "genre": [
       "Music",
@@ -2355,11 +2343,11 @@
     ],
     "directed_by": [
       "Paul Rachman"
-    ]
+    ],
+    "name": "American Hardcore"
   },
   {
     "id": "/en/american_outlaws",
-    "name": "American Outlaws",
     "initial_release_date": "2001-08-17",
     "genre": [
       "Western",
@@ -2372,22 +2360,22 @@
     ],
     "directed_by": [
       "Les Mayfield"
-    ]
+    ],
+    "name": "American Outlaws"
   },
   {
     "id": "/en/american_pie_the_naked_mile",
-    "name": "American Pie Presents: The Naked Mile",
     "initial_release_date": "2006-12-07",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Joe Nussbaum"
-    ]
+    ],
+    "name": "American Pie Presents: The Naked Mile"
   },
   {
     "id": "/en/american_pie_2",
-    "name": "American Pie 2",
     "initial_release_date": "2001-08-06",
     "genre": [
       "Romance Film",
@@ -2395,22 +2383,22 @@
     ],
     "directed_by": [
       "James B. Rogers"
-    ]
+    ],
+    "name": "American Pie 2"
   },
   {
     "id": "/en/american_pie_presents_band_camp",
-    "name": "American Pie Presents: Band Camp",
     "initial_release_date": "2005-10-31",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Steve Rash"
-    ]
+    ],
+    "name": "American Pie Presents: Band Camp"
   },
   {
     "id": "/en/american_psycho_2000",
-    "name": "American Psycho",
     "initial_release_date": "2000-01-21",
     "genre": [
       "Black comedy",
@@ -2425,11 +2413,11 @@
     ],
     "directed_by": [
       "Mary Harron"
-    ]
+    ],
+    "name": "American Psycho"
   },
   {
     "id": "/en/american_splendor_2003",
-    "name": "American Splendor",
     "initial_release_date": "2003-01-20",
     "genre": [
       "Indie film",
@@ -2442,11 +2430,11 @@
     "directed_by": [
       "Shari Springer Berman",
       "Robert Pulcini"
-    ]
+    ],
+    "name": "American Splendor"
   },
   {
     "id": "/en/american_wedding",
-    "name": "American Wedding",
     "initial_release_date": "2003-07-24",
     "genre": [
       "Romance Film",
@@ -2454,11 +2442,11 @@
     ],
     "directed_by": [
       "Jesse Dylan"
-    ]
+    ],
+    "name": "American Wedding"
   },
   {
     "id": "/en/americano_2005",
-    "name": "Americano",
     "initial_release_date": "2005-01-07",
     "genre": [
       "Romance Film",
@@ -2467,11 +2455,11 @@
     ],
     "directed_by": [
       "Kevin Noland"
-    ]
+    ],
+    "name": "Americano"
   },
   {
     "id": "/en/amma_nanna_o_tamila_ammayi",
-    "name": "Amma Nanna O Tamila Ammayi",
     "initial_release_date": "2003-04-19",
     "genre": [
       "Sports",
@@ -2481,11 +2469,11 @@
     ],
     "directed_by": [
       "Puri Jagannadh"
-    ]
+    ],
+    "name": "Amma Nanna O Tamila Ammayi"
   },
   {
     "id": "/en/amores_perros",
-    "name": "Amores perros",
     "initial_release_date": "2000-05-14",
     "genre": [
       "Thriller",
@@ -2493,11 +2481,11 @@
     ],
     "directed_by": [
       "Alejandro Gonz\u00e1lez I\u00f1\u00e1rritu"
-    ]
+    ],
+    "name": "Amores perros"
   },
   {
     "id": "/en/amrutham",
-    "name": "Amrutham",
     "initial_release_date": "2004-12-24",
     "genre": [
       "Drama",
@@ -2506,11 +2494,11 @@
     ],
     "directed_by": [
       "Sibi Malayil"
-    ]
+    ],
+    "name": "Amrutham"
   },
   {
     "id": "/en/an_american_crime",
-    "name": "An American Crime",
     "initial_release_date": "2007-01-19",
     "genre": [
       "Crime Fiction",
@@ -2520,11 +2508,11 @@
     ],
     "directed_by": [
       "Tommy O'Haver"
-    ]
+    ],
+    "name": "An American Crime"
   },
   {
     "id": "/en/an_american_haunting",
-    "name": "An American Haunting",
     "initial_release_date": "2005-11-05",
     "genre": [
       "Horror",
@@ -2533,11 +2521,11 @@
     ],
     "directed_by": [
       "Courtney Solomon"
-    ]
+    ],
+    "name": "An American Haunting"
   },
   {
     "id": "/en/an_american_tail_the_mystery_of_the_night_monster",
-    "name": "An American Tail: The Mystery of the Night Monster",
     "initial_release_date": "2000-07-25",
     "genre": [
       "Fantasy",
@@ -2552,11 +2540,11 @@
     ],
     "directed_by": [
       "Larry Latham"
-    ]
+    ],
+    "name": "An American Tail: The Mystery of the Night Monster"
   },
   {
     "id": "/en/an_evening_with_kevin_smith",
-    "name": "An Evening with Kevin Smith",
     "genre": [
       "Documentary film",
       "Stand-up comedy",
@@ -2568,32 +2556,32 @@
     ],
     "directed_by": [
       "J.M. Kenny"
-    ]
+    ],
+    "name": "An Evening with Kevin Smith"
   },
   {
     "id": "/en/an_evening_with_kevin_smith_2006",
-    "name": "An Evening with Kevin Smith 2: Evening Harder",
     "genre": [
       "Documentary film"
     ],
     "directed_by": [
       "J.M. Kenny"
-    ]
+    ],
+    "name": "An Evening with Kevin Smith 2: Evening Harder"
   },
   {
     "id": "/en/an_everlasting_piece",
-    "name": "An Everlasting Piece",
     "initial_release_date": "2000-12-25",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Barry Levinson"
-    ]
+    ],
+    "name": "An Everlasting Piece"
   },
   {
     "id": "/en/an_extremely_goofy_movie",
-    "name": "An Extremely Goofy Movie",
     "initial_release_date": "2000-02-29",
     "genre": [
       "Animation",
@@ -2605,22 +2593,22 @@
     "directed_by": [
       "Ian Harrowell",
       "Douglas McCarthy"
-    ]
+    ],
+    "name": "An Extremely Goofy Movie"
   },
   {
     "id": "/en/an_inconvenient_truth",
-    "name": "An Inconvenient Truth",
     "initial_release_date": "2006-01-24",
     "genre": [
       "Documentary film"
     ],
     "directed_by": [
       "Davis Guggenheim"
-    ]
+    ],
+    "name": "An Inconvenient Truth"
   },
   {
     "id": "/en/an_unfinished_life",
-    "name": "An Unfinished Life",
     "initial_release_date": "2005-08-19",
     "genre": [
       "Melodrama",
@@ -2628,11 +2616,11 @@
     ],
     "directed_by": [
       "Lasse Hallstr\u00f6m"
-    ]
+    ],
+    "name": "An Unfinished Life"
   },
   {
     "id": "/en/anacondas_the_hunt_for_the_blood_orchid",
-    "name": "Anacondas: The Hunt for the Blood Orchid",
     "initial_release_date": "2004-08-25",
     "genre": [
       "Thriller",
@@ -2645,22 +2633,22 @@
     ],
     "directed_by": [
       "Dwight H. Little"
-    ]
+    ],
+    "name": "Anacondas: The Hunt for the Blood Orchid"
   },
   {
     "id": "/en/anal_pick-up",
-    "name": "Anal Pick-Up",
     "genre": [
       "Pornographic film",
       "Gay pornography"
     ],
     "directed_by": [
       "Decklin"
-    ]
+    ],
+    "name": "Anal Pick-Up"
   },
   {
     "id": "/en/analyze_that",
-    "name": "Analyze That",
     "initial_release_date": "2002-12-06",
     "genre": [
       "Buddy film",
@@ -2670,11 +2658,11 @@
     ],
     "directed_by": [
       "Harold Ramis"
-    ]
+    ],
+    "name": "Analyze That"
   },
   {
     "id": "/en/anamorph",
-    "name": "Anamorph",
     "genre": [
       "Psychological thriller",
       "Crime Fiction",
@@ -2685,11 +2673,11 @@
     ],
     "directed_by": [
       "H.S. Miller"
-    ]
+    ],
+    "name": "Anamorph"
   },
   {
     "id": "/en/anand_2004",
-    "name": "Anand",
     "initial_release_date": "2004-10-15",
     "genre": [
       "Musical",
@@ -2702,11 +2690,11 @@
     ],
     "directed_by": [
       "Sekhar Kammula"
-    ]
+    ],
+    "name": "Anand"
   },
   {
     "id": "/en/anbe_aaruyire",
-    "name": "Anbe Aaruyire",
     "initial_release_date": "2005-08-15",
     "genre": [
       "Romance Film",
@@ -2716,11 +2704,11 @@
     ],
     "directed_by": [
       "S. J. Surya"
-    ]
+    ],
+    "name": "Anbe Aaruyire"
   },
   {
     "id": "/en/anbe_sivam",
-    "name": "Love is God",
     "initial_release_date": "2003-01-14",
     "genre": [
       "Musical",
@@ -2734,11 +2722,11 @@
     ],
     "directed_by": [
       "Sundar C."
-    ]
+    ],
+    "name": "Love is God"
   },
   {
     "id": "/en/ancanar",
-    "name": "Ancanar",
     "genre": [
       "Fantasy",
       "Adventure Film",
@@ -2747,22 +2735,22 @@
     "directed_by": [
       "Sam R. Balcomb",
       "Raiya Corsiglia"
-    ]
+    ],
+    "name": "Ancanar"
   },
   {
     "id": "/en/anchorman_the_legend_of_ron_burgundy",
-    "name": "Anchorman: The Legend of Ron Burgundy",
     "initial_release_date": "2004-06-28",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Adam McKay"
-    ]
+    ],
+    "name": "Anchorman: The Legend of Ron Burgundy"
   },
   {
     "id": "/en/andaaz",
-    "name": "Andaaz",
     "initial_release_date": "2003-05-23",
     "genre": [
       "Musical",
@@ -2772,22 +2760,22 @@
     ],
     "directed_by": [
       "Raj Kanwar"
-    ]
+    ],
+    "name": "Andaaz"
   },
   {
     "id": "/en/andarivaadu",
-    "name": "Andarivaadu",
     "initial_release_date": "2005-06-03",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Srinu Vaitla"
-    ]
+    ],
+    "name": "Andarivaadu"
   },
   {
     "id": "/en/andhrawala",
-    "name": "Andhrawala",
     "initial_release_date": "2004-01-01",
     "genre": [
       "Adventure Film",
@@ -2798,11 +2786,11 @@
     "directed_by": [
       "Puri Jagannadh",
       "V.V.S. Ram"
-    ]
+    ],
+    "name": "Andhrawala"
   },
   {
     "id": "/en/ang_tanging_ina",
-    "name": "Ang Tanging Ina",
     "initial_release_date": "2003-05-28",
     "genre": [
       "Comedy",
@@ -2810,11 +2798,11 @@
     ],
     "directed_by": [
       "Wenn V. Deramas"
-    ]
+    ],
+    "name": "Ang Tanging Ina"
   },
   {
     "id": "/en/angel_eyes",
-    "name": "Angel Eyes",
     "initial_release_date": "2001-05-18",
     "genre": [
       "Romance Film",
@@ -2823,11 +2811,11 @@
     ],
     "directed_by": [
       "Luis Mandoki"
-    ]
+    ],
+    "name": "Angel Eyes"
   },
   {
     "id": "/en/angel-a",
-    "name": "Angel-A",
     "initial_release_date": "2005-12-21",
     "genre": [
       "Romance Film",
@@ -2838,11 +2826,11 @@
     ],
     "directed_by": [
       "Luc Besson"
-    ]
+    ],
+    "name": "Angel-A"
   },
   {
     "id": "/en/angels_and_demons_2008",
-    "name": "Angels &amp; Demons",
     "initial_release_date": "2009-05-04",
     "genre": [
       "Thriller",
@@ -2851,28 +2839,26 @@
     ],
     "directed_by": [
       "Ron Howard"
-    ]
+    ],
+    "name": "Angels &amp; Demons"
   },
   {
     "id": "/en/angels_and_virgins",
-    "name": "Virgin Territory",
-    "directed_by": [
-      "David Leland"
-    ],
+    "initial_release_date": "2007-12-17",
     "genre": [
       "Romance Film",
       "Comedy",
       "Adventure Film",
       "Drama"
     ],
-    "initial_release_date": "2007-12-17"
+    "directed_by": [
+      "David Leland"
+    ],
+    "name": "Virgin Territory"
   },
   {
     "id": "/en/angels_in_the_infield",
-    "name": "Angels in the Infield",
-    "directed_by": [
-      "Robert King"
-    ],
+    "initial_release_date": "2000-04-09",
     "genre": [
       "Fantasy",
       "Sports",
@@ -2881,54 +2867,53 @@
       "Heavenly Comedy",
       "Comedy"
     ],
-    "initial_release_date": "2000-04-09"
+    "directed_by": [
+      "Robert King"
+    ],
+    "name": "Angels in the Infield"
   },
   {
     "id": "/en/anger_management_2003",
-    "name": "Anger Management",
-    "directed_by": [
-      "Peter Segal"
-    ],
+    "initial_release_date": "2003-03-05",
     "genre": [
       "Black comedy",
       "Slapstick",
       "Comedy"
     ],
-    "initial_release_date": "2003-03-05"
+    "directed_by": [
+      "Peter Segal"
+    ],
+    "name": "Anger Management"
   },
   {
     "id": "/en/angli_the_movie",
-    "name": "Angli: The Movie",
-    "directed_by": [
-      "Mario Busietta"
-    ],
+    "initial_release_date": "2005-05-28",
     "genre": [
       "Thriller",
       "Action Film",
       "Crime Fiction"
     ],
-    "initial_release_date": "2005-05-28"
+    "directed_by": [
+      "Mario Busietta"
+    ],
+    "name": "Angli: The Movie"
   },
   {
     "id": "/en/animal_factory",
-    "name": "Animal Factory",
-    "directed_by": [
-      "Steve Buscemi"
-    ],
+    "initial_release_date": "2000-10-22",
     "genre": [
       "Crime Fiction",
       "Prison film",
       "Drama"
     ],
-    "initial_release_date": "2000-10-22"
+    "directed_by": [
+      "Steve Buscemi"
+    ],
+    "name": "Animal Factory"
   },
   {
     "id": "/en/anjaneya",
-    "name": "Anjaneya",
-    "directed_by": [
-      "Maharajan",
-      "N.Maharajan"
-    ],
+    "initial_release_date": "2003-10-24",
     "genre": [
       "Romance Film",
       "Crime Fiction",
@@ -2936,41 +2921,40 @@
       "World cinema",
       "Tamil cinema"
     ],
-    "initial_release_date": "2003-10-24"
+    "directed_by": [
+      "Maharajan",
+      "N.Maharajan"
+    ],
+    "name": "Anjaneya"
   },
   {
     "id": "/en/ankahee",
-    "name": "Ankahee",
-    "directed_by": [
-      "Vikram Bhatt"
-    ],
+    "initial_release_date": "2006-05-19",
     "genre": [
       "Romance Film",
       "Thriller",
       "Drama"
     ],
-    "initial_release_date": "2006-05-19"
+    "directed_by": [
+      "Vikram Bhatt"
+    ],
+    "name": "Ankahee"
   },
   {
     "id": "/en/annapolis_2006",
-    "name": "Annapolis",
-    "directed_by": [
-      "Justin Lin"
-    ],
     "genre": [
       "Romance Film",
       "Sports",
       "Drama"
-    ]
+    ],
+    "directed_by": [
+      "Justin Lin"
+    ],
+    "name": "Annapolis"
   },
   {
     "id": "/en/annavaram_2007",
-    "name": "Annavaram",
-    "directed_by": [
-      "Gridhar",
-      "Bhimaneni Srinivasa Rao",
-      "Sippy"
-    ],
+    "initial_release_date": "2006-12-29",
     "genre": [
       "Thriller",
       "Musical",
@@ -2979,14 +2963,16 @@
       "Tollywood",
       "World cinema"
     ],
-    "initial_release_date": "2006-12-29"
+    "directed_by": [
+      "Gridhar",
+      "Bhimaneni Srinivasa Rao",
+      "Sippy"
+    ],
+    "name": "Annavaram"
   },
   {
     "id": "/en/anniyan",
-    "name": "Anniyan",
-    "directed_by": [
-      "S. Shankar"
-    ],
+    "initial_release_date": "2005-06-10",
     "genre": [
       "Horror",
       "Short Film",
@@ -2996,14 +2982,14 @@
       "Action Film",
       "Drama"
     ],
-    "initial_release_date": "2005-06-10"
+    "directed_by": [
+      "S. Shankar"
+    ],
+    "name": "Anniyan"
   },
   {
     "id": "/en/another_gay_movie",
-    "name": "Another Gay Movie",
-    "directed_by": [
-      "Todd Stephens"
-    ],
+    "initial_release_date": "2006-04-28",
     "genre": [
       "Parody",
       "Coming of age",
@@ -3017,14 +3003,14 @@
       "Comedy",
       "Pornographic film"
     ],
-    "initial_release_date": "2006-04-28"
+    "directed_by": [
+      "Todd Stephens"
+    ],
+    "name": "Another Gay Movie"
   },
   {
     "id": "/en/ant_man",
-    "name": "Ant-Man",
-    "directed_by": [
-      "Peyton Reed"
-    ],
+    "initial_release_date": "2015-07-17",
     "genre": [
       "Thriller",
       "Science Fiction",
@@ -3032,92 +3018,92 @@
       "Superhero movie",
       "Comedy"
     ],
-    "initial_release_date": "2015-07-17"
+    "directed_by": [
+      "Peyton Reed"
+    ],
+    "name": "Ant-Man"
   },
   {
     "id": "/en/anthony_zimmer",
-    "name": "Anthony Zimmer",
-    "directed_by": [
-      "J\u00e9r\u00f4me Salle"
-    ],
+    "initial_release_date": "2005-04-27",
     "genre": [
       "Thriller",
       "Romance Film",
       "World cinema",
       "Crime Thriller"
     ],
-    "initial_release_date": "2005-04-27"
+    "directed_by": [
+      "J\u00e9r\u00f4me Salle"
+    ],
+    "name": "Anthony Zimmer"
   },
   {
     "id": "/en/antwone_fisher_2003",
-    "name": "Antwone Fisher",
-    "directed_by": [
-      "Denzel Washington"
-    ],
+    "initial_release_date": "2002-09-12",
     "genre": [
       "Romance Film",
       "Biographical film",
       "Drama"
     ],
-    "initial_release_date": "2002-09-12"
+    "directed_by": [
+      "Denzel Washington"
+    ],
+    "name": "Antwone Fisher"
   },
   {
     "id": "/en/anukokunda_oka_roju",
-    "name": "Anukokunda Oka Roju",
-    "directed_by": [
-      "Chandra Sekhar Yeleti"
-    ],
+    "initial_release_date": "2005-06-30",
     "genre": [
       "Thriller",
       "Horror",
       "Tollywood",
       "World cinema"
     ],
-    "initial_release_date": "2005-06-30"
+    "directed_by": [
+      "Chandra Sekhar Yeleti"
+    ],
+    "name": "Anukokunda Oka Roju"
   },
   {
     "id": "/en/anus_magillicutty",
-    "name": "Anus Magillicutty",
-    "directed_by": [
-      "Morey Fineburgh"
-    ],
+    "initial_release_date": "2003-04-15",
     "genre": [
       "B movie",
       "Romance Film",
       "Comedy"
     ],
-    "initial_release_date": "2003-04-15"
+    "directed_by": [
+      "Morey Fineburgh"
+    ],
+    "name": "Anus Magillicutty"
   },
   {
     "id": "/en/any_way_the_wind_blows",
-    "name": "Any Way the Wind Blows",
-    "directed_by": [
-      "Tom Barman"
-    ],
+    "initial_release_date": "2003-05-17",
     "genre": [
       "Comedy-drama"
     ],
-    "initial_release_date": "2003-05-17"
+    "directed_by": [
+      "Tom Barman"
+    ],
+    "name": "Any Way the Wind Blows"
   },
   {
     "id": "/en/anything_else",
-    "name": "Anything Else",
-    "directed_by": [
-      "Woody Allen"
-    ],
+    "initial_release_date": "2003-08-27",
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy"
     ],
-    "initial_release_date": "2003-08-27"
+    "directed_by": [
+      "Woody Allen"
+    ],
+    "name": "Anything Else"
   },
   {
     "id": "/en/apasionados",
-    "name": "Apasionados",
-    "directed_by": [
-      "Juan Jos\u00e9 Jusid"
-    ],
+    "initial_release_date": "2002-06-06",
     "genre": [
       "Romantic comedy",
       "Romance Film",
@@ -3125,14 +3111,14 @@
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2002-06-06"
+    "directed_by": [
+      "Juan Jos\u00e9 Jusid"
+    ],
+    "name": "Apasionados"
   },
   {
     "id": "/en/apocalypto",
-    "name": "Apocalypto",
-    "directed_by": [
-      "Mel Gibson"
-    ],
+    "initial_release_date": "2006-12-08",
     "genre": [
       "Action Film",
       "Adventure Film",
@@ -3140,14 +3126,14 @@
       "Thriller",
       "Drama"
     ],
-    "initial_release_date": "2006-12-08"
+    "directed_by": [
+      "Mel Gibson"
+    ],
+    "name": "Apocalypto"
   },
   {
     "id": "/en/aprils_shower",
-    "name": "April's Shower",
-    "directed_by": [
-      "Trish Doolan"
-    ],
+    "initial_release_date": "2006-01-13",
     "genre": [
       "Romantic comedy",
       "Indie film",
@@ -3160,14 +3146,14 @@
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2006-01-13"
+    "directed_by": [
+      "Trish Doolan"
+    ],
+    "name": "April's Shower"
   },
   {
     "id": "/en/aquamarine_2006",
-    "name": "Aquamarine",
-    "directed_by": [
-      "Elizabeth Allen Rosenbaum"
-    ],
+    "initial_release_date": "2006-02-26",
     "genre": [
       "Coming of age",
       "Teen film",
@@ -3177,27 +3163,27 @@
       "Fantasy Comedy",
       "Comedy"
     ],
-    "initial_release_date": "2006-02-26"
+    "directed_by": [
+      "Elizabeth Allen Rosenbaum"
+    ],
+    "name": "Aquamarine"
   },
   {
     "id": "/en/arabian_nights",
-    "name": "Arabian Nights",
-    "directed_by": [
-      "Steve Barron"
-    ],
+    "initial_release_date": "2000-04-30",
     "genre": [
       "Family",
       "Fantasy",
       "Adventure Film"
     ],
-    "initial_release_date": "2000-04-30"
+    "directed_by": [
+      "Steve Barron"
+    ],
+    "name": "Arabian Nights"
   },
   {
     "id": "/en/aragami",
-    "name": "Aragami",
-    "directed_by": [
-      "Ryuhei Kitamura"
-    ],
+    "initial_release_date": "2003-03-27",
     "genre": [
       "Thriller",
       "Action/Adventure",
@@ -3206,14 +3192,14 @@
       "Action Film",
       "Drama"
     ],
-    "initial_release_date": "2003-03-27"
+    "directed_by": [
+      "Ryuhei Kitamura"
+    ],
+    "name": "Aragami"
   },
   {
     "id": "/en/arahan",
-    "name": "Arahan",
-    "directed_by": [
-      "Ryoo Seung-wan"
-    ],
+    "initial_release_date": "2004-04-30",
     "genre": [
       "Action Film",
       "Comedy",
@@ -3221,28 +3207,28 @@
       "East Asian cinema",
       "World cinema"
     ],
-    "initial_release_date": "2004-04-30"
+    "directed_by": [
+      "Ryoo Seung-wan"
+    ],
+    "name": "Arahan"
   },
   {
     "id": "/en/ararat",
-    "name": "Ararat",
-    "directed_by": [
-      "Atom Egoyan"
-    ],
+    "initial_release_date": "2002-05-20",
     "genre": [
       "LGBT",
       "Political drama",
       "War film",
       "Drama"
     ],
-    "initial_release_date": "2002-05-20"
+    "directed_by": [
+      "Atom Egoyan"
+    ],
+    "name": "Ararat"
   },
   {
     "id": "/en/are_we_there_yet",
-    "name": "Are We There Yet",
-    "directed_by": [
-      "Brian Levant"
-    ],
+    "initial_release_date": "2005-01-21",
     "genre": [
       "Family",
       "Adventure Film",
@@ -3250,14 +3236,14 @@
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2005-01-21"
+    "directed_by": [
+      "Brian Levant"
+    ],
+    "name": "Are We There Yet"
   },
   {
     "id": "/en/arinthum_ariyamalum",
-    "name": "Arinthum Ariyamalum",
-    "directed_by": [
-      "Vishnuvardhan"
-    ],
+    "initial_release_date": "2005-05-20",
     "genre": [
       "Crime Fiction",
       "Family",
@@ -3267,67 +3253,67 @@
       "World cinema",
       "Drama"
     ],
-    "initial_release_date": "2005-05-20"
+    "directed_by": [
+      "Vishnuvardhan"
+    ],
+    "name": "Arinthum Ariyamalum"
   },
   {
     "id": "/en/arisan",
-    "name": "Arisan!",
-    "directed_by": [
-      "Nia Dinata"
-    ],
+    "initial_release_date": "2003-12-10",
     "genre": [
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2003-12-10"
+    "directed_by": [
+      "Nia Dinata"
+    ],
+    "name": "Arisan!"
   },
   {
     "id": "/en/arjun_2004",
-    "name": "Arjun",
-    "directed_by": [
-      "Gunasekhar",
-      "J. Hemambar"
-    ],
+    "initial_release_date": "2004-08-18",
     "genre": [
       "Action Film",
       "Tollywood",
       "World cinema"
     ],
-    "initial_release_date": "2004-08-18"
+    "directed_by": [
+      "Gunasekhar",
+      "J. Hemambar"
+    ],
+    "name": "Arjun"
   },
   {
     "id": "/en/armaan",
-    "name": "Armaan",
-    "directed_by": [
-      "Honey Irani"
-    ],
+    "initial_release_date": "2003-05-16",
     "genre": [
       "Romance Film",
       "Family",
       "Drama"
     ],
-    "initial_release_date": "2003-05-16"
+    "directed_by": [
+      "Honey Irani"
+    ],
+    "name": "Armaan"
   },
   {
     "id": "/en/around_the_bend",
-    "name": "Around the Bend",
-    "directed_by": [
-      "Jordan Roberts"
-    ],
+    "initial_release_date": "2004-10-08",
     "genre": [
       "Family Drama",
       "Comedy-drama",
       "Road movie",
       "Drama"
     ],
-    "initial_release_date": "2004-10-08"
+    "directed_by": [
+      "Jordan Roberts"
+    ],
+    "name": "Around the Bend"
   },
   {
     "id": "/en/around_the_world_in_80_days_2004",
-    "name": "Around the World in 80 Days",
-    "directed_by": [
-      "Frank Coraci"
-    ],
+    "initial_release_date": "2004-06-13",
     "genre": [
       "Adventure Film",
       "Action Film",
@@ -3336,11 +3322,20 @@
       "Romance Film",
       "Comedy"
     ],
-    "initial_release_date": "2004-06-13"
+    "directed_by": [
+      "Frank Coraci"
+    ],
+    "name": "Around the World in 80 Days"
   },
   {
     "id": "/en/art_of_the_devil_2",
-    "name": "Art of the Devil 2",
+    "initial_release_date": "2005-12-01",
+    "genre": [
+      "Horror",
+      "Slasher",
+      "Fantasy",
+      "Mystery"
+    ],
     "directed_by": [
       "Pasith Buranajan",
       "Seree Phongnithi",
@@ -3350,30 +3345,21 @@
       "Kongkiat Khomsiri",
       "Isara Nadee"
     ],
-    "genre": [
-      "Horror",
-      "Slasher",
-      "Fantasy",
-      "Mystery"
-    ],
-    "initial_release_date": "2005-12-01"
+    "name": "Art of the Devil 2"
   },
   {
     "id": "/en/art_school_confidential",
-    "name": "Art School Confidential",
+    "genre": [
+      "Comedy-drama"
+    ],
     "directed_by": [
       "Terry Zwigoff"
     ],
-    "genre": [
-      "Comedy-drama"
-    ]
+    "name": "Art School Confidential"
   },
   {
     "id": "/en/arul",
-    "name": "Arul",
-    "directed_by": [
-      "Hari"
-    ],
+    "initial_release_date": "2004-05-01",
     "genre": [
       "Musical",
       "Action Film",
@@ -3382,28 +3368,28 @@
       "Drama",
       "Musical Drama"
     ],
-    "initial_release_date": "2004-05-01"
+    "directed_by": [
+      "Hari"
+    ],
+    "name": "Arul"
   },
   {
     "id": "/en/arya_2007",
-    "name": "Aarya",
-    "directed_by": [
-      "Balasekaran"
-    ],
+    "initial_release_date": "2007-08-10",
     "genre": [
       "Romance Film",
       "Drama",
       "Tamil cinema",
       "World cinema"
     ],
-    "initial_release_date": "2007-08-10"
+    "directed_by": [
+      "Balasekaran"
+    ],
+    "name": "Aarya"
   },
   {
     "id": "/en/arya_2004",
-    "name": "Arya",
-    "directed_by": [
-      "Sukumar"
-    ],
+    "initial_release_date": "2004-05-07",
     "genre": [
       "Musical",
       "Romance Film",
@@ -3415,26 +3401,26 @@
       "World cinema",
       "Tollywood"
     ],
-    "initial_release_date": "2004-05-07"
+    "directed_by": [
+      "Sukumar"
+    ],
+    "name": "Arya"
   },
   {
     "id": "/en/aryan_2006",
-    "name": "Aryan: Unbreakable",
-    "directed_by": [
-      "Abhishek Kapoor"
-    ],
+    "initial_release_date": "2006-12-05",
     "genre": [
       "Action Film",
       "Drama"
     ],
-    "initial_release_date": "2006-12-05"
+    "directed_by": [
+      "Abhishek Kapoor"
+    ],
+    "name": "Aryan: Unbreakable"
   },
   {
     "id": "/en/as_it_is_in_heaven",
-    "name": "As It Is in Heaven",
-    "directed_by": [
-      "Kay Pollak"
-    ],
+    "initial_release_date": "2004-08-20",
     "genre": [
       "Musical",
       "Comedy",
@@ -3444,14 +3430,14 @@
       "Musical Drama",
       "World cinema"
     ],
-    "initial_release_date": "2004-08-20"
+    "directed_by": [
+      "Kay Pollak"
+    ],
+    "name": "As It Is in Heaven"
   },
   {
     "id": "/en/ashok",
-    "name": "Ashok",
-    "directed_by": [
-      "Surender Reddy"
-    ],
+    "initial_release_date": "2006-07-13",
     "genre": [
       "Action Film",
       "Romance Film",
@@ -3459,28 +3445,28 @@
       "Tollywood",
       "World cinema"
     ],
-    "initial_release_date": "2006-07-13"
+    "directed_by": [
+      "Surender Reddy"
+    ],
+    "name": "Ashok"
   },
   {
     "id": "/en/ask_the_dust_2006",
-    "name": "Ask the Dust",
-    "directed_by": [
-      "Robert Towne"
-    ],
+    "initial_release_date": "2006-02-02",
     "genre": [
       "Historical period drama",
       "Film adaptation",
       "Romance Film",
       "Drama"
     ],
-    "initial_release_date": "2006-02-02"
+    "directed_by": [
+      "Robert Towne"
+    ],
+    "name": "Ask the Dust"
   },
   {
     "id": "/en/asoka",
-    "name": "Ashoka the Great",
-    "directed_by": [
-      "Santosh Sivan"
-    ],
+    "initial_release_date": "2001-09-13",
     "genre": [
       "Action Film",
       "Romance Film",
@@ -3492,14 +3478,14 @@
       "Drama",
       "Musical Drama"
     ],
-    "initial_release_date": "2001-09-13"
+    "directed_by": [
+      "Santosh Sivan"
+    ],
+    "name": "Ashoka the Great"
   },
   {
     "id": "/en/assault_on_precinct_13_2005",
-    "name": "Assault on Precinct 13",
-    "directed_by": [
-      "Jean-Fran\u00e7ois Richet"
-    ],
+    "initial_release_date": "2005-01-19",
     "genre": [
       "Thriller",
       "Action Film",
@@ -3507,54 +3493,54 @@
       "Crime Fiction",
       "Drama"
     ],
-    "initial_release_date": "2005-01-19"
+    "directed_by": [
+      "Jean-Fran\u00e7ois Richet"
+    ],
+    "name": "Assault on Precinct 13"
   },
   {
     "id": "/en/astitva",
-    "name": "Astitva",
-    "directed_by": [
-      "Mahesh Manjrekar"
-    ],
+    "initial_release_date": "2000-10-06",
     "genre": [
       "Art film",
       "Bollywood",
       "World cinema",
       "Drama"
     ],
-    "initial_release_date": "2000-10-06"
+    "directed_by": [
+      "Mahesh Manjrekar"
+    ],
+    "name": "Astitva"
   },
   {
     "id": "/en/asylum_2005",
-    "name": "Asylum",
-    "directed_by": [
-      "David Mackenzie"
-    ],
+    "initial_release_date": "2005-08-12",
     "genre": [
       "Film adaptation",
       "Romance Film",
       "Thriller",
       "Drama"
     ],
-    "initial_release_date": "2005-08-12"
+    "directed_by": [
+      "David Mackenzie"
+    ],
+    "name": "Asylum"
   },
   {
     "id": "/en/atanarjuat",
-    "name": "Atanarjuat: The Fast Runner",
-    "directed_by": [
-      "Zacharias Kunuk"
-    ],
+    "initial_release_date": "2001-05-13",
     "genre": [
       "Fantasy",
       "Drama"
     ],
-    "initial_release_date": "2001-05-13"
+    "directed_by": [
+      "Zacharias Kunuk"
+    ],
+    "name": "Atanarjuat: The Fast Runner"
   },
   {
     "id": "/en/athadu",
-    "name": "Athadu",
-    "directed_by": [
-      "Trivikram Srinivas"
-    ],
+    "initial_release_date": "2005-08-10",
     "genre": [
       "Action Film",
       "Thriller",
@@ -3563,42 +3549,42 @@
       "Tollywood",
       "World cinema"
     ],
-    "initial_release_date": "2005-08-10"
+    "directed_by": [
+      "Trivikram Srinivas"
+    ],
+    "name": "Athadu"
   },
   {
     "id": "/en/atl_2006",
-    "name": "ATL",
-    "directed_by": [
-      "Chris Robinson"
-    ],
+    "initial_release_date": "2006-03-28",
     "genre": [
       "Coming of age",
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2006-03-28"
+    "directed_by": [
+      "Chris Robinson"
+    ],
+    "name": "ATL"
   },
   {
     "id": "/en/atlantis_the_lost_empire",
-    "name": "Atlantis: The Lost Empire",
-    "directed_by": [
-      "Gary Trousdale",
-      "Kirk Wise"
-    ],
+    "initial_release_date": "2001-06-03",
     "genre": [
       "Adventure Film",
       "Science Fiction",
       "Family",
       "Animation"
     ],
-    "initial_release_date": "2001-06-03"
+    "directed_by": [
+      "Gary Trousdale",
+      "Kirk Wise"
+    ],
+    "name": "Atlantis: The Lost Empire"
   },
   {
     "id": "/en/atonement_2007",
-    "name": "Atonement",
-    "directed_by": [
-      "Joe Wright"
-    ],
+    "initial_release_date": "2007-08-28",
     "genre": [
       "Romance Film",
       "War film",
@@ -3606,14 +3592,14 @@
       "Drama",
       "Music"
     ],
-    "initial_release_date": "2007-08-28"
+    "directed_by": [
+      "Joe Wright"
+    ],
+    "name": "Atonement"
   },
   {
     "id": "/en/attagasam",
-    "name": "Attahasam",
-    "directed_by": [
-      "Saran"
-    ],
+    "initial_release_date": "2004-11-12",
     "genre": [
       "Action Film",
       "Thriller",
@@ -3621,14 +3607,13 @@
       "World cinema",
       "Drama"
     ],
-    "initial_release_date": "2004-11-12"
+    "directed_by": [
+      "Saran"
+    ],
+    "name": "Attahasam"
   },
   {
     "id": "/en/attila_2001",
-    "name": "Attila",
-    "directed_by": [
-      "Dick Lowry"
-    ],
     "genre": [
       "Adventure Film",
       "History",
@@ -3636,38 +3621,38 @@
       "War film",
       "Historical fiction",
       "Biographical film"
-    ]
+    ],
+    "directed_by": [
+      "Dick Lowry"
+    ],
+    "name": "Attila"
   },
   {
     "id": "/en/austin_powers_goldmember",
-    "name": "Austin Powers: Goldmember",
-    "directed_by": [
-      "Jay Roach"
-    ],
+    "initial_release_date": "2002-07-22",
     "genre": [
       "Action Film",
       "Crime Fiction",
       "Comedy"
     ],
-    "initial_release_date": "2002-07-22"
+    "directed_by": [
+      "Jay Roach"
+    ],
+    "name": "Austin Powers: Goldmember"
   },
   {
     "id": "/en/australian_rules",
-    "name": "Australian Rules",
+    "genre": [
+      "Drama"
+    ],
     "directed_by": [
       "Paul Goldman"
     ],
-    "genre": [
-      "Drama"
-    ]
+    "name": "Australian Rules"
   },
   {
     "id": "/en/auto",
-    "name": "Oram Po",
-    "directed_by": [
-      "Pushkar",
-      "Gayatri"
-    ],
+    "initial_release_date": "2007-02-16",
     "genre": [
       "Action Film",
       "Comedy",
@@ -3675,29 +3660,30 @@
       "World cinema",
       "Drama"
     ],
-    "initial_release_date": "2007-02-16"
+    "directed_by": [
+      "Pushkar",
+      "Gayatri"
+    ],
+    "name": "Oram Po"
   },
   {
     "id": "/en/auto_focus",
-    "name": "Auto Focus",
-    "directed_by": [
-      "Paul Schrader",
-      "Larry Karaszewski"
-    ],
+    "initial_release_date": "2002-09-08",
     "genre": [
       "Biographical film",
       "Indie film",
       "Crime Fiction",
       "Drama"
     ],
-    "initial_release_date": "2002-09-08"
+    "directed_by": [
+      "Paul Schrader",
+      "Larry Karaszewski"
+    ],
+    "name": "Auto Focus"
   },
   {
     "id": "/en/autograph_2004",
-    "name": "Autograph",
-    "directed_by": [
-      "Cheran"
-    ],
+    "initial_release_date": "2004-02-14",
     "genre": [
       "Musical",
       "Romance Film",
@@ -3706,14 +3692,14 @@
       "Tamil cinema",
       "World cinema"
     ],
-    "initial_release_date": "2004-02-14"
+    "directed_by": [
+      "Cheran"
+    ],
+    "name": "Autograph"
   },
   {
     "id": "/en/avalon_2001",
-    "name": "Avalon",
-    "directed_by": [
-      "Mamoru Oshii"
-    ],
+    "initial_release_date": "2001-01-20",
     "genre": [
       "Science Fiction",
       "Thriller",
@@ -3722,28 +3708,28 @@
       "Fantasy",
       "Drama"
     ],
-    "initial_release_date": "2001-01-20"
+    "directed_by": [
+      "Mamoru Oshii"
+    ],
+    "name": "Avalon"
   },
   {
     "id": "/en/avatar_2009",
-    "name": "Avatar",
-    "directed_by": [
-      "James Cameron"
-    ],
+    "initial_release_date": "2009-12-10",
     "genre": [
       "Science Fiction",
       "Adventure Film",
       "Fantasy",
       "Action Film"
     ],
-    "initial_release_date": "2009-12-10"
+    "directed_by": [
+      "James Cameron"
+    ],
+    "name": "Avatar"
   },
   {
     "id": "/en/avenging_angelo",
-    "name": "Avenging Angelo",
-    "directed_by": [
-      "Martyn Burke"
-    ],
+    "initial_release_date": "2002-08-30",
     "genre": [
       "Action Film",
       "Romance Film",
@@ -3755,27 +3741,27 @@
       "Gangster Film",
       "Comedy"
     ],
-    "initial_release_date": "2002-08-30"
+    "directed_by": [
+      "Martyn Burke"
+    ],
+    "name": "Avenging Angelo"
   },
   {
     "id": "/en/awake_2007",
-    "name": "Awake",
-    "directed_by": [
-      "Joby Harold"
-    ],
+    "initial_release_date": "2007-11-30",
     "genre": [
       "Thriller",
       "Crime Fiction",
       "Mystery"
     ],
-    "initial_release_date": "2007-11-30"
+    "directed_by": [
+      "Joby Harold"
+    ],
+    "name": "Awake"
   },
   {
     "id": "/en/awara_paagal_deewana",
-    "name": "Awara Paagal Deewana",
-    "directed_by": [
-      "Vikram Bhatt"
-    ],
+    "initial_release_date": "2002-06-20",
     "genre": [
       "Action Film",
       "World cinema",
@@ -3787,14 +3773,14 @@
       "Drama",
       "Musical Drama"
     ],
-    "initial_release_date": "2002-06-20"
+    "directed_by": [
+      "Vikram Bhatt"
+    ],
+    "name": "Awara Paagal Deewana"
   },
   {
     "id": "/en/awesome_i_fuckin_shot_that",
-    "name": "Awesome; I Fuckin' Shot That!",
-    "directed_by": [
-      "Adam Yauch"
-    ],
+    "initial_release_date": "2006-01-06",
     "genre": [
       "Concert film",
       "Rockumentary",
@@ -3802,14 +3788,14 @@
       "Documentary film",
       "Indie film"
     ],
-    "initial_release_date": "2006-01-06"
+    "directed_by": [
+      "Adam Yauch"
+    ],
+    "name": "Awesome; I Fuckin' Shot That!"
   },
   {
     "id": "/en/azumi",
-    "name": "Azumi",
-    "directed_by": [
-      "Ryuhei Kitamura"
-    ],
+    "initial_release_date": "2003-05-10",
     "genre": [
       "Action Film",
       "Epic film",
@@ -3817,14 +3803,14 @@
       "Fantasy",
       "Thriller"
     ],
-    "initial_release_date": "2003-05-10"
+    "directed_by": [
+      "Ryuhei Kitamura"
+    ],
+    "name": "Azumi"
   },
   {
     "id": "/wikipedia/en_title/$00C6on_Flux_$0028film$0029",
-    "name": "\u00c6on Flux",
-    "directed_by": [
-      "Karyn Kusama"
-    ],
+    "initial_release_date": "2005-12-01",
     "genre": [
       "Science Fiction",
       "Dystopia",
@@ -3832,14 +3818,14 @@
       "Thriller",
       "Adventure Film"
     ],
-    "initial_release_date": "2005-12-01"
+    "directed_by": [
+      "Karyn Kusama"
+    ],
+    "name": "\u00c6on Flux"
   },
   {
     "id": "/en/baabul",
-    "name": "Baabul",
-    "directed_by": [
-      "Ravi Chopra"
-    ],
+    "initial_release_date": "2006-12-08",
     "genre": [
       "Musical",
       "Family",
@@ -3849,14 +3835,14 @@
       "Drama",
       "Musical Drama"
     ],
-    "initial_release_date": "2006-12-08"
+    "directed_by": [
+      "Ravi Chopra"
+    ],
+    "name": "Baabul"
   },
   {
     "id": "/en/baadasssss_cinema",
-    "name": "BaadAsssss Cinema",
-    "directed_by": [
-      "Isaac Julien"
-    ],
+    "initial_release_date": "2002-08-14",
     "genre": [
       "Indie film",
       "Documentary film",
@@ -3865,14 +3851,14 @@
       "Film &amp; Television History",
       "Biographical film"
     ],
-    "initial_release_date": "2002-08-14"
+    "directed_by": [
+      "Isaac Julien"
+    ],
+    "name": "BaadAsssss Cinema"
   },
   {
     "id": "/en/baadasssss",
-    "name": "Baadasssss!",
-    "directed_by": [
-      "Mario Van Peebles"
-    ],
+    "initial_release_date": "2003-09-07",
     "genre": [
       "Indie film",
       "Biographical film",
@@ -3880,79 +3866,79 @@
       "Historical period drama",
       "Drama"
     ],
-    "initial_release_date": "2003-09-07"
+    "directed_by": [
+      "Mario Van Peebles"
+    ],
+    "name": "Baadasssss!"
   },
   {
     "id": "/en/babel_2006",
-    "name": "Babel",
-    "directed_by": [
-      "Alejandro Gonz\u00e1lez I\u00f1\u00e1rritu"
-    ],
+    "initial_release_date": "2006-05-23",
     "genre": [
       "Indie film",
       "Political drama",
       "Drama"
     ],
-    "initial_release_date": "2006-05-23"
+    "directed_by": [
+      "Alejandro Gonz\u00e1lez I\u00f1\u00e1rritu"
+    ],
+    "name": "Babel"
   },
   {
     "id": "/en/baby_boy",
-    "name": "Baby Boy",
-    "directed_by": [
-      "John Singleton"
-    ],
+    "initial_release_date": "2001-06-21",
     "genre": [
       "Coming of age",
       "Crime Fiction",
       "Drama"
     ],
-    "initial_release_date": "2001-06-21"
+    "directed_by": [
+      "John Singleton"
+    ],
+    "name": "Baby Boy"
   },
   {
     "id": "/en/back_by_midnight",
-    "name": "Back by Midnight",
-    "directed_by": [
-      "Harry Basil"
-    ],
+    "initial_release_date": "2005-01-25",
     "genre": [
       "Prison film",
       "Comedy"
     ],
-    "initial_release_date": "2005-01-25"
+    "directed_by": [
+      "Harry Basil"
+    ],
+    "name": "Back by Midnight"
   },
   {
     "id": "/en/back_to_school_with_franklin",
-    "name": "Back to School with Franklin",
-    "directed_by": [
-      "Arna Selznick"
-    ],
+    "initial_release_date": "2003-08-19",
     "genre": [
       "Family",
       "Animation",
       "Educational film"
     ],
-    "initial_release_date": "2003-08-19"
+    "directed_by": [
+      "Arna Selznick"
+    ],
+    "name": "Back to School with Franklin"
   },
   {
     "id": "/en/bad_boys_ii",
-    "name": "Bad Boys II",
-    "directed_by": [
-      "Michael Bay"
-    ],
+    "initial_release_date": "2003-07-09",
     "genre": [
       "Action Film",
       "Crime Fiction",
       "Thriller",
       "Comedy"
     ],
-    "initial_release_date": "2003-07-09"
+    "directed_by": [
+      "Michael Bay"
+    ],
+    "name": "Bad Boys II"
   },
   {
     "id": "/wikipedia/ru_id/1598664",
-    "name": "Bad Company",
-    "directed_by": [
-      "Joel Schumacher"
-    ],
+    "initial_release_date": "2002-04-26",
     "genre": [
       "Spy film",
       "Action/Adventure",
@@ -3960,62 +3946,62 @@
       "Thriller",
       "Comedy"
     ],
-    "initial_release_date": "2002-04-26"
+    "directed_by": [
+      "Joel Schumacher"
+    ],
+    "name": "Bad Company"
   },
   {
     "id": "/en/bad_education",
-    "name": "Bad Education",
-    "directed_by": [
-      "Pedro Almod\u00f3var"
-    ],
+    "initial_release_date": "2004-03-19",
     "genre": [
       "Mystery",
       "Drama"
     ],
-    "initial_release_date": "2004-03-19"
+    "directed_by": [
+      "Pedro Almod\u00f3var"
+    ],
+    "name": "Bad Education"
   },
   {
     "id": "/en/bad_eggs",
-    "name": "Bad Eggs",
+    "genre": [
+      "Comedy"
+    ],
     "directed_by": [
       "Tony Martin"
     ],
-    "genre": [
-      "Comedy"
-    ]
+    "name": "Bad Eggs"
   },
   {
     "id": "/en/bad_news_bears",
-    "name": "Bad News Bears",
-    "directed_by": [
-      "Richard Linklater"
-    ],
+    "initial_release_date": "2005-07-22",
     "genre": [
       "Family",
       "Sports",
       "Comedy"
     ],
-    "initial_release_date": "2005-07-22"
+    "directed_by": [
+      "Richard Linklater"
+    ],
+    "name": "Bad News Bears"
   },
   {
     "id": "/en/bad_santa",
-    "name": "Bad Santa",
-    "directed_by": [
-      "Terry Zwigoff"
-    ],
+    "initial_release_date": "2003-11-26",
     "genre": [
       "Black comedy",
       "Crime Fiction",
       "Comedy"
     ],
-    "initial_release_date": "2003-11-26"
+    "directed_by": [
+      "Terry Zwigoff"
+    ],
+    "name": "Bad Santa"
   },
   {
     "id": "/en/badal",
-    "name": "Badal",
-    "directed_by": [
-      "Raj Kanwar"
-    ],
+    "initial_release_date": "2000-02-11",
     "genre": [
       "Musical",
       "Romance Film",
@@ -4023,30 +4009,29 @@
       "Drama",
       "Musical Drama"
     ],
-    "initial_release_date": "2000-02-11"
+    "directed_by": [
+      "Raj Kanwar"
+    ],
+    "name": "Badal"
   },
   {
     "id": "/en/baghdad_er",
-    "name": "Baghdad ER",
-    "directed_by": [
-      "Jon Alpert",
-      "Matthew O'Neill"
-    ],
+    "initial_release_date": "2006-08-29",
     "genre": [
       "Documentary film",
       "Culture &amp; Society",
       "War film",
       "Biographical film"
     ],
-    "initial_release_date": "2006-08-29"
+    "directed_by": [
+      "Jon Alpert",
+      "Matthew O'Neill"
+    ],
+    "name": "Baghdad ER"
   },
   {
     "id": "/en/baise_moi",
-    "name": "Baise Moi",
-    "directed_by": [
-      "Virginie Despentes",
-      "Coralie Trinh Thi"
-    ],
+    "initial_release_date": "2000-06-28",
     "genre": [
       "Erotica",
       "Thriller",
@@ -4056,14 +4041,15 @@
       "Drama",
       "Road movie"
     ],
-    "initial_release_date": "2000-06-28"
+    "directed_by": [
+      "Virginie Despentes",
+      "Coralie Trinh Thi"
+    ],
+    "name": "Baise Moi"
   },
   {
     "id": "/en/bait_2000",
-    "name": "Bait",
-    "directed_by": [
-      "Antoine Fuqua"
-    ],
+    "initial_release_date": "2000-09-15",
     "genre": [
       "Thriller",
       "Crime Fiction",
@@ -4074,27 +4060,27 @@
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2000-09-15"
+    "directed_by": [
+      "Antoine Fuqua"
+    ],
+    "name": "Bait"
   },
   {
     "id": "/en/bala_2002",
-    "name": "Bala",
-    "directed_by": [
-      "Deepak"
-    ],
+    "initial_release_date": "2002-12-13",
     "genre": [
       "Drama",
       "Tamil cinema",
       "World cinema"
     ],
-    "initial_release_date": "2002-12-13"
+    "directed_by": [
+      "Deepak"
+    ],
+    "name": "Bala"
   },
   {
     "id": "/en/ballistic_ecks_vs_sever",
-    "name": "Ballistic: Ecks vs. Sever",
-    "directed_by": [
-      "Wych Kaosayananda"
-    ],
+    "initial_release_date": "2002-09-20",
     "genre": [
       "Spy film",
       "Thriller",
@@ -4104,42 +4090,42 @@
       "Action Thriller",
       "Glamorized Spy Film"
     ],
-    "initial_release_date": "2002-09-20"
+    "directed_by": [
+      "Wych Kaosayananda"
+    ],
+    "name": "Ballistic: Ecks vs. Sever"
   },
   {
     "id": "/en/balu_abcdefg",
-    "name": "Balu ABCDEFG",
-    "directed_by": [
-      "A. Karunakaran"
-    ],
+    "initial_release_date": "2005-01-06",
     "genre": [
       "Romance Film",
       "Tollywood",
       "World cinema",
       "Drama"
     ],
-    "initial_release_date": "2005-01-06"
+    "directed_by": [
+      "A. Karunakaran"
+    ],
+    "name": "Balu ABCDEFG"
   },
   {
     "id": "/en/balzac_and_the_little_chinese_seamstress_2002",
-    "name": "The Little Chinese Seamstress",
-    "directed_by": [
-      "Dai Sijie"
-    ],
+    "initial_release_date": "2002-05-16",
     "genre": [
       "Romance Film",
       "Comedy-drama",
       "Biographical film",
       "Drama"
     ],
-    "initial_release_date": "2002-05-16"
+    "directed_by": [
+      "Dai Sijie"
+    ],
+    "name": "The Little Chinese Seamstress"
   },
   {
     "id": "/en/bambi_ii",
-    "name": "Bambi II",
-    "directed_by": [
-      "Brian Pimental"
-    ],
+    "initial_release_date": "2006-01-26",
     "genre": [
       "Animation",
       "Family",
@@ -4148,14 +4134,14 @@
       "Children's/Family",
       "Family-Oriented Adventure"
     ],
-    "initial_release_date": "2006-01-26"
+    "directed_by": [
+      "Brian Pimental"
+    ],
+    "name": "Bambi II"
   },
   {
     "id": "/en/bamboozled",
-    "name": "Bamboozled",
-    "directed_by": [
-      "Spike Lee"
-    ],
+    "initial_release_date": "2000-10-06",
     "genre": [
       "Satire",
       "Indie film",
@@ -4166,15 +4152,14 @@
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2000-10-06"
+    "directed_by": [
+      "Spike Lee"
+    ],
+    "name": "Bamboozled"
   },
   {
     "id": "/en/bandidas",
-    "name": "Bandidas",
-    "directed_by": [
-      "Espen Sandberg",
-      "Joachim R\u00f8nning"
-    ],
+    "initial_release_date": "2006-01-18",
     "genre": [
       "Western",
       "Action Film",
@@ -4183,14 +4168,15 @@
       "Comedy",
       "Adventure Film"
     ],
-    "initial_release_date": "2006-01-18"
+    "directed_by": [
+      "Espen Sandberg",
+      "Joachim R\u00f8nning"
+    ],
+    "name": "Bandidas"
   },
   {
     "id": "/en/bandits",
-    "name": "Bandits",
-    "directed_by": [
-      "Barry Levinson"
-    ],
+    "initial_release_date": "2001-10-12",
     "genre": [
       "Romantic comedy",
       "Crime Fiction",
@@ -4200,136 +4186,145 @@
       "Comedy",
       "Drama"
     ],
-    "initial_release_date": "2001-10-12"
+    "directed_by": [
+      "Barry Levinson"
+    ],
+    "name": "Bandits"
   },
   {
     "id": "/en/bangaram",
-    "name": "Bangaram",
-    "directed_by": [
-      "Dharani"
-    ],
+    "initial_release_date": "2006-05-03",
     "genre": [
       "Action Film",
       "Crime Fiction",
       "Drama"
     ],
-    "initial_release_date": "2006-05-03"
+    "directed_by": [
+      "Dharani"
+    ],
+    "name": "Bangaram"
   },
   {
     "id": "/en/bangkok_loco",
-    "name": "Bangkok Loco",
-    "directed_by": [
-      "Pornchai Hongrattanaporn"
-    ],
+    "initial_release_date": "2004-10-07",
     "genre": [
       "Musical",
       "Musical comedy",
       "Comedy"
     ],
-    "initial_release_date": "2004-10-07"
+    "directed_by": [
+      "Pornchai Hongrattanaporn"
+    ],
+    "name": "Bangkok Loco"
   },
   {
     "id": "/en/baran",
-    "name": "Baran",
-    "directed_by": [
-      "Majid Majidi"
-    ],
+    "initial_release_date": "2001-01-31",
     "genre": [
       "Romance Film",
       "Adventure Film",
       "World cinema",
       "Drama"
     ],
-    "initial_release_date": "2001-01-31"
+    "directed_by": [
+      "Majid Majidi"
+    ],
+    "name": "Baran"
   },
   {
     "id": "/en/barbershop",
-    "name": "Barbershop",
-    "directed_by": [
-      "Tim Story"
-    ],
+    "initial_release_date": "2002-08-07",
     "genre": [
       "Ensemble Film",
       "Workplace Comedy",
       "Comedy"
     ],
-    "initial_release_date": "2002-08-07"
+    "directed_by": [
+      "Tim Story"
+    ],
+    "name": "Barbershop"
   },
   {
     "id": "/en/bareback_mountain",
-    "name": "Bareback Mountain",
-    "directed_by": [
-      "Afton Nills"
-    ],
     "genre": [
       "Pornographic film",
       "Gay pornography"
-    ]
+    ],
+    "directed_by": [
+      "Afton Nills"
+    ],
+    "name": "Bareback Mountain"
   },
   {
     "id": "/wikipedia/pt/Barnyard",
-    "name": "Barnyard",
-    "directed_by": [
-      "Steve Oedekerk"
-    ],
+    "initial_release_date": "2006-08-04",
     "genre": [
       "Family",
       "Animation",
       "Comedy"
     ],
-    "initial_release_date": "2006-08-04"
+    "directed_by": [
+      "Steve Oedekerk"
+    ],
+    "name": "Barnyard"
   },
   {
     "id": "/en/barricade_2007",
-    "name": "Barricade",
-    "directed_by": [
-      "Timo Rose"
-    ],
     "genre": [
       "Slasher",
       "Horror"
-    ]
+    ],
+    "directed_by": [
+      "Timo Rose"
+    ],
+    "name": "Barricade"
   },
   {
     "id": "/en/bas_itna_sa_khwaab_hai",
-    "name": "Bas Itna Sa Khwaab Hai",
-    "directed_by": [
-      "Goldie Behl"
-    ],
+    "initial_release_date": "2001-07-06",
     "genre": [
       "Romance Film",
       "Bollywood",
       "World cinema"
     ],
-    "initial_release_date": "2001-07-06"
+    "directed_by": [
+      "Goldie Behl"
+    ],
+    "name": "Bas Itna Sa Khwaab Hai"
   },
   {
     "id": "/en/basic_2003",
-    "name": "Basic",
-    "directed_by": [
-      "John McTiernan"
-    ],
+    "initial_release_date": "2003-03-28",
     "genre": [
       "Thriller",
       "Action Film",
       "Mystery"
     ],
-    "initial_release_date": "2003-03-28"
+    "directed_by": [
+      "John McTiernan"
+    ],
+    "name": "Basic"
   },
   {
-    "genre": [
-      "Drama"
-    ],
-    "name": "Basic emotions",
+    "id": "/en/basic_emotions",
     "directed_by": [
       "Thomas Moon",
       "Julie Pham",
       "Georgia Lee"
     ],
-    "id": "/en/basic_emotions",
-    "initial_release_date": "2004-09-09"
+    "initial_release_date": "2004-09-09",
+    "name": "Basic emotions",
+    "genre": [
+      "Drama"
+    ]
   },
   {
+    "id": "/en/basic_instinct_2",
+    "directed_by": [
+      "Michael Caton-Jones"
+    ],
+    "initial_release_date": "2006-03-31",
+    "name": "Basic Instinct 2",
     "genre": [
       "Thriller",
       "Erotic thriller",
@@ -4337,68 +4332,69 @@
       "Mystery",
       "Crime Fiction",
       "Horror"
-    ],
-    "name": "Basic Instinct 2",
-    "directed_by": [
-      "Michael Caton-Jones"
-    ],
-    "id": "/en/basic_instinct_2",
-    "initial_release_date": "2006-03-31"
+    ]
   },
   {
-    "genre": [
-      "Drama"
-    ],
-    "name": "Battle In Heaven",
+    "id": "/en/batalla_en_el_cielo",
     "directed_by": [
       "Carlos Reygadas"
     ],
-    "id": "/en/batalla_en_el_cielo",
-    "initial_release_date": "2005-05-15"
+    "initial_release_date": "2005-05-15",
+    "name": "Battle In Heaven",
+    "genre": [
+      "Drama"
+    ]
   },
   {
+    "id": "/en/batman_begins",
+    "directed_by": [
+      "Christopher Nolan"
+    ],
+    "initial_release_date": "2005-06-10",
+    "name": "Batman Begins",
     "genre": [
       "Action Film",
       "Crime Fiction",
       "Adventure Film",
       "Film noir",
       "Drama"
-    ],
-    "name": "Batman Begins",
-    "directed_by": [
-      "Christopher Nolan"
-    ],
-    "id": "/en/batman_begins",
-    "initial_release_date": "2005-06-10"
+    ]
   },
   {
+    "id": "/en/batman_beyond_return_of_the_joker",
+    "directed_by": [
+      "Curt Geda"
+    ],
+    "initial_release_date": "2000-12-12",
+    "name": "Batman Beyond: Return of the Joker",
     "genre": [
       "Science Fiction",
       "Animation",
       "Superhero movie",
       "Action Film"
-    ],
-    "name": "Batman Beyond: Return of the Joker",
-    "directed_by": [
-      "Curt Geda"
-    ],
-    "id": "/en/batman_beyond_return_of_the_joker",
-    "initial_release_date": "2000-12-12"
+    ]
   },
   {
+    "id": "/en/batman_dead_end",
+    "directed_by": [
+      "Sandy Collora"
+    ],
+    "initial_release_date": "2003-07-19",
+    "name": "Batman: Dead End",
     "genre": [
       "Indie film",
       "Short Film",
       "Fan film"
-    ],
-    "name": "Batman: Dead End",
-    "directed_by": [
-      "Sandy Collora"
-    ],
-    "id": "/en/batman_dead_end",
-    "initial_release_date": "2003-07-19"
+    ]
   },
   {
+    "id": "/en/batman_mystery_of_the_batwoman",
+    "directed_by": [
+      "Curt Geda",
+      "Tim Maltby"
+    ],
+    "initial_release_date": "2003-10-21",
+    "name": "Batman: Mystery of the Batwoman",
     "genre": [
       "Animated cartoon",
       "Animation",
@@ -4408,31 +4404,30 @@
       "Fantasy",
       "Short Film",
       "Fantasy Adventure"
-    ],
-    "name": "Batman: Mystery of the Batwoman",
-    "directed_by": [
-      "Curt Geda",
-      "Tim Maltby"
-    ],
-    "id": "/en/batman_mystery_of_the_batwoman",
-    "initial_release_date": "2003-10-21"
+    ]
   },
   {
+    "id": "/en/batoru_rowaiaru_ii_chinkonka",
+    "directed_by": [
+      "Kenta Fukasaku",
+      "Kinji Fukasaku"
+    ],
+    "initial_release_date": "2003-07-05",
+    "name": "Battle Royale II: Requiem",
     "genre": [
       "Thriller",
       "Action Film",
       "Science Fiction",
       "Drama"
-    ],
-    "name": "Battle Royale II: Requiem",
-    "directed_by": [
-      "Kenta Fukasaku",
-      "Kinji Fukasaku"
-    ],
-    "id": "/en/batoru_rowaiaru_ii_chinkonka",
-    "initial_release_date": "2003-07-05"
+    ]
   },
   {
+    "id": "/en/battlefield_baseball",
+    "directed_by": [
+      "Y\u016bdai Yamaguchi"
+    ],
+    "initial_release_date": "2003-07-19",
+    "name": "Battlefield Baseball",
     "genre": [
       "Martial Arts Film",
       "Horror",
@@ -4442,52 +4437,52 @@
       "Japanese Movies",
       "Horror comedy",
       "Comedy"
-    ],
-    "name": "Battlefield Baseball",
-    "directed_by": [
-      "Y\u016bdai Yamaguchi"
-    ],
-    "id": "/en/battlefield_baseball",
-    "initial_release_date": "2003-07-19"
+    ]
   },
   {
-    "genre": [
-      "Documentary film"
-    ],
-    "name": "BBS: The Documentary",
+    "id": "/en/bbs_the_documentary",
     "directed_by": [
       "Jason Scott Sadofsky"
     ],
-    "id": "/en/bbs_the_documentary"
+    "name": "BBS: The Documentary",
+    "genre": [
+      "Documentary film"
+    ]
   },
   {
+    "id": "/en/be_cool",
+    "directed_by": [
+      "F. Gary Gray"
+    ],
+    "initial_release_date": "2005-03-04",
+    "name": "Be Cool",
     "genre": [
       "Crime Fiction",
       "Crime Comedy",
       "Comedy"
-    ],
-    "name": "Be Cool",
-    "directed_by": [
-      "F. Gary Gray"
-    ],
-    "id": "/en/be_cool",
-    "initial_release_date": "2005-03-04"
+    ]
   },
   {
+    "id": "/en/be_kind_rewind",
+    "directed_by": [
+      "Michel Gondry"
+    ],
+    "initial_release_date": "2008-01-20",
+    "name": "Be Kind Rewind",
     "genre": [
       "Farce",
       "Comedy of Errors",
       "Comedy",
       "Drama"
-    ],
-    "name": "Be Kind Rewind",
-    "directed_by": [
-      "Michel Gondry"
-    ],
-    "id": "/en/be_kind_rewind",
-    "initial_release_date": "2008-01-20"
+    ]
   },
   {
+    "id": "/en/be_with_me",
+    "directed_by": [
+      "Eric Khoo"
+    ],
+    "initial_release_date": "2005-05-12",
+    "name": "Be with Me",
     "genre": [
       "Indie film",
       "LGBT",
@@ -4495,137 +4490,137 @@
       "Art film",
       "Romance Film",
       "Drama"
-    ],
-    "name": "Be with Me",
-    "directed_by": [
-      "Eric Khoo"
-    ],
-    "id": "/en/be_with_me",
-    "initial_release_date": "2005-05-12"
+    ]
   },
   {
+    "id": "/en/beah_a_black_woman_speaks",
+    "directed_by": [
+      "Lisa Gay Hamilton"
+    ],
+    "initial_release_date": "2003-08-22",
+    "name": "Beah: A Black Woman Speaks",
     "genre": [
       "Documentary film",
       "History",
       "Biographical film"
-    ],
-    "name": "Beah: A Black Woman Speaks",
-    "directed_by": [
-      "Lisa Gay Hamilton"
-    ],
-    "id": "/en/beah_a_black_woman_speaks",
-    "initial_release_date": "2003-08-22"
+    ]
   },
   {
+    "id": "/en/beastly_boyz",
+    "directed_by": [
+      "David DeCoteau"
+    ],
+    "name": "Beastly Boyz",
     "genre": [
       "LGBT",
       "Horror",
       "B movie",
       "Teen film"
-    ],
-    "name": "Beastly Boyz",
-    "directed_by": [
-      "David DeCoteau"
-    ],
-    "id": "/en/beastly_boyz"
+    ]
   },
   {
-    "genre": [
-      "Comedy"
-    ],
-    "name": "Beauty Shop",
+    "id": "/en/beauty_shop",
     "directed_by": [
       "Bille Woodruff"
     ],
-    "id": "/en/beauty_shop",
-    "initial_release_date": "2005-03-24"
+    "initial_release_date": "2005-03-24",
+    "name": "Beauty Shop",
+    "genre": [
+      "Comedy"
+    ]
   },
   {
+    "id": "/en/bedazzled_2000",
+    "directed_by": [
+      "Harold Ramis"
+    ],
+    "initial_release_date": "2000-10-19",
+    "name": "Bedazzled",
     "genre": [
       "Romantic comedy",
       "Fantasy",
       "Black comedy",
       "Romance Film",
       "Comedy"
-    ],
-    "name": "Bedazzled",
-    "directed_by": [
-      "Harold Ramis"
-    ],
-    "id": "/en/bedazzled_2000",
-    "initial_release_date": "2000-10-19"
+    ]
   },
   {
+    "id": "/en/bee_movie",
+    "directed_by": [
+      "Steve Hickner",
+      "Simon J. Smith"
+    ],
+    "initial_release_date": "2007-10-28",
+    "name": "Bee Movie",
     "genre": [
       "Family",
       "Adventure Film",
       "Animation",
       "Comedy"
-    ],
-    "name": "Bee Movie",
-    "directed_by": [
-      "Steve Hickner",
-      "Simon J. Smith"
-    ],
-    "id": "/en/bee_movie",
-    "initial_release_date": "2007-10-28"
+    ]
   },
   {
+    "id": "/en/bee_season_2005",
+    "directed_by": [
+      "David Siegel",
+      "Scott McGehee"
+    ],
+    "initial_release_date": "2005-11-11",
+    "name": "Bee Season",
     "genre": [
       "Film adaptation",
       "Coming of age",
       "Family Drama",
       "Drama"
-    ],
-    "name": "Bee Season",
-    "directed_by": [
-      "David Siegel",
-      "Scott McGehee"
-    ],
-    "id": "/en/bee_season_2005",
-    "initial_release_date": "2005-11-11"
+    ]
   },
   {
+    "id": "/en/beer_league",
+    "directed_by": [
+      "Frank Sebastiano"
+    ],
+    "initial_release_date": "2006-09-15",
+    "name": "Artie Lange's Beer League",
     "genre": [
       "Sports",
       "Indie film",
       "Comedy"
-    ],
-    "name": "Artie Lange's Beer League",
-    "directed_by": [
-      "Frank Sebastiano"
-    ],
-    "id": "/en/beer_league",
-    "initial_release_date": "2006-09-15"
+    ]
   },
   {
+    "id": "/en/beer_the_movie",
+    "directed_by": [
+      "Peter Hoare"
+    ],
+    "initial_release_date": "2006-05-16",
+    "name": "Beer: The Movie",
     "genre": [
       "Indie film",
       "Cult film",
       "Parody",
       "Bloopers &amp; Candid Camera",
       "Comedy"
-    ],
-    "name": "Beer: The Movie",
-    "directed_by": [
-      "Peter Hoare"
-    ],
-    "id": "/en/beer_the_movie",
-    "initial_release_date": "2006-05-16"
+    ]
   },
   {
-    "genre": [
-      "Absurdism",
-      "Comedy"
-    ],
-    "name": "Beerfest",
+    "id": "/en/beerfest",
     "directed_by": [
       "Jay Chandrasekhar"
     ],
-    "id": "/en/beerfest",
-    "initial_release_date": "2006-08-25"
+    "initial_release_date": "2006-08-25",
+    "name": "Beerfest",
+    "genre": [
+      "Absurdism",
+      "Comedy"
+    ]
   },
   {
+    "id": "/en/before_night_falls_2001",
+    "directed_by": [
+      "Julian Schnabel"
+    ],
+    "initial_release_date": "2000-09-03",
+    "name": "Before Night Falls",
     "genre": [
       "LGBT",
       "Gay Themed",
@@ -4634,69 +4629,69 @@
       "Gay Interest",
       "Biographical film",
       "Drama"
-    ],
-    "name": "Before Night Falls",
-    "directed_by": [
-      "Julian Schnabel"
-    ],
-    "id": "/en/before_night_falls_2001",
-    "initial_release_date": "2000-09-03"
+    ]
   },
   {
+    "id": "/en/before_sunset",
+    "directed_by": [
+      "Richard Linklater"
+    ],
+    "initial_release_date": "2004-02-10",
+    "name": "Before Sunset",
     "genre": [
       "Romance Film",
       "Indie film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Before Sunset",
-    "directed_by": [
-      "Richard Linklater"
-    ],
-    "id": "/en/before_sunset",
-    "initial_release_date": "2004-02-10"
+    ]
   },
   {
+    "id": "/en/behind_enemy_lines",
+    "directed_by": [
+      "John Moore"
+    ],
+    "initial_release_date": "2001-11-17",
+    "name": "Behind Enemy Lines",
     "genre": [
       "Thriller",
       "Action Film",
       "War film",
       "Action/Adventure",
       "Drama"
-    ],
-    "name": "Behind Enemy Lines",
-    "directed_by": [
-      "John Moore"
-    ],
-    "id": "/en/behind_enemy_lines",
-    "initial_release_date": "2001-11-17"
+    ]
   },
   {
+    "id": "/en/behind_the_mask_2006",
+    "directed_by": [
+      "Shannon Keith"
+    ],
+    "initial_release_date": "2006-03-21",
+    "name": "Behind the Mask",
     "genre": [
       "Documentary film",
       "Indie film",
       "Political cinema",
       "Crime Fiction"
-    ],
-    "name": "Behind the Mask",
-    "directed_by": [
-      "Shannon Keith"
-    ],
-    "id": "/en/behind_the_mask_2006",
-    "initial_release_date": "2006-03-21"
+    ]
   },
   {
-    "genre": [
-      "Drama"
-    ],
-    "name": "Behind the Sun",
+    "id": "/en/behind_the_sun_2001",
     "directed_by": [
       "Walter Salles"
     ],
-    "id": "/en/behind_the_sun_2001",
-    "initial_release_date": "2001-09-06"
+    "initial_release_date": "2001-09-06",
+    "name": "Behind the Sun",
+    "genre": [
+      "Drama"
+    ]
   },
   {
+    "id": "/en/being_cyrus",
+    "directed_by": [
+      "Homi Adajania"
+    ],
+    "initial_release_date": "2005-11-08",
+    "name": "Being Cyrus",
     "genre": [
       "Thriller",
       "Black comedy",
@@ -4704,53 +4699,53 @@
       "Psychological thriller",
       "Crime Fiction",
       "Drama"
-    ],
-    "name": "Being Cyrus",
-    "directed_by": [
-      "Homi Adajania"
-    ],
-    "id": "/en/being_cyrus",
-    "initial_release_date": "2005-11-08"
+    ]
   },
   {
+    "id": "/en/being_julia",
+    "directed_by": [
+      "Istv\u00e1n Szab\u00f3"
+    ],
+    "initial_release_date": "2004-09-03",
+    "name": "Being Julia",
     "genre": [
       "Romance Film",
       "Romantic comedy",
       "Comedy-drama",
       "Comedy",
       "Drama"
-    ],
-    "name": "Being Julia",
-    "directed_by": [
-      "Istv\u00e1n Szab\u00f3"
-    ],
-    "id": "/en/being_julia",
-    "initial_release_date": "2004-09-03"
+    ]
   },
   {
-    "genre": [
-      "Drama"
-    ],
-    "name": "Bekhal's Tears",
+    "id": "/en/bekhals_tears",
     "directed_by": [
       "Lauand Omar"
     ],
-    "id": "/en/bekhals_tears"
+    "name": "Bekhal's Tears",
+    "genre": [
+      "Drama"
+    ]
   },
   {
+    "id": "/en/believe_in_me",
+    "directed_by": [
+      "Robert Collector"
+    ],
+    "name": "Believe in Me",
     "genre": [
       "Sports",
       "Family Drama",
       "Family",
       "Drama"
-    ],
-    "name": "Believe in Me",
-    "directed_by": [
-      "Robert Collector"
-    ],
-    "id": "/en/believe_in_me"
+    ]
   },
   {
+    "id": "/en/belly_of_the_beast",
+    "directed_by": [
+      "Ching Siu-tung"
+    ],
+    "initial_release_date": "2003-12-30",
+    "name": "Belly of the Beast",
     "genre": [
       "Action Film",
       "Thriller",
@@ -4760,28 +4755,28 @@
       "Crime Thriller",
       "Action Thriller",
       "Chinese Movies"
-    ],
-    "name": "Belly of the Beast",
-    "directed_by": [
-      "Ching Siu-tung"
-    ],
-    "id": "/en/belly_of_the_beast",
-    "initial_release_date": "2003-12-30"
+    ]
   },
   {
+    "id": "/en/bellyful",
+    "directed_by": [
+      "Melvin Van Peebles"
+    ],
+    "initial_release_date": "2000-06-28",
+    "name": "Bellyful",
     "genre": [
       "Indie film",
       "Satire",
       "Comedy"
-    ],
-    "name": "Bellyful",
-    "directed_by": [
-      "Melvin Van Peebles"
-    ],
-    "id": "/en/bellyful",
-    "initial_release_date": "2000-06-28"
+    ]
   },
   {
+    "id": "/en/bend_it_like_beckham",
+    "directed_by": [
+      "Gurinder Chadha"
+    ],
+    "initial_release_date": "2002-04-11",
+    "name": "Bend It Like Beckham",
     "genre": [
       "Coming of age",
       "Indie film",
@@ -4791,73 +4786,73 @@
       "Comedy-drama",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bend It Like Beckham",
-    "directed_by": [
-      "Gurinder Chadha"
-    ],
-    "id": "/en/bend_it_like_beckham",
-    "initial_release_date": "2002-04-11"
+    ]
   },
   {
+    "id": "/en/bendito_infierno",
+    "directed_by": [
+      "Agust\u00edn D\u00edaz Yanes"
+    ],
+    "initial_release_date": "2001-11-28",
+    "name": "Don't Tempt Me",
     "genre": [
       "Religious Film",
       "Fantasy",
       "Comedy"
-    ],
-    "name": "Don't Tempt Me",
-    "directed_by": [
-      "Agust\u00edn D\u00edaz Yanes"
-    ],
-    "id": "/en/bendito_infierno",
-    "initial_release_date": "2001-11-28"
+    ]
   },
   {
+    "id": "/en/beneath",
+    "directed_by": [
+      "Dagen Merrill"
+    ],
+    "initial_release_date": "2007-08-07",
+    "name": "Beneath",
     "genre": [
       "Horror",
       "Psychological thriller",
       "Thriller",
       "Supernatural",
       "Crime Thriller"
-    ],
-    "name": "Beneath",
-    "directed_by": [
-      "Dagen Merrill"
-    ],
-    "id": "/en/beneath",
-    "initial_release_date": "2007-08-07"
+    ]
   },
   {
+    "id": "/en/beneath_clouds",
+    "directed_by": [
+      "Ivan Sen"
+    ],
+    "initial_release_date": "2002-02-08",
+    "name": "Beneath Clouds",
     "genre": [
       "Indie film",
       "Romance Film",
       "Road movie",
       "Social problem film",
       "Drama"
-    ],
-    "name": "Beneath Clouds",
-    "directed_by": [
-      "Ivan Sen"
-    ],
-    "id": "/en/beneath_clouds",
-    "initial_release_date": "2002-02-08"
+    ]
   },
   {
+    "id": "/en/beowulf_2007",
+    "directed_by": [
+      "Robert Zemeckis"
+    ],
+    "initial_release_date": "2007-11-05",
+    "name": "Beowulf",
     "genre": [
       "Adventure Film",
       "Computer Animation",
       "Fantasy",
       "Action Film",
       "Animation"
-    ],
-    "name": "Beowulf",
-    "directed_by": [
-      "Robert Zemeckis"
-    ],
-    "id": "/en/beowulf_2007",
-    "initial_release_date": "2007-11-05"
+    ]
   },
   {
+    "id": "/en/beowulf_grendel",
+    "directed_by": [
+      "Sturla Gunnarsson"
+    ],
+    "initial_release_date": "2005-09-14",
+    "name": "Beowulf &amp; Grendel",
     "genre": [
       "Adventure Film",
       "Action Film",
@@ -4868,173 +4863,173 @@
       "Historical period drama",
       "Mythological Fantasy",
       "Drama"
-    ],
-    "name": "Beowulf &amp; Grendel",
-    "directed_by": [
-      "Sturla Gunnarsson"
-    ],
-    "id": "/en/beowulf_grendel",
-    "initial_release_date": "2005-09-14"
+    ]
   },
   {
-    "genre": [
-      "Comedy"
-    ],
-    "name": "Best in Show",
+    "id": "/en/best_in_show",
     "directed_by": [
       "Christopher Guest"
     ],
-    "id": "/en/best_in_show",
-    "initial_release_date": "2000-09-08"
+    "initial_release_date": "2000-09-08",
+    "name": "Best in Show",
+    "genre": [
+      "Comedy"
+    ]
   },
   {
+    "id": "/en/the_best_of_the_bloodiest_brawls_vol_1",
+    "directed_by": [],
+    "initial_release_date": "2006-03-14",
+    "name": "The Best of The Bloodiest Brawls, Vol. 1",
     "genre": [
       "Sports"
-    ],
-    "name": "The Best of The Bloodiest Brawls, Vol. 1",
-    "directed_by": [],
-    "id": "/en/the_best_of_the_bloodiest_brawls_vol_1",
-    "initial_release_date": "2006-03-14"
+    ]
   },
   {
+    "id": "/en/better_luck_tomorrow",
+    "directed_by": [
+      "Justin Lin"
+    ],
+    "initial_release_date": "2003-04-11",
+    "name": "Better Luck Tomorrow",
     "genre": [
       "Coming of age",
       "Teen film",
       "Crime Fiction",
       "Crime Drama",
       "Drama"
-    ],
-    "name": "Better Luck Tomorrow",
-    "directed_by": [
-      "Justin Lin"
-    ],
-    "id": "/en/better_luck_tomorrow",
-    "initial_release_date": "2003-04-11"
+    ]
   },
   {
-    "genre": [
-      "Biographical film",
-      "Drama"
-    ],
-    "name": "Bettie Page: Dark Angel",
+    "id": "/en/bettie_page_dark_angel",
     "directed_by": [
       "Nico B."
     ],
-    "id": "/en/bettie_page_dark_angel",
-    "initial_release_date": "2004-02-11"
+    "initial_release_date": "2004-02-11",
+    "name": "Bettie Page: Dark Angel",
+    "genre": [
+      "Biographical film",
+      "Drama"
+    ]
   },
   {
+    "id": "/en/bewitched_2005",
+    "directed_by": [
+      "Nora Ephron"
+    ],
+    "initial_release_date": "2005-06-24",
+    "name": "Bewitched",
     "genre": [
       "Romantic comedy",
       "Fantasy",
       "Romance Film",
       "Comedy"
-    ],
-    "name": "Bewitched",
-    "directed_by": [
-      "Nora Ephron"
-    ],
-    "id": "/en/bewitched_2005",
-    "initial_release_date": "2005-06-24"
+    ]
   },
   {
+    "id": "/en/beyond_borders",
+    "directed_by": [
+      "Martin Campbell"
+    ],
+    "initial_release_date": "2003-10-24",
+    "name": "Beyond Borders",
     "genre": [
       "Adventure Film",
       "Historical period drama",
       "Romance Film",
       "War film",
       "Drama"
-    ],
-    "name": "Beyond Borders",
-    "directed_by": [
-      "Martin Campbell"
-    ],
-    "id": "/en/beyond_borders",
-    "initial_release_date": "2003-10-24"
+    ]
   },
   {
+    "id": "/en/beyond_re-animator",
+    "directed_by": [
+      "Brian Yuzna"
+    ],
+    "initial_release_date": "2003-04-04",
+    "name": "Beyond Re-Animator",
     "genre": [
       "Horror",
       "Science Fiction",
       "Comedy"
-    ],
-    "name": "Beyond Re-Animator",
-    "directed_by": [
-      "Brian Yuzna"
-    ],
-    "id": "/en/beyond_re-animator",
-    "initial_release_date": "2003-04-04"
+    ]
   },
   {
+    "id": "/en/beyond_the_sea",
+    "directed_by": [
+      "Kevin Spacey"
+    ],
+    "initial_release_date": "2004-09-11",
+    "name": "Beyond the Sea",
     "genre": [
       "Musical",
       "Music",
       "Biographical film",
       "Drama",
       "Musical Drama"
-    ],
-    "name": "Beyond the Sea",
-    "directed_by": [
-      "Kevin Spacey"
-    ],
-    "id": "/en/beyond_the_sea",
-    "initial_release_date": "2004-09-11"
+    ]
   },
   {
+    "id": "/en/bhadra_2005",
+    "directed_by": [
+      "Boyapati Srinu"
+    ],
+    "initial_release_date": "2005-05-12",
+    "name": "Bhadra",
     "genre": [
       "Action Film",
       "Tollywood",
       "World cinema",
       "Drama"
-    ],
-    "name": "Bhadra",
-    "directed_by": [
-      "Boyapati Srinu"
-    ],
-    "id": "/en/bhadra_2005",
-    "initial_release_date": "2005-05-12"
+    ]
   },
   {
+    "id": "/en/bhageeradha",
+    "directed_by": [
+      "Rasool Ellore"
+    ],
+    "initial_release_date": "2005-10-13",
+    "name": "Bhageeratha",
     "genre": [
       "Drama",
       "Tollywood",
       "World cinema"
-    ],
-    "name": "Bhageeratha",
-    "directed_by": [
-      "Rasool Ellore"
-    ],
-    "id": "/en/bhageeradha",
-    "initial_release_date": "2005-10-13"
+    ]
   },
   {
+    "id": "/en/bheema",
+    "directed_by": [
+      "N. Lingusamy"
+    ],
+    "initial_release_date": "2008-01-14",
+    "name": "Bheemaa",
     "genre": [
       "Action Film",
       "Tamil cinema",
       "World cinema"
-    ],
-    "name": "Bheemaa",
-    "directed_by": [
-      "N. Lingusamy"
-    ],
-    "id": "/en/bheema",
-    "initial_release_date": "2008-01-14"
+    ]
   },
   {
+    "id": "/en/bhoot",
+    "directed_by": [
+      "Ram Gopal Varma"
+    ],
+    "initial_release_date": "2003-05-17",
+    "name": "Bhoot",
     "genre": [
       "Horror",
       "Thriller",
       "Bollywood",
       "World cinema"
-    ],
-    "name": "Bhoot",
-    "directed_by": [
-      "Ram Gopal Varma"
-    ],
-    "id": "/en/bhoot",
-    "initial_release_date": "2003-05-17"
+    ]
   },
   {
+    "id": "/en/bichhoo",
+    "directed_by": [
+      "Guddu Dhanoa"
+    ],
+    "initial_release_date": "2000-07-07",
+    "name": "Bichhoo",
     "genre": [
       "Thriller",
       "Action Film",
@@ -5042,15 +5037,15 @@
       "Bollywood",
       "World cinema",
       "Drama"
-    ],
-    "name": "Bichhoo",
-    "directed_by": [
-      "Guddu Dhanoa"
-    ],
-    "id": "/en/bichhoo",
-    "initial_release_date": "2000-07-07"
+    ]
   },
   {
+    "id": "/en/big_eden",
+    "directed_by": [
+      "Thomas Bezucha"
+    ],
+    "initial_release_date": "2000-04-18",
+    "name": "Big Eden",
     "genre": [
       "LGBT",
       "Indie film",
@@ -5061,28 +5056,28 @@
       "Gay Themed",
       "Romantic comedy",
       "Drama"
-    ],
-    "name": "Big Eden",
-    "directed_by": [
-      "Thomas Bezucha"
-    ],
-    "id": "/en/big_eden",
-    "initial_release_date": "2000-04-18"
+    ]
   },
   {
+    "id": "/en/big_fat_liar",
+    "directed_by": [
+      "Shawn Levy"
+    ],
+    "initial_release_date": "2002-02-02",
+    "name": "Big Fat Liar",
     "genre": [
       "Family",
       "Adventure Film",
       "Comedy"
-    ],
-    "name": "Big Fat Liar",
-    "directed_by": [
-      "Shawn Levy"
-    ],
-    "id": "/en/big_fat_liar",
-    "initial_release_date": "2002-02-02"
+    ]
   },
   {
+    "id": "/en/big_fish",
+    "directed_by": [
+      "Tim Burton"
+    ],
+    "initial_release_date": "2003-12-10",
+    "name": "Big Fish",
     "genre": [
       "Fantasy",
       "Adventure Film",
@@ -5093,53 +5088,53 @@
       "Fantasy Comedy",
       "Comedy",
       "Drama"
-    ],
-    "name": "Big Fish",
-    "directed_by": [
-      "Tim Burton"
-    ],
-    "id": "/en/big_fish",
-    "initial_release_date": "2003-12-10"
+    ]
   },
   {
+    "id": "/en/big_girls_dont_cry_2002",
+    "directed_by": [
+      "Maria von Heland"
+    ],
+    "initial_release_date": "2002-10-24",
+    "name": "Big Girls Don't Cry",
     "genre": [
       "World cinema",
       "Melodrama",
       "Teen film",
       "Drama"
-    ],
-    "name": "Big Girls Don't Cry",
-    "directed_by": [
-      "Maria von Heland"
-    ],
-    "id": "/en/big_girls_dont_cry_2002",
-    "initial_release_date": "2002-10-24"
+    ]
   },
   {
-    "genre": [
-      "Drama"
-    ],
-    "name": "Big Man, Little Love",
+    "id": "/en/big_man_little_love",
     "directed_by": [
       "Handan \u0130pek\u00e7i"
     ],
-    "id": "/en/big_man_little_love",
-    "initial_release_date": "2001-10-19"
+    "initial_release_date": "2001-10-19",
+    "name": "Big Man, Little Love",
+    "genre": [
+      "Drama"
+    ]
   },
   {
+    "id": "/en/big_mommas_house",
+    "directed_by": [
+      "Raja Gosnell"
+    ],
+    "initial_release_date": "2000-05-31",
+    "name": "Big Momma's House",
     "genre": [
       "Action Film",
       "Crime Fiction",
       "Comedy"
-    ],
-    "name": "Big Momma's House",
-    "directed_by": [
-      "Raja Gosnell"
-    ],
-    "id": "/en/big_mommas_house",
-    "initial_release_date": "2000-05-31"
+    ]
   },
   {
+    "id": "/en/big_mommas_house_2",
+    "directed_by": [
+      "John Whitesell"
+    ],
+    "initial_release_date": "2006-01-26",
+    "name": "Big Momma's House 2",
     "genre": [
       "Crime Fiction",
       "Slapstick",
@@ -5148,25 +5143,25 @@
       "Thriller",
       "Farce",
       "Comedy"
-    ],
-    "name": "Big Momma's House 2",
-    "directed_by": [
-      "John Whitesell"
-    ],
-    "id": "/en/big_mommas_house_2",
-    "initial_release_date": "2006-01-26"
+    ]
   },
   {
-    "genre": [
-      "Pornographic film"
-    ],
-    "name": "Big Toys, No Boys 2",
+    "id": "/en/big_toys_no_boys_2",
     "directed_by": [
       "Trist\u00e1n"
     ],
-    "id": "/en/big_toys_no_boys_2"
+    "name": "Big Toys, No Boys 2",
+    "genre": [
+      "Pornographic film"
+    ]
   },
   {
+    "id": "/en/big_trouble_2002",
+    "directed_by": [
+      "Barry Sonnenfeld"
+    ],
+    "initial_release_date": "2002-04-05",
+    "name": "Big Trouble",
     "genre": [
       "Crime Fiction",
       "Black comedy",
@@ -5174,30 +5169,30 @@
       "Action/Adventure",
       "Gangster Film",
       "Comedy"
-    ],
-    "name": "Big Trouble",
-    "directed_by": [
-      "Barry Sonnenfeld"
-    ],
-    "id": "/en/big_trouble_2002",
-    "initial_release_date": "2002-04-05"
+    ]
   },
   {
+    "id": "/en/bigger_than_the_sky",
+    "directed_by": [
+      "Al Corley"
+    ],
+    "initial_release_date": "2005-02-18",
+    "name": "Bigger Than the Sky",
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy-drama",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bigger Than the Sky",
-    "directed_by": [
-      "Al Corley"
-    ],
-    "id": "/en/bigger_than_the_sky",
-    "initial_release_date": "2005-02-18"
+    ]
   },
   {
+    "id": "/en/biggie_tupac",
+    "directed_by": [
+      "Nick Broomfield"
+    ],
+    "initial_release_date": "2002-01-11",
+    "name": "Biggie &amp; Tupac",
     "genre": [
       "Documentary film",
       "Hip hop film",
@@ -5206,43 +5201,44 @@
       "Crime Fiction",
       "True crime",
       "Biographical film"
-    ],
-    "name": "Biggie &amp; Tupac",
-    "directed_by": [
-      "Nick Broomfield"
-    ],
-    "id": "/en/biggie_tupac",
-    "initial_release_date": "2002-01-11"
+    ]
   },
   {
+    "id": "/en/bill_2007",
+    "directed_by": [
+      "Bernie Goldmann",
+      "Melisa Wallick"
+    ],
+    "initial_release_date": "2007-09-08",
+    "name": "Meet Bill",
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Meet Bill",
-    "directed_by": [
-      "Bernie Goldmann",
-      "Melisa Wallick"
-    ],
-    "id": "/en/bill_2007",
-    "initial_release_date": "2007-09-08"
+    ]
   },
   {
+    "id": "/en/billy_elliot",
+    "directed_by": [
+      "Stephen Daldry"
+    ],
+    "initial_release_date": "2000-05-19",
+    "name": "Billy Elliot",
     "genre": [
       "Comedy",
       "Music",
       "Drama"
-    ],
-    "name": "Billy Elliot",
-    "directed_by": [
-      "Stephen Daldry"
-    ],
-    "id": "/en/billy_elliot",
-    "initial_release_date": "2000-05-19"
+    ]
   },
   {
+    "id": "/en/bionicle_3_web_of_shadows",
+    "directed_by": [
+      "David Molina",
+      "Terry Shakespeare"
+    ],
+    "initial_release_date": "2005-10-11",
+    "name": "Bionicle 3: Web of Shadows",
     "genre": [
       "Fantasy",
       "Adventure Film",
@@ -5250,36 +5246,36 @@
       "Family",
       "Computer Animation",
       "Science Fiction"
-    ],
-    "name": "Bionicle 3: Web of Shadows",
-    "directed_by": [
-      "David Molina",
-      "Terry Shakespeare"
-    ],
-    "id": "/en/bionicle_3_web_of_shadows",
-    "initial_release_date": "2005-10-11"
+    ]
   },
   {
-    "genre": [
-      "Fantasy",
-      "Adventure Film",
-      "Animation",
-      "Family",
-      "Computer Animation",
-      "Science Fiction",
-      "Children's Fantasy",
-      "Children's/Family",
-      "Fantasy Adventure"
-    ],
-    "name": "Bionicle 2: Legends of Metru Nui",
-    "directed_by": [
-      "David Molina",
-      "Terry Shakespeare"
-    ],
     "id": "/en/bionicle_2_legends_of_metru_nui",
-    "initial_release_date": "2004-10-19"
+    "directed_by": [
+      "David Molina",
+      "Terry Shakespeare"
+    ],
+    "initial_release_date": "2004-10-19",
+    "name": "Bionicle 2: Legends of Metru Nui",
+    "genre": [
+      "Fantasy",
+      "Adventure Film",
+      "Animation",
+      "Family",
+      "Computer Animation",
+      "Science Fiction",
+      "Children's Fantasy",
+      "Children's/Family",
+      "Fantasy Adventure"
+    ]
   },
   {
+    "id": "/en/bionicle_mask_of_light",
+    "directed_by": [
+      "David Molina",
+      "Terry Shakespeare"
+    ],
+    "initial_release_date": "2003-09-16",
+    "name": "Bionicle: Mask of Light: The Movie",
     "genre": [
       "Family",
       "Fantasy",
@@ -5290,31 +5286,30 @@
       "Children's Fantasy",
       "Children's/Family",
       "Fantasy Adventure"
-    ],
-    "name": "Bionicle: Mask of Light: The Movie",
-    "directed_by": [
-      "David Molina",
-      "Terry Shakespeare"
-    ],
-    "id": "/en/bionicle_mask_of_light",
-    "initial_release_date": "2003-09-16"
+    ]
   },
   {
+    "id": "/en/birth_2004",
+    "directed_by": [
+      "Jonathan Glazer"
+    ],
+    "initial_release_date": "2004-09-08",
+    "name": "Birth",
     "genre": [
       "Mystery",
       "Indie film",
       "Romance Film",
       "Thriller",
       "Drama"
-    ],
-    "name": "Birth",
-    "directed_by": [
-      "Jonathan Glazer"
-    ],
-    "id": "/en/birth_2004",
-    "initial_release_date": "2004-09-08"
+    ]
   },
   {
+    "id": "/en/birthday_girl",
+    "directed_by": [
+      "Jez Butterworth"
+    ],
+    "initial_release_date": "2002-02-01",
+    "name": "Birthday Girl",
     "genre": [
       "Black comedy",
       "Thriller",
@@ -5324,26 +5319,26 @@
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Birthday Girl",
-    "directed_by": [
-      "Jez Butterworth"
-    ],
-    "id": "/en/birthday_girl",
-    "initial_release_date": "2002-02-01"
+    ]
   },
   {
-    "genre": [
-      "Comedy"
-    ],
-    "name": "Bite Me, Fanboy",
+    "id": "/en/bite_me_fanboy",
     "directed_by": [
       "Mat Nastos"
     ],
-    "id": "/en/bite_me_fanboy",
-    "initial_release_date": "2005-06-01"
+    "initial_release_date": "2005-06-01",
+    "name": "Bite Me, Fanboy",
+    "genre": [
+      "Comedy"
+    ]
   },
   {
+    "id": "/en/bitter_jester",
+    "directed_by": [
+      "Maija DiGiorgio"
+    ],
+    "initial_release_date": "2003-02-26",
+    "name": "Bitter Jester",
     "genre": [
       "Indie film",
       "Documentary film",
@@ -5351,94 +5346,94 @@
       "Culture &amp; Society",
       "Comedy",
       "Biographical film"
-    ],
-    "name": "Bitter Jester",
-    "directed_by": [
-      "Maija DiGiorgio"
-    ],
-    "id": "/en/bitter_jester",
-    "initial_release_date": "2003-02-26"
+    ]
   },
   {
-    "genre": [
-      "Family",
-      "Drama"
-    ],
-    "name": "Black",
+    "id": "/en/black_2005",
     "directed_by": [
       "Sanjay Leela Bhansali"
     ],
-    "id": "/en/black_2005",
-    "initial_release_date": "2005-02-04"
+    "initial_release_date": "2005-02-04",
+    "name": "Black",
+    "genre": [
+      "Family",
+      "Drama"
+    ]
   },
   {
+    "id": "/en/black_and_white_2002",
+    "directed_by": [
+      "Craig Lahiff"
+    ],
+    "initial_release_date": "2002-10-31",
+    "name": "Black and White",
     "genre": [
       "Trial drama",
       "Crime Fiction",
       "World cinema",
       "Drama"
-    ],
-    "name": "Black and White",
-    "directed_by": [
-      "Craig Lahiff"
-    ],
-    "id": "/en/black_and_white_2002",
-    "initial_release_date": "2002-10-31"
+    ]
   },
   {
+    "id": "/en/black_book_2006",
+    "directed_by": [
+      "Paul Verhoeven"
+    ],
+    "initial_release_date": "2006-09-01",
+    "name": "Black Book",
     "genre": [
       "Thriller",
       "War film",
       "Drama"
-    ],
-    "name": "Black Book",
-    "directed_by": [
-      "Paul Verhoeven"
-    ],
-    "id": "/en/black_book_2006",
-    "initial_release_date": "2006-09-01"
+    ]
   },
   {
+    "id": "/wikipedia/fr/Black_Christmas_$0028film$002C_2006$0029",
+    "directed_by": [
+      "Glen Morgan"
+    ],
+    "initial_release_date": "2006-12-15",
+    "name": "Black Christmas",
     "genre": [
       "Slasher",
       "Teen film",
       "Horror",
       "Thriller"
-    ],
-    "name": "Black Christmas",
-    "directed_by": [
-      "Glen Morgan"
-    ],
-    "id": "/wikipedia/fr/Black_Christmas_$0028film$002C_2006$0029",
-    "initial_release_date": "2006-12-15"
+    ]
   },
   {
+    "id": "/en/black_cloud",
+    "directed_by": [
+      "Ricky Schroder"
+    ],
+    "initial_release_date": "2004-04-30",
+    "name": "Black Cloud",
     "genre": [
       "Indie film",
       "Sports",
       "Drama"
-    ],
-    "name": "Black Cloud",
-    "directed_by": [
-      "Ricky Schroder"
-    ],
-    "id": "/en/black_cloud",
-    "initial_release_date": "2004-04-30"
+    ]
   },
   {
+    "id": "/en/black_friday_1993",
+    "directed_by": [
+      "Anurag Kashyap"
+    ],
+    "initial_release_date": "2004-05-20",
+    "name": "Black Friday",
     "genre": [
       "Crime Fiction",
       "Historical drama",
       "Drama"
-    ],
-    "name": "Black Friday",
-    "directed_by": [
-      "Anurag Kashyap"
-    ],
-    "id": "/en/black_friday_1993",
-    "initial_release_date": "2004-05-20"
+    ]
   },
   {
+    "id": "/en/black_hawk_down",
+    "directed_by": [
+      "Ridley Scott"
+    ],
+    "initial_release_date": "2001-12-18",
+    "name": "Black Hawk Down",
     "genre": [
       "War film",
       "Action/Adventure",
@@ -5446,28 +5441,28 @@
       "History",
       "Combat Films",
       "Drama"
-    ],
-    "name": "Black Hawk Down",
-    "directed_by": [
-      "Ridley Scott"
-    ],
-    "id": "/en/black_hawk_down",
-    "initial_release_date": "2001-12-18"
+    ]
   },
   {
+    "id": "/en/black_hole_2006",
+    "directed_by": [
+      "Tibor Tak\u00e1cs"
+    ],
+    "initial_release_date": "2006-06-10",
+    "name": "The Black Hole",
     "genre": [
       "Science Fiction",
       "Thriller",
       "Television film"
-    ],
-    "name": "The Black Hole",
-    "directed_by": [
-      "Tibor Tak\u00e1cs"
-    ],
-    "id": "/en/black_hole_2006",
-    "initial_release_date": "2006-06-10"
+    ]
   },
   {
+    "id": "/en/black_knight_2001",
+    "directed_by": [
+      "Gil Junger"
+    ],
+    "initial_release_date": "2001-11-15",
+    "name": "Black Knight",
     "genre": [
       "Time travel",
       "Adventure Film",
@@ -5477,56 +5472,56 @@
       "Adventure Comedy",
       "Fantasy Comedy",
       "Comedy"
-    ],
-    "name": "Black Knight",
-    "directed_by": [
-      "Gil Junger"
-    ],
-    "id": "/en/black_knight_2001",
-    "initial_release_date": "2001-11-15"
+    ]
   },
   {
+    "id": "/en/blackball_2005",
+    "directed_by": [
+      "Mel Smith"
+    ],
+    "initial_release_date": "2005-02-11",
+    "name": "Blackball",
     "genre": [
       "Sports",
       "Family Drama",
       "Comedy",
       "Drama"
-    ],
-    "name": "Blackball",
-    "directed_by": [
-      "Mel Smith"
-    ],
-    "id": "/en/blackball_2005",
-    "initial_release_date": "2005-02-11"
+    ]
   },
   {
+    "id": "/en/blackwoods",
+    "directed_by": [
+      "Uwe Boll"
+    ],
+    "name": "Blackwoods",
     "genre": [
       "Thriller",
       "Crime Thriller",
       "Psychological thriller",
       "Drama"
-    ],
-    "name": "Blackwoods",
-    "directed_by": [
-      "Uwe Boll"
-    ],
-    "id": "/en/blackwoods"
+    ]
   },
   {
+    "id": "/en/blade_ii",
+    "directed_by": [
+      "Guillermo del Toro"
+    ],
+    "initial_release_date": "2002-03-21",
+    "name": "Blade II",
     "genre": [
       "Thriller",
       "Horror",
       "Science Fiction",
       "Action Film"
-    ],
-    "name": "Blade II",
-    "directed_by": [
-      "Guillermo del Toro"
-    ],
-    "id": "/en/blade_ii",
-    "initial_release_date": "2002-03-21"
+    ]
   },
   {
+    "id": "/en/blade_trinity",
+    "directed_by": [
+      "David S. Goyer"
+    ],
+    "initial_release_date": "2004-12-07",
+    "name": "Blade: Trinity",
     "genre": [
       "Thriller",
       "Action Film",
@@ -5536,97 +5531,97 @@
       "Fantasy",
       "Adventure Film",
       "Action Thriller"
-    ],
-    "name": "Blade: Trinity",
-    "directed_by": [
-      "David S. Goyer"
-    ],
-    "id": "/en/blade_trinity",
-    "initial_release_date": "2004-12-07"
+    ]
   },
   {
+    "id": "/en/bleach_memories_of_nobody",
+    "directed_by": [
+      "Noriyuki Abe"
+    ],
+    "initial_release_date": "2006-12-16",
+    "name": "Bleach: Memories of Nobody",
     "genre": [
       "Anime",
       "Fantasy",
       "Animation",
       "Action Film",
       "Adventure Film"
-    ],
-    "name": "Bleach: Memories of Nobody",
-    "directed_by": [
-      "Noriyuki Abe"
-    ],
-    "id": "/en/bleach_memories_of_nobody",
-    "initial_release_date": "2006-12-16"
+    ]
   },
   {
+    "id": "/en/bless_the_child",
+    "directed_by": [
+      "Chuck Russell"
+    ],
+    "initial_release_date": "2000-08-11",
+    "name": "Bless the Child",
     "genre": [
       "Horror",
       "Crime Fiction",
       "Drama",
       "Thriller"
-    ],
-    "name": "Bless the Child",
-    "directed_by": [
-      "Chuck Russell"
-    ],
-    "id": "/en/bless_the_child",
-    "initial_release_date": "2000-08-11"
+    ]
   },
   {
-    "genre": [
-      "Crime Fiction",
-      "Drama"
-    ],
-    "name": "Blind Shaft",
+    "id": "/en/blind_shaft",
     "directed_by": [
       "Li Yang"
     ],
-    "id": "/en/blind_shaft",
-    "initial_release_date": "2003-02-12"
+    "initial_release_date": "2003-02-12",
+    "name": "Blind Shaft",
+    "genre": [
+      "Crime Fiction",
+      "Drama"
+    ]
   },
   {
+    "id": "/en/blissfully_yours",
+    "directed_by": [
+      "Apichatpong Weerasethakul"
+    ],
+    "initial_release_date": "2002-05-17",
+    "name": "Blissfully Yours",
     "genre": [
       "Erotica",
       "Romance Film",
       "World cinema",
       "Drama"
-    ],
-    "name": "Blissfully Yours",
-    "directed_by": [
-      "Apichatpong Weerasethakul"
-    ],
-    "id": "/en/blissfully_yours",
-    "initial_release_date": "2002-05-17"
+    ]
   },
   {
+    "id": "/en/blood_of_a_champion",
+    "directed_by": [
+      "Lawrence Page"
+    ],
+    "initial_release_date": "2006-03-07",
+    "name": "Blood of a Champion",
     "genre": [
       "Crime Fiction",
       "Sports",
       "Drama"
-    ],
-    "name": "Blood of a Champion",
-    "directed_by": [
-      "Lawrence Page"
-    ],
-    "id": "/en/blood_of_a_champion",
-    "initial_release_date": "2006-03-07"
+    ]
   },
   {
+    "id": "/en/blood_rain",
+    "directed_by": [
+      "Kim Dae-seung"
+    ],
+    "initial_release_date": "2005-05-04",
+    "name": "Blood Rain",
     "genre": [
       "Thriller",
       "Mystery",
       "East Asian cinema",
       "World cinema"
-    ],
-    "name": "Blood Rain",
-    "directed_by": [
-      "Kim Dae-seung"
-    ],
-    "id": "/en/blood_rain",
-    "initial_release_date": "2005-05-04"
+    ]
   },
   {
+    "id": "/en/blood_work",
+    "directed_by": [
+      "Clint Eastwood"
+    ],
+    "initial_release_date": "2002-08-09",
+    "name": "Blood Work",
     "genre": [
       "Mystery",
       "Crime Thriller",
@@ -5635,162 +5630,161 @@
       "Crime Fiction",
       "Detective fiction",
       "Drama"
-    ],
-    "name": "Blood Work",
-    "directed_by": [
-      "Clint Eastwood"
-    ],
-    "id": "/en/blood_work",
-    "initial_release_date": "2002-08-09"
+    ]
   },
   {
+    "id": "/en/bloodrayne_2006",
+    "directed_by": [
+      "Uwe Boll"
+    ],
+    "initial_release_date": "2005-10-23",
+    "name": "BloodRayne",
     "genre": [
       "Horror",
       "Action Film",
       "Fantasy",
       "Adventure Film",
       "Costume drama"
-    ],
-    "name": "BloodRayne",
-    "directed_by": [
-      "Uwe Boll"
-    ],
-    "id": "/en/bloodrayne_2006",
-    "initial_release_date": "2005-10-23"
+    ]
   },
   {
+    "id": "/en/bloodsport_ecws_most_violent_matches",
+    "directed_by": [],
+    "initial_release_date": "2006-02-07",
+    "name": "Bloodsport - ECW's Most Violent Matches",
     "genre": [
       "Documentary film",
       "Sports"
-    ],
-    "name": "Bloodsport - ECW's Most Violent Matches",
-    "directed_by": [],
-    "id": "/en/bloodsport_ecws_most_violent_matches",
-    "initial_release_date": "2006-02-07"
+    ]
   },
   {
+    "id": "/en/bloody_sunday",
+    "directed_by": [
+      "Paul Greengrass"
+    ],
+    "initial_release_date": "2002-01-16",
+    "name": "Bloody Sunday",
     "genre": [
       "Political drama",
       "Docudrama",
       "Historical fiction",
       "War film",
       "Drama"
-    ],
-    "name": "Bloody Sunday",
-    "directed_by": [
-      "Paul Greengrass"
-    ],
-    "id": "/en/bloody_sunday",
-    "initial_release_date": "2002-01-16"
+    ]
   },
   {
+    "id": "/en/blow",
+    "directed_by": [
+      "Ted Demme"
+    ],
+    "initial_release_date": "2001-03-29",
+    "name": "Blow",
     "genre": [
       "Biographical film",
       "Crime Fiction",
       "Film adaptation",
       "Historical period drama",
       "Drama"
-    ],
-    "name": "Blow",
-    "directed_by": [
-      "Ted Demme"
-    ],
-    "id": "/en/blow",
-    "initial_release_date": "2001-03-29"
+    ]
   },
   {
+    "id": "/en/blue_car",
+    "directed_by": [
+      "Karen Moncrieff"
+    ],
+    "initial_release_date": "2003-05-02",
+    "name": "Blue Car",
     "genre": [
       "Indie film",
       "Family Drama",
       "Coming of age",
       "Drama"
-    ],
-    "name": "Blue Car",
-    "directed_by": [
-      "Karen Moncrieff"
-    ],
-    "id": "/en/blue_car",
-    "initial_release_date": "2003-05-02"
+    ]
   },
   {
+    "id": "/en/blue_collar_comedy_tour_rides_again",
+    "directed_by": [
+      "C. B. Harding"
+    ],
+    "initial_release_date": "2004-12-05",
+    "name": "Blue Collar Comedy Tour Rides Again",
     "genre": [
       "Documentary film",
       "Stand-up comedy",
       "Comedy"
-    ],
-    "name": "Blue Collar Comedy Tour Rides Again",
+    ]
+  },
+  {
+    "id": "/en/blue_collar_comedy_tour_one_for_the_road",
     "directed_by": [
       "C. B. Harding"
     ],
-    "id": "/en/blue_collar_comedy_tour_rides_again",
-    "initial_release_date": "2004-12-05"
-  },
-  {
+    "initial_release_date": "2006-06-27",
+    "name": "Blue Collar Comedy Tour: One for the Road",
     "genre": [
       "Stand-up comedy",
       "Concert film",
       "Comedy"
-    ],
-    "name": "Blue Collar Comedy Tour: One for the Road",
+    ]
+  },
+  {
+    "id": "/en/blue_collar_comedy_tour_the_movie",
     "directed_by": [
       "C. B. Harding"
     ],
-    "id": "/en/blue_collar_comedy_tour_one_for_the_road",
-    "initial_release_date": "2006-06-27"
-  },
-  {
+    "initial_release_date": "2003-03-28",
+    "name": "Blue Collar Comedy Tour: The Movie",
     "genre": [
       "Stand-up comedy",
       "Documentary film",
       "Comedy"
-    ],
-    "name": "Blue Collar Comedy Tour: The Movie",
-    "directed_by": [
-      "C. B. Harding"
-    ],
-    "id": "/en/blue_collar_comedy_tour_the_movie",
-    "initial_release_date": "2003-03-28"
+    ]
   },
   {
+    "id": "/en/blue_crush",
+    "directed_by": [
+      "John Stockwell"
+    ],
+    "initial_release_date": "2002-08-08",
+    "name": "Blue Crush",
     "genre": [
       "Teen film",
       "Romance Film",
       "Sports",
       "Drama"
-    ],
-    "name": "Blue Crush",
-    "initial_release_date": "2002-08-08",
-    "id": "/en/blue_crush",
-    "directed_by": [
-      "John Stockwell"
     ]
   },
   {
-    "genre": [
-      "Romance Film",
-      "Drama"
-    ],
-    "name": "Blue Gate Crossing",
-    "initial_release_date": "2002-09-08",
     "id": "/en/blue_gate_crossing",
     "directed_by": [
       "Yee Chin-yen"
+    ],
+    "initial_release_date": "2002-09-08",
+    "name": "Blue Gate Crossing",
+    "genre": [
+      "Romance Film",
+      "Drama"
     ]
   },
   {
+    "id": "/en/blue_milk",
+    "directed_by": [
+      "William Grammer"
+    ],
+    "initial_release_date": "2006-06-20",
+    "name": "Blue Milk",
     "genre": [
       "Indie film",
       "Short Film",
       "Fan film"
-    ],
-    "name": "Blue Milk",
-    "initial_release_date": "2006-06-20",
-    "id": "/en/blue_milk",
-    "directed_by": [
-      "William Grammer"
     ]
   },
   {
+    "id": "/en/blue_state",
+    "directed_by": [
+      "Marshall Lewy"
+    ],
+    "name": "Blue State",
     "genre": [
       "Indie film",
       "Romance Film",
@@ -5799,40 +5793,41 @@
       "Political satire",
       "Road movie",
       "Comedy"
-    ],
-    "name": "Blue State",
-    "id": "/en/blue_state",
-    "directed_by": [
-      "Marshall Lewy"
     ]
   },
   {
+    "id": "/en/blueberry_2004",
+    "directed_by": [
+      "Jan Kounen"
+    ],
+    "initial_release_date": "2004-02-11",
+    "name": "Blueberry",
     "genre": [
       "Western",
       "Thriller",
       "Action Film",
       "Adventure Film"
-    ],
-    "name": "Blueberry",
-    "initial_release_date": "2004-02-11",
-    "id": "/en/blueberry_2004",
-    "directed_by": [
-      "Jan Kounen"
     ]
   },
   {
-    "genre": [
-      "Science Fiction",
-      "Drama"
-    ],
-    "name": "Blueprint",
-    "initial_release_date": "2003-12-08",
     "id": "/en/blueprint_2003",
     "directed_by": [
       "Rolf Sch\u00fcbel"
+    ],
+    "initial_release_date": "2003-12-08",
+    "name": "Blueprint",
+    "genre": [
+      "Science Fiction",
+      "Drama"
     ]
   },
   {
+    "id": "/en/bluffmaster",
+    "directed_by": [
+      "Rohan Sippy"
+    ],
+    "initial_release_date": "2005-12-16",
+    "name": "Bluffmaster!",
     "genre": [
       "Romance Film",
       "Musical",
@@ -5844,68 +5839,68 @@
       "World cinema",
       "Drama",
       "Musical Drama"
-    ],
-    "name": "Bluffmaster!",
-    "initial_release_date": "2005-12-16",
-    "id": "/en/bluffmaster",
-    "directed_by": [
-      "Rohan Sippy"
     ]
   },
   {
+    "id": "/en/boa_vs_python",
+    "directed_by": [
+      "David Flores"
+    ],
+    "initial_release_date": "2004-05-24",
+    "name": "Boa vs. Python",
     "genre": [
       "Horror",
       "Natural horror film",
       "Monster",
       "Science Fiction",
       "Creature Film"
-    ],
-    "name": "Boa vs. Python",
-    "initial_release_date": "2004-05-24",
-    "id": "/en/boa_vs_python",
-    "directed_by": [
-      "David Flores"
     ]
   },
   {
+    "id": "/en/bobby",
+    "directed_by": [
+      "Emilio Estevez"
+    ],
+    "initial_release_date": "2006-09-05",
+    "name": "Bobby",
     "genre": [
       "Political drama",
       "Historical period drama",
       "History",
       "Drama"
-    ],
-    "name": "Bobby",
-    "initial_release_date": "2006-09-05",
-    "id": "/en/bobby",
-    "directed_by": [
-      "Emilio Estevez"
     ]
   },
   {
-    "genre": [
-      "Crime Fiction",
-      "Drama"
-    ],
-    "name": "Boiler Room",
-    "initial_release_date": "2000-01-30",
     "id": "/en/boiler_room",
     "directed_by": [
       "Ben Younger"
+    ],
+    "initial_release_date": "2000-01-30",
+    "name": "Boiler Room",
+    "genre": [
+      "Crime Fiction",
+      "Drama"
     ]
   },
   {
-    "genre": [
-      "Musical"
-    ],
-    "name": "Bolletjes Blues",
-    "initial_release_date": "2006-03-23",
     "id": "/en/bolletjes_blues",
     "directed_by": [
       "Brigit Hillenius",
       "Karin Junger"
+    ],
+    "initial_release_date": "2006-03-23",
+    "name": "Bolletjes Blues",
+    "genre": [
+      "Musical"
     ]
   },
   {
+    "id": "/en/bollywood_hollywood",
+    "directed_by": [
+      "Deepa Mehta"
+    ],
+    "initial_release_date": "2002-10-25",
+    "name": "Bollywood/Hollywood",
     "genre": [
       "Bollywood",
       "Musical",
@@ -5913,28 +5908,28 @@
       "Romantic comedy",
       "Musical comedy",
       "Comedy"
-    ],
-    "name": "Bollywood/Hollywood",
-    "initial_release_date": "2002-10-25",
-    "id": "/en/bollywood_hollywood",
-    "directed_by": [
-      "Deepa Mehta"
     ]
   },
   {
+    "id": "/en/bomb_the_system",
+    "directed_by": [
+      "Adam Bhala Lough"
+    ],
+    "name": "Bomb the System",
     "genre": [
       "Crime Fiction",
       "Indie film",
       "Coming of age",
       "Drama"
-    ],
-    "name": "Bomb the System",
-    "id": "/en/bomb_the_system",
-    "directed_by": [
-      "Adam Bhala Lough"
     ]
   },
   {
+    "id": "/en/bommarillu",
+    "directed_by": [
+      "Bhaskar"
+    ],
+    "initial_release_date": "2006-08-09",
+    "name": "Bommarillu",
     "genre": [
       "Musical",
       "Romance Film",
@@ -5942,15 +5937,14 @@
       "Musical Drama",
       "Tollywood",
       "World cinema"
-    ],
-    "name": "Bommarillu",
-    "initial_release_date": "2006-08-09",
-    "id": "/en/bommarillu",
-    "directed_by": [
-      "Bhaskar"
     ]
   },
   {
+    "id": "/en/bon_cop_bad_cop",
+    "directed_by": [
+      "Eric Canuel"
+    ],
+    "name": "Bon Cop, Bad Cop",
     "genre": [
       "Crime Fiction",
       "Buddy film",
@@ -5958,27 +5952,28 @@
       "Action/Adventure",
       "Thriller",
       "Comedy"
-    ],
-    "name": "Bon Cop, Bad Cop",
-    "id": "/en/bon_cop_bad_cop",
-    "directed_by": [
-      "Eric Canuel"
     ]
   },
   {
+    "id": "/en/bones_2001",
+    "directed_by": [
+      "Ernest R. Dickerson"
+    ],
+    "initial_release_date": "2001-10-26",
+    "name": "Bones",
     "genre": [
       "Horror",
       "Blaxploitation film",
       "Action Film"
-    ],
-    "name": "Bones",
-    "initial_release_date": "2001-10-26",
-    "id": "/en/bones_2001",
-    "directed_by": [
-      "Ernest R. Dickerson"
     ]
   },
   {
+    "id": "/en/bonjour_monsieur_shlomi",
+    "directed_by": [
+      "Shemi Zarhin"
+    ],
+    "initial_release_date": "2003-04-03",
+    "name": "Bonjour Monsieur Shlomi",
     "genre": [
       "World cinema",
       "Family Drama",
@@ -5987,15 +5982,15 @@
       "Family",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bonjour Monsieur Shlomi",
-    "initial_release_date": "2003-04-03",
-    "id": "/en/bonjour_monsieur_shlomi",
-    "directed_by": [
-      "Shemi Zarhin"
     ]
   },
   {
+    "id": "/en/boogeyman",
+    "directed_by": [
+      "Stephen T. Kay"
+    ],
+    "initial_release_date": "2005-02-04",
+    "name": "Boogeyman",
     "genre": [
       "Horror",
       "Supernatural",
@@ -6003,44 +5998,44 @@
       "Thriller",
       "Mystery",
       "Drama"
-    ],
-    "name": "Boogeyman",
-    "initial_release_date": "2005-02-04",
-    "id": "/en/boogeyman",
-    "directed_by": [
-      "Stephen T. Kay"
     ]
   },
   {
+    "id": "/en/boogiepop_and_others_2000",
+    "directed_by": [
+      "Ryu Kaneda"
+    ],
+    "initial_release_date": "2000-03-11",
+    "name": "Boogiepop and Others",
     "genre": [
       "Animation",
       "Fantasy",
       "Anime",
       "Thriller",
       "Japanese Movies"
-    ],
-    "name": "Boogiepop and Others",
-    "initial_release_date": "2000-03-11",
-    "id": "/en/boogiepop_and_others_2000",
-    "directed_by": [
-      "Ryu Kaneda"
     ]
   },
   {
+    "id": "/en/book_of_love_2004",
+    "directed_by": [
+      "Alan Brown"
+    ],
+    "initial_release_date": "2004-01-18",
+    "name": "Book of Love",
     "genre": [
       "Indie film",
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Book of Love",
-    "initial_release_date": "2004-01-18",
-    "id": "/en/book_of_love_2004",
-    "directed_by": [
-      "Alan Brown"
     ]
   },
   {
+    "id": "/en/book_of_shadows_blair_witch_2",
+    "directed_by": [
+      "Joe Berlinger"
+    ],
+    "initial_release_date": "2000-10-27",
+    "name": "Book of Shadows: Blair Witch 2",
     "genre": [
       "Horror",
       "Supernatural",
@@ -6050,63 +6045,63 @@
       "Thriller",
       "Ensemble Film",
       "Crime Fiction"
-    ],
-    "name": "Book of Shadows: Blair Witch 2",
-    "initial_release_date": "2000-10-27",
-    "id": "/en/book_of_shadows_blair_witch_2",
-    "directed_by": [
-      "Joe Berlinger"
     ]
   },
   {
-    "genre": [
-      "Crime Fiction",
-      "Drama"
-    ],
-    "name": "Bimmer",
-    "initial_release_date": "2003-08-02",
     "id": "/en/boomer",
     "directed_by": [
       "Pyotr Buslov"
+    ],
+    "initial_release_date": "2003-08-02",
+    "name": "Bimmer",
+    "genre": [
+      "Crime Fiction",
+      "Drama"
     ]
   },
   {
-    "genre": [
-      "Comedy"
-    ],
-    "name": "Borat: Cultural Learnings of America for Make Benefit Glorious Nation of Kazakhstan",
-    "initial_release_date": "2006-08-04",
     "id": "/wikipedia/de_id/1782985",
     "directed_by": [
       "Larry Charles"
+    ],
+    "initial_release_date": "2006-08-04",
+    "name": "Borat: Cultural Learnings of America for Make Benefit Glorious Nation of Kazakhstan",
+    "genre": [
+      "Comedy"
     ]
   },
   {
-    "genre": [
-      "Documentary film"
-    ],
-    "name": "Born into Brothels: Calcutta's Red Light Kids",
-    "initial_release_date": "2004-01-17",
     "id": "/en/born_into_brothels_calcuttas_red_light_kids",
     "directed_by": [
       "Zana Briski",
       "Ross Kauffman"
+    ],
+    "initial_release_date": "2004-01-17",
+    "name": "Born into Brothels: Calcutta's Red Light Kids",
+    "genre": [
+      "Documentary film"
     ]
   },
   {
+    "id": "/en/free_radicals",
+    "directed_by": [
+      "Barbara Albert"
+    ],
+    "name": "Free Radicals",
     "genre": [
       "World cinema",
       "Romance Film",
       "Art film",
       "Drama"
-    ],
-    "name": "Free Radicals",
-    "id": "/en/free_radicals",
-    "directed_by": [
-      "Barbara Albert"
     ]
   },
   {
+    "id": "/en/boss_2006",
+    "directed_by": [
+      "V.N. Aditya"
+    ],
+    "initial_release_date": "2006-09-27",
+    "name": "Boss",
     "genre": [
       "Musical",
       "Romance Film",
@@ -6114,76 +6109,76 @@
       "Musical Drama",
       "Tollywood",
       "World cinema"
-    ],
-    "name": "Boss",
-    "initial_release_date": "2006-09-27",
-    "id": "/en/boss_2006",
-    "directed_by": [
-      "V.N. Aditya"
     ]
   },
   {
+    "id": "/en/bossn_up",
+    "directed_by": [
+      "Dylan C. Brown"
+    ],
+    "initial_release_date": "2005-06-01",
+    "name": "Boss'n Up",
     "genre": [
       "Musical",
       "Indie film",
       "Crime Fiction",
       "Musical Drama",
       "Drama"
-    ],
-    "name": "Boss'n Up",
-    "initial_release_date": "2005-06-01",
-    "id": "/en/bossn_up",
-    "directed_by": [
-      "Dylan C. Brown"
     ]
   },
   {
+    "id": "/en/bossa_nova_2000",
+    "directed_by": [
+      "Bruno Barreto"
+    ],
+    "initial_release_date": "2000-02-18",
+    "name": "Bossa Nova",
     "genre": [
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bossa Nova",
-    "initial_release_date": "2000-02-18",
-    "id": "/en/bossa_nova_2000",
-    "directed_by": [
-      "Bruno Barreto"
     ]
   },
   {
-    "genre": [
-      "Musical"
-    ],
-    "name": "Bosta",
     "id": "/en/bosta",
     "directed_by": [
       "Philippe Aractingi"
+    ],
+    "name": "Bosta",
+    "genre": [
+      "Musical"
     ]
   },
   {
+    "id": "/en/bowling_for_columbine",
+    "directed_by": [
+      "Michael Moore"
+    ],
+    "initial_release_date": "2002-05-15",
+    "name": "Bowling for Columbine",
     "genre": [
       "Indie film",
       "Documentary film",
       "Political cinema",
       "Historical Documentaries"
-    ],
-    "name": "Bowling for Columbine",
-    "initial_release_date": "2002-05-15",
-    "id": "/en/bowling_for_columbine",
-    "directed_by": [
-      "Michael Moore"
     ]
   },
   {
+    "id": "/en/bowling_fun_and_fundamentals_for_boys_and_girls",
+    "directed_by": [],
+    "name": "Bowling Fun And Fundamentals For Boys And Girls",
     "genre": [
       "Documentary film",
       "Sports"
-    ],
-    "name": "Bowling Fun And Fundamentals For Boys And Girls",
-    "id": "/en/bowling_fun_and_fundamentals_for_boys_and_girls",
-    "directed_by": []
+    ]
   },
   {
+    "id": "/en/boy_eats_girl",
+    "directed_by": [
+      "Stephen Bradley"
+    ],
+    "initial_release_date": "2005-04-06",
+    "name": "Boy Eats Girl",
     "genre": [
       "Indie film",
       "Horror",
@@ -6192,15 +6187,15 @@
       "Zombie Film",
       "Horror comedy",
       "Comedy"
-    ],
-    "name": "Boy Eats Girl",
-    "initial_release_date": "2005-04-06",
-    "id": "/en/boy_eats_girl",
-    "directed_by": [
-      "Stephen Bradley"
     ]
   },
   {
+    "id": "/en/boynton_beach_club",
+    "directed_by": [
+      "Susan Seidelman"
+    ],
+    "initial_release_date": "2006-08-04",
+    "name": "Boynton Beach Club",
     "genre": [
       "Romantic comedy",
       "Indie film",
@@ -6209,15 +6204,15 @@
       "Slice of life",
       "Ensemble Film",
       "Comedy"
-    ],
-    "name": "Boynton Beach Club",
-    "initial_release_date": "2006-08-04",
-    "id": "/en/boynton_beach_club",
-    "directed_by": [
-      "Susan Seidelman"
     ]
   },
   {
+    "id": "/en/boys_2003",
+    "directed_by": [
+      "S. Shankar"
+    ],
+    "initial_release_date": "2003-08-29",
+    "name": "Boys",
     "genre": [
       "Musical",
       "Romance Film",
@@ -6227,56 +6222,56 @@
       "Drama",
       "Musical comedy",
       "Musical Drama"
-    ],
-    "name": "Boys",
-    "initial_release_date": "2003-08-29",
-    "id": "/en/boys_2003",
-    "directed_by": [
-      "S. Shankar"
     ]
   },
   {
+    "id": "/en/brain_blockers",
+    "directed_by": [
+      "Lincoln Kupchak"
+    ],
+    "initial_release_date": "2007-03-15",
+    "name": "Brain Blockers",
     "genre": [
       "Horror",
       "Zombie Film",
       "Horror comedy",
       "Comedy"
-    ],
-    "name": "Brain Blockers",
-    "initial_release_date": "2007-03-15",
-    "id": "/en/brain_blockers",
-    "directed_by": [
-      "Lincoln Kupchak"
     ]
   },
   {
+    "id": "/en/breakin_all_the_rules",
+    "directed_by": [
+      "Daniel Taplitz"
+    ],
+    "initial_release_date": "2004-05-14",
+    "name": "Breakin' All the Rules",
     "genre": [
       "Romance Film",
       "Romantic comedy",
       "Comedy of Errors",
       "Comedy"
-    ],
-    "name": "Breakin' All the Rules",
-    "initial_release_date": "2004-05-14",
-    "id": "/en/breakin_all_the_rules",
-    "directed_by": [
-      "Daniel Taplitz"
     ]
   },
   {
+    "id": "/en/breaking_and_entering",
+    "directed_by": [
+      "Anthony Minghella"
+    ],
+    "initial_release_date": "2006-09-13",
+    "name": "Breaking and Entering",
     "genre": [
       "Romance Film",
       "Crime Fiction",
       "Drama"
-    ],
-    "name": "Breaking and Entering",
-    "initial_release_date": "2006-09-13",
-    "id": "/en/breaking_and_entering",
-    "directed_by": [
-      "Anthony Minghella"
     ]
   },
   {
+    "id": "/en/brick_2006",
+    "directed_by": [
+      "Rian Johnson"
+    ],
+    "initial_release_date": "2006-04-07",
+    "name": "Brick",
     "genre": [
       "Film noir",
       "Indie film",
@@ -6288,15 +6283,15 @@
       "Thriller",
       "Detective fiction",
       "Drama"
-    ],
-    "name": "Brick",
-    "initial_release_date": "2006-04-07",
-    "id": "/en/brick_2006",
-    "directed_by": [
-      "Rian Johnson"
     ]
   },
   {
+    "id": "/en/bride_and_prejudice",
+    "directed_by": [
+      "Gurinder Chadha"
+    ],
+    "initial_release_date": "2004-10-06",
+    "name": "Bride and Prejudice",
     "genre": [
       "Musical",
       "Romantic comedy",
@@ -6307,28 +6302,28 @@
       "Musical comedy",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bride and Prejudice",
-    "initial_release_date": "2004-10-06",
-    "id": "/en/bride_and_prejudice",
-    "directed_by": [
-      "Gurinder Chadha"
     ]
   },
   {
+    "id": "/en/bridget_jones_the_edge_of_reason",
+    "directed_by": [
+      "Beeban Kidron"
+    ],
+    "initial_release_date": "2004-11-08",
+    "name": "Bridget Jones: The Edge of Reason",
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy"
-    ],
-    "name": "Bridget Jones: The Edge of Reason",
-    "initial_release_date": "2004-11-08",
-    "id": "/en/bridget_jones_the_edge_of_reason",
-    "directed_by": [
-      "Beeban Kidron"
     ]
   },
   {
+    "id": "/en/bridget_joness_diary_2001",
+    "directed_by": [
+      "Sharon Maguire"
+    ],
+    "initial_release_date": "2001-04-04",
+    "name": "Bridget Jones's Diary",
     "genre": [
       "Romantic comedy",
       "Film adaptation",
@@ -6336,15 +6331,14 @@
       "Comedy of manners",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bridget Jones's Diary",
-    "initial_release_date": "2001-04-04",
-    "id": "/en/bridget_joness_diary_2001",
-    "directed_by": [
-      "Sharon Maguire"
     ]
   },
   {
+    "id": "/en/brigham_city_2001",
+    "directed_by": [
+      "Richard Dutcher"
+    ],
+    "name": "Brigham City",
     "genre": [
       "Mystery",
       "Indie film",
@@ -6352,14 +6346,15 @@
       "Thriller",
       "Crime Thriller",
       "Drama"
-    ],
-    "name": "Brigham City",
-    "id": "/en/brigham_city_2001",
-    "directed_by": [
-      "Richard Dutcher"
     ]
   },
   {
+    "id": "/en/bright_young_things",
+    "directed_by": [
+      "Stephen Fry"
+    ],
+    "initial_release_date": "2003-10-03",
+    "name": "Bright Young Things",
     "genre": [
       "Indie film",
       "War film",
@@ -6368,130 +6363,130 @@
       "Comedy of manners",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bright Young Things",
-    "initial_release_date": "2003-10-03",
-    "id": "/en/bright_young_things",
-    "directed_by": [
-      "Stephen Fry"
     ]
   },
   {
-    "genre": [
-      "Thriller"
-    ],
-    "name": "Brilliant",
-    "initial_release_date": "2004-02-15",
     "id": "/wikipedia/en_title/Brilliant_$0028film$0029",
     "directed_by": [
       "Roger Cardinal"
+    ],
+    "initial_release_date": "2004-02-15",
+    "name": "Brilliant",
+    "genre": [
+      "Thriller"
     ]
   },
   {
-    "genre": [
-      "Comedy",
-      "Sports"
-    ],
-    "name": "Bring It On",
-    "initial_release_date": "2000-08-22",
     "id": "/en/bring_it_on",
     "directed_by": [
       "Peyton Reed"
+    ],
+    "initial_release_date": "2000-08-22",
+    "name": "Bring It On",
+    "genre": [
+      "Comedy",
+      "Sports"
     ]
   },
   {
-    "genre": [
-      "Teen film",
-      "Sports",
-      "Comedy"
-    ],
-    "name": "Bring It On Again",
-    "initial_release_date": "2004-01-13",
     "id": "/en/bring_it_on_again",
     "directed_by": [
       "Damon Santostefano"
-    ]
-  },
-  {
+    ],
+    "initial_release_date": "2004-01-13",
+    "name": "Bring It On Again",
     "genre": [
       "Teen film",
       "Sports",
       "Comedy"
-    ],
-    "name": "Bring It On: All or Nothing",
-    "initial_release_date": "2006-08-08",
-    "id": "/en/bring_it_on_all_or_nothing",
-    "directed_by": [
-      "Steve Rash"
     ]
   },
   {
+    "id": "/en/bring_it_on_all_or_nothing",
+    "directed_by": [
+      "Steve Rash"
+    ],
+    "initial_release_date": "2006-08-08",
+    "name": "Bring It On: All or Nothing",
+    "genre": [
+      "Teen film",
+      "Sports",
+      "Comedy"
+    ]
+  },
+  {
+    "id": "/en/bringing_down_the_house",
+    "directed_by": [
+      "Adam Shankman"
+    ],
+    "initial_release_date": "2003-03-07",
+    "name": "Bringing Down the House",
     "genre": [
       "Romantic comedy",
       "Screwball comedy",
       "Comedy of Errors",
       "Crime Comedy",
       "Comedy"
-    ],
-    "name": "Bringing Down the House",
-    "initial_release_date": "2003-03-07",
-    "id": "/en/bringing_down_the_house",
-    "directed_by": [
-      "Adam Shankman"
     ]
   },
   {
-    "genre": [
-      "Documentary film",
-      "Biographical film"
-    ],
-    "name": "Broadway: The Golden Age",
-    "initial_release_date": "2004-06-11",
     "id": "/en/broadway_the_golden_age",
     "directed_by": [
       "Rick McKay"
+    ],
+    "initial_release_date": "2004-06-11",
+    "name": "Broadway: The Golden Age",
+    "genre": [
+      "Documentary film",
+      "Biographical film"
     ]
   },
   {
+    "id": "/en/brokeback_mountain",
+    "directed_by": [
+      "Ang Lee"
+    ],
+    "initial_release_date": "2005-09-02",
+    "name": "Brokeback Mountain",
     "genre": [
       "Romance Film",
       "Epic film",
       "Drama"
-    ],
-    "name": "Brokeback Mountain",
-    "initial_release_date": "2005-09-02",
-    "id": "/en/brokeback_mountain",
-    "directed_by": [
-      "Ang Lee"
     ]
   },
   {
+    "id": "/en/broken_allegiance",
+    "directed_by": [
+      "Nick Hallam"
+    ],
+    "name": "Broken Allegiance",
     "genre": [
       "Indie film",
       "Short Film",
       "Fan film"
-    ],
-    "name": "Broken Allegiance",
-    "id": "/en/broken_allegiance",
-    "directed_by": [
-      "Nick Hallam"
     ]
   },
   {
+    "id": "/en/broken_flowers",
+    "directed_by": [
+      "Jim Jarmusch"
+    ],
+    "initial_release_date": "2005-08-05",
+    "name": "Broken Flowers",
     "genre": [
       "Mystery",
       "Road movie",
       "Comedy",
       "Drama"
-    ],
-    "name": "Broken Flowers",
-    "initial_release_date": "2005-08-05",
-    "id": "/en/broken_flowers",
-    "directed_by": [
-      "Jim Jarmusch"
     ]
   },
   {
+    "id": "/en/the_broken_hearts_club_a_romantic_comedy",
+    "directed_by": [
+      "Greg Berlanti"
+    ],
+    "initial_release_date": "2000-01-29",
+    "name": "The Broken Hearts Club: A Romantic Comedy",
     "genre": [
       "Romance Film",
       "LGBT",
@@ -6504,98 +6499,98 @@
       "Ensemble Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "The Broken Hearts Club: A Romantic Comedy",
-    "initial_release_date": "2000-01-29",
-    "id": "/en/the_broken_hearts_club_a_romantic_comedy",
-    "directed_by": [
-      "Greg Berlanti"
     ]
   },
   {
+    "id": "/en/brooklyn_lobster",
+    "directed_by": [
+      "Kevin Jordan"
+    ],
+    "initial_release_date": "2005-09-09",
+    "name": "Brooklyn Lobster",
     "genre": [
       "Indie film",
       "Family Drama",
       "Comedy-drama",
       "Comedy",
       "Drama"
-    ],
-    "name": "Brooklyn Lobster",
-    "initial_release_date": "2005-09-09",
-    "id": "/en/brooklyn_lobster",
-    "directed_by": [
-      "Kevin Jordan"
     ]
   },
   {
-    "genre": [
-      "Thriller",
-      "Crime Fiction"
-    ],
-    "name": "Brother",
     "id": "/en/brother",
     "directed_by": [
       "Takeshi Kitano"
+    ],
+    "name": "Brother",
+    "genre": [
+      "Thriller",
+      "Crime Fiction"
     ]
   },
   {
+    "id": "/en/brother_bear",
+    "directed_by": [
+      "Aaron Blaise",
+      "Robert A. Walker"
+    ],
+    "initial_release_date": "2003-10-20",
+    "name": "Brother Bear",
     "genre": [
       "Family",
       "Fantasy",
       "Animation",
       "Adventure Film"
-    ],
-    "name": "Brother Bear",
-    "initial_release_date": "2003-10-20",
-    "id": "/en/brother_bear",
-    "directed_by": [
-      "Aaron Blaise",
-      "Robert A. Walker"
     ]
   },
   {
+    "id": "/en/brother_bear_2",
+    "directed_by": [
+      "Ben Gluck"
+    ],
+    "initial_release_date": "2006-08-29",
+    "name": "Brother Bear 2",
     "genre": [
       "Family",
       "Animated cartoon",
       "Fantasy",
       "Adventure Film",
       "Animation"
-    ],
-    "name": "Brother Bear 2",
-    "initial_release_date": "2006-08-29",
-    "id": "/en/brother_bear_2",
-    "directed_by": [
-      "Ben Gluck"
     ]
   },
   {
+    "id": "/en/brother_2",
+    "directed_by": [
+      "Aleksei Balabanov"
+    ],
+    "initial_release_date": "2000-05-11",
+    "name": "Brother 2",
     "genre": [
       "Crime Fiction",
       "Thriller",
       "Action Film"
-    ],
-    "name": "Brother 2",
-    "initial_release_date": "2000-05-11",
-    "id": "/en/brother_2",
-    "directed_by": [
-      "Aleksei Balabanov"
     ]
   },
   {
-    "genre": [
-      "Horror",
-      "Cult film",
-      "Creature Film"
-    ],
-    "name": "Brotherhood of Blood",
     "id": "/en/brotherhood_of_blood",
     "directed_by": [
       "Michael Roesch",
       "Peter Scheerer",
       "Sid Haig"
+    ],
+    "name": "Brotherhood of Blood",
+    "genre": [
+      "Horror",
+      "Cult film",
+      "Creature Film"
     ]
   },
   {
+    "id": "/en/brotherhood_of_the_wolf",
+    "directed_by": [
+      "Christophe Gans"
+    ],
+    "initial_release_date": "2001-01-31",
+    "name": "Brotherhood of the Wolf",
     "genre": [
       "Martial Arts Film",
       "Adventure Film",
@@ -6604,15 +6599,16 @@
       "Historical fiction",
       "Thriller",
       "Action Film"
-    ],
-    "name": "Brotherhood of the Wolf",
-    "initial_release_date": "2001-01-31",
-    "id": "/en/brotherhood_of_the_wolf",
-    "directed_by": [
-      "Christophe Gans"
     ]
   },
   {
+    "id": "/en/brothers_of_the_head",
+    "directed_by": [
+      "Keith Fulton",
+      "Louis Pepe"
+    ],
+    "initial_release_date": "2005-09-10",
+    "name": "Brothers of the Head",
     "genre": [
       "Indie film",
       "Musical",
@@ -6623,16 +6619,15 @@
       "Historical period drama",
       "Musical Drama",
       "Drama"
-    ],
-    "name": "Brothers of the Head",
-    "initial_release_date": "2005-09-10",
-    "id": "/en/brothers_of_the_head",
-    "directed_by": [
-      "Keith Fulton",
-      "Louis Pepe"
     ]
   },
   {
+    "id": "/en/brown_sugar_2002",
+    "directed_by": [
+      "Rick Famuyiwa"
+    ],
+    "initial_release_date": "2002-10-05",
+    "name": "Brown Sugar",
     "genre": [
       "Musical",
       "Romantic comedy",
@@ -6642,58 +6637,58 @@
       "Musical comedy",
       "Comedy",
       "Drama"
-    ],
-    "name": "Brown Sugar",
-    "initial_release_date": "2002-10-05",
-    "id": "/en/brown_sugar_2002",
-    "directed_by": [
-      "Rick Famuyiwa"
     ]
   },
   {
+    "id": "/en/bruce_almighty",
+    "directed_by": [
+      "Tom Shadyac"
+    ],
+    "initial_release_date": "2003-05-23",
+    "name": "Bruce Almighty",
     "genre": [
       "Comedy",
       "Fantasy",
       "Drama"
-    ],
-    "name": "Bruce Almighty",
-    "initial_release_date": "2003-05-23",
-    "id": "/en/bruce_almighty",
-    "directed_by": [
-      "Tom Shadyac"
     ]
   },
   {
+    "id": "/en/bubba_ho-tep",
+    "directed_by": [
+      "Don Coscarelli"
+    ],
+    "initial_release_date": "2002-06-09",
+    "name": "Bubba Ho-Tep",
     "genre": [
       "Horror",
       "Parody",
       "Comedy",
       "Mystery",
       "Drama"
-    ],
-    "name": "Bubba Ho-Tep",
-    "initial_release_date": "2002-06-09",
-    "id": "/en/bubba_ho-tep",
-    "directed_by": [
-      "Don Coscarelli"
     ]
   },
   {
+    "id": "/en/bubble",
+    "directed_by": [
+      "Steven Soderbergh"
+    ],
+    "initial_release_date": "2005-09-03",
+    "name": "Bubble",
     "genre": [
       "Crime Fiction",
       "Mystery",
       "Indie film",
       "Thriller",
       "Drama"
-    ],
-    "name": "Bubble",
-    "initial_release_date": "2005-09-03",
-    "id": "/en/bubble",
-    "directed_by": [
-      "Steven Soderbergh"
     ]
   },
   {
+    "id": "/en/bubble_boy",
+    "directed_by": [
+      "Blair Hayes"
+    ],
+    "initial_release_date": "2001-08-23",
+    "name": "Bubble Boy",
     "genre": [
       "Romance Film",
       "Teen film",
@@ -6701,42 +6696,42 @@
       "Adventure Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Bubble Boy",
-    "initial_release_date": "2001-08-23",
-    "id": "/en/bubble_boy",
-    "directed_by": [
-      "Blair Hayes"
     ]
   },
   {
+    "id": "/en/buddy_boy",
+    "directed_by": [
+      "Mark Hanlon"
+    ],
+    "initial_release_date": "2000-03-24",
+    "name": "Buddy Boy",
     "genre": [
       "Psychological thriller",
       "Thriller",
       "Indie film",
       "Erotic thriller"
-    ],
-    "name": "Buddy Boy",
-    "initial_release_date": "2000-03-24",
-    "id": "/en/buddy_boy",
-    "directed_by": [
-      "Mark Hanlon"
     ]
   },
   {
+    "id": "/en/buffalo_dreams",
+    "directed_by": [
+      "David Jackson"
+    ],
+    "initial_release_date": "2005-03-11",
+    "name": "Buffalo Dreams",
     "genre": [
       "Western",
       "Teen film",
       "Drama"
-    ],
-    "name": "Buffalo Dreams",
-    "initial_release_date": "2005-03-11",
-    "id": "/en/buffalo_dreams",
-    "directed_by": [
-      "David Jackson"
     ]
   },
   {
+    "id": "/en/buffalo_soldiers",
+    "directed_by": [
+      "Gregor Jordan"
+    ],
+    "initial_release_date": "2001-09-08",
+    "name": "Buffalo Soldiers",
     "genre": [
       "War film",
       "Crime Fiction",
@@ -6745,29 +6740,29 @@
       "Satire",
       "Indie film",
       "Drama"
-    ],
-    "name": "Buffalo Soldiers",
-    "initial_release_date": "2001-09-08",
-    "id": "/en/buffalo_soldiers",
-    "directed_by": [
-      "Gregor Jordan"
     ]
   },
   {
+    "id": "/en/bug_2006",
+    "directed_by": [
+      "William Friedkin"
+    ],
+    "initial_release_date": "2006-05-19",
+    "name": "Bug",
     "genre": [
       "Thriller",
       "Horror",
       "Indie film",
       "Drama"
-    ],
-    "name": "Bug",
-    "initial_release_date": "2006-05-19",
-    "id": "/en/bug_2006",
-    "directed_by": [
-      "William Friedkin"
     ]
   },
   {
+    "id": "/en/bulletproof_monk",
+    "directed_by": [
+      "Paul Hunter"
+    ],
+    "initial_release_date": "2003-04-16",
+    "name": "Bulletproof Monk",
     "genre": [
       "Martial Arts Film",
       "Fantasy",
@@ -6777,29 +6772,29 @@
       "Action/Adventure",
       "Action Comedy",
       "Comedy"
-    ],
-    "name": "Bulletproof Monk",
-    "initial_release_date": "2003-04-16",
-    "id": "/en/bulletproof_monk",
-    "directed_by": [
-      "Paul Hunter"
     ]
   },
   {
+    "id": "/en/bully_2001",
+    "directed_by": [
+      "Larry Clark"
+    ],
+    "initial_release_date": "2001-06-15",
+    "name": "Bully",
     "genre": [
       "Teen film",
       "Crime Fiction",
       "Thriller",
       "Drama"
-    ],
-    "name": "Bully",
-    "initial_release_date": "2001-06-15",
-    "id": "/en/bully_2001",
-    "directed_by": [
-      "Larry Clark"
     ]
   },
   {
+    "id": "/en/bunny_2005",
+    "directed_by": [
+      "V. V. Vinayak"
+    ],
+    "initial_release_date": "2005-04-06",
+    "name": "Bunny",
     "genre": [
       "Musical",
       "Romance Film",
@@ -6807,28 +6802,28 @@
       "Tollywood",
       "Musical Drama",
       "Drama"
-    ],
-    "name": "Bunny",
-    "initial_release_date": "2005-04-06",
-    "id": "/en/bunny_2005",
-    "directed_by": [
-      "V. V. Vinayak"
     ]
   },
   {
+    "id": "/en/bunshinsaba",
+    "directed_by": [
+      "Ahn Byeong-ki"
+    ],
+    "initial_release_date": "2004-05-14",
+    "name": "Bunshinsaba",
     "genre": [
       "Horror",
       "World cinema",
       "East Asian cinema"
-    ],
-    "name": "Bunshinsaba",
-    "initial_release_date": "2004-05-14",
-    "id": "/en/bunshinsaba",
-    "directed_by": [
-      "Ahn Byeong-ki"
     ]
   },
   {
+    "id": "/en/bunty_aur_babli",
+    "directed_by": [
+      "Shaad Ali"
+    ],
+    "initial_release_date": "2005-05-27",
+    "name": "Bunty Aur Babli",
     "genre": [
       "Romance Film",
       "Musical",
@@ -6837,95 +6832,95 @@
       "Comedy",
       "Adventure Film",
       "Crime Fiction"
-    ],
-    "name": "Bunty Aur Babli",
-    "initial_release_date": "2005-05-27",
-    "id": "/en/bunty_aur_babli",
-    "directed_by": [
-      "Shaad Ali"
     ]
   },
   {
-    "genre": [
-      "Documentary film",
-      "True crime"
-    ],
-    "name": "Bus 174",
-    "initial_release_date": "2002-10-22",
     "id": "/en/onibus_174",
     "directed_by": [
       "Jos\u00e9 Padilha"
+    ],
+    "initial_release_date": "2002-10-22",
+    "name": "Bus 174",
+    "genre": [
+      "Documentary film",
+      "True crime"
     ]
   },
   {
+    "id": "/en/bus_conductor",
+    "directed_by": [
+      "V. M. Vinu"
+    ],
+    "initial_release_date": "2005-12-23",
+    "name": "Bus Conductor",
     "genre": [
       "Comedy",
       "Action Film",
       "Malayalam Cinema",
       "World cinema",
       "Drama"
-    ],
-    "name": "Bus Conductor",
-    "initial_release_date": "2005-12-23",
-    "id": "/en/bus_conductor",
-    "directed_by": [
-      "V. M. Vinu"
     ]
   },
   {
-    "genre": [
-      "Indie film",
-      "Documentary film"
-    ],
-    "name": "Busted Shoes and Broken Hearts: A Film About Lowlight",
     "id": "/m/0bvs38",
     "directed_by": [
       "Michael Votto"
+    ],
+    "name": "Busted Shoes and Broken Hearts: A Film About Lowlight",
+    "genre": [
+      "Indie film",
+      "Documentary film"
     ]
   },
   {
+    "id": "/en/butterfly_2004",
+    "directed_by": [
+      "Yan Yan Mak"
+    ],
+    "initial_release_date": "2004-09-04",
+    "name": "Butterfly",
     "genre": [
       "LGBT",
       "Chinese Movies",
       "Drama"
-    ],
-    "name": "Butterfly",
-    "initial_release_date": "2004-09-04",
-    "id": "/en/butterfly_2004",
-    "directed_by": [
-      "Yan Yan Mak"
     ]
   },
   {
+    "id": "/en/butterfly_on_a_wheel",
+    "directed_by": [
+      "Mike Barker"
+    ],
+    "initial_release_date": "2007-02-10",
+    "name": "Butterfly on a Wheel",
     "genre": [
       "Thriller",
       "Crime Thriller",
       "Crime Fiction",
       "Psychological thriller",
       "Drama"
-    ],
-    "name": "Butterfly on a Wheel",
-    "initial_release_date": "2007-02-10",
-    "id": "/en/butterfly_on_a_wheel",
-    "directed_by": [
-      "Mike Barker"
     ]
   },
   {
+    "id": "/en/c_i_d_moosa",
+    "directed_by": [
+      "Johny Antony"
+    ],
+    "initial_release_date": "2003-07-04",
+    "name": "C.I.D.Moosa",
     "genre": [
       "Action Film",
       "Comedy",
       "Malayalam Cinema",
       "World cinema"
-    ],
-    "name": "C.I.D.Moosa",
-    "initial_release_date": "2003-07-04",
-    "id": "/en/c_i_d_moosa",
-    "directed_by": [
-      "Johny Antony"
     ]
   },
   {
+    "id": "/en/c_r_a_z_y",
+    "directed_by": [
+      "Jean-Marc Vall\u00e9e"
+    ],
+    "initial_release_date": "2005-05-27",
+    "name": "C.R.A.Z.Y.",
     "genre": [
       "LGBT",
       "Indie film",
@@ -6936,15 +6931,14 @@
       "Historical period drama",
       "Coming of age",
       "Drama"
-    ],
-    "name": "C.R.A.Z.Y.",
-    "initial_release_date": "2005-05-27",
-    "id": "/en/c_r_a_z_y",
-    "directed_by": [
-      "Jean-Marc Vall\u00e9e"
     ]
   },
   {
+    "id": "/en/c_s_a_the_confederate_states_of_america",
+    "directed_by": [
+      "Kevin Willmott"
+    ],
+    "name": "C.S.A.: The Confederate States of America",
     "genre": [
       "Mockumentary",
       "Satire",
@@ -6954,66 +6948,66 @@
       "Political cinema",
       "Comedy",
       "Drama"
-    ],
-    "name": "C.S.A.: The Confederate States of America",
-    "id": "/en/c_s_a_the_confederate_states_of_america",
-    "directed_by": [
-      "Kevin Willmott"
     ]
   },
   {
-    "genre": [
-      "Comedy"
-    ],
-    "name": "Cabaret Paradis",
-    "initial_release_date": "2006-04-12",
     "id": "/en/cabaret_paradis",
     "directed_by": [
       "Corinne Benizio",
       "Gilles Benizio"
+    ],
+    "initial_release_date": "2006-04-12",
+    "name": "Cabaret Paradis",
+    "genre": [
+      "Comedy"
     ]
   },
   {
+    "id": "/wikipedia/it_id/335645",
+    "directed_by": [
+      "Michael Haneke"
+    ],
+    "initial_release_date": "2005-05-14",
+    "name": "Cach\u00e9",
     "genre": [
       "Thriller",
       "Mystery",
       "Psychological thriller",
       "Drama"
-    ],
-    "name": "Cach\u00e9",
-    "initial_release_date": "2005-05-14",
-    "id": "/wikipedia/it_id/335645",
-    "directed_by": [
-      "Michael Haneke"
     ]
   },
   {
-    "genre": [
-      "Drama"
-    ],
-    "name": "Cactuses",
-    "initial_release_date": "2006-03-15",
     "id": "/en/cactuses",
     "directed_by": [
       "Matt Hannon",
       "Rick Rapoza"
+    ],
+    "initial_release_date": "2006-03-15",
+    "name": "Cactuses",
+    "genre": [
+      "Drama"
     ]
   },
   {
+    "id": "/en/cadet_kelly",
+    "directed_by": [
+      "Larry Shaw"
+    ],
+    "initial_release_date": "2002-03-08",
+    "name": "Cadet Kelly",
     "genre": [
       "Teen film",
       "Coming of age",
       "Family",
       "Comedy"
-    ],
-    "name": "Cadet Kelly",
-    "initial_release_date": "2002-03-08",
-    "id": "/en/cadet_kelly",
-    "directed_by": [
-      "Larry Shaw"
     ]
   },
   {
+    "id": "/en/caffeine_2006",
+    "directed_by": [
+      "John Cosgrove"
+    ],
+    "name": "Caffeine",
     "genre": [
       "Romantic comedy",
       "Romance Film",
@@ -7021,106 +7015,107 @@
       "Ensemble Film",
       "Workplace Comedy",
       "Comedy"
-    ],
-    "name": "Caffeine",
-    "id": "/en/caffeine_2006",
-    "directed_by": [
-      "John Cosgrove"
     ]
   },
   {
+    "id": "/wikipedia/es_id/1062610",
+    "directed_by": [
+      "Nisha Ganatra",
+      "Jennifer Arzt"
+    ],
+    "name": "Cake",
     "genre": [
       "Romantic comedy",
       "Short Film",
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Cake",
-    "id": "/wikipedia/es_id/1062610",
-    "directed_by": [
-      "Nisha Ganatra",
-      "Jennifer Arzt"
     ]
   },
   {
+    "id": "/en/calcutta_mail",
+    "directed_by": [
+      "Sudhir Mishra"
+    ],
+    "initial_release_date": "2003-06-30",
+    "name": "Calcutta Mail",
     "genre": [
       "Thriller",
       "Bollywood",
       "World cinema"
-    ],
-    "name": "Calcutta Mail",
-    "initial_release_date": "2003-06-30",
-    "id": "/en/calcutta_mail",
-    "directed_by": [
-      "Sudhir Mishra"
     ]
   },
   {
-    "genre": [
-      "Indie film",
-      "Documentary film"
-    ],
-    "name": "Hackers Wanted",
     "id": "/en/can_you_hack_it",
     "directed_by": [
       "Sam Bozzo"
+    ],
+    "name": "Hackers Wanted",
+    "genre": [
+      "Indie film",
+      "Documentary film"
     ]
   },
   {
+    "id": "/en/candy_2006",
+    "directed_by": [
+      "Neil Armfield"
+    ],
+    "initial_release_date": "2006-04-27",
+    "name": "Candy",
     "genre": [
       "Romance Film",
       "Indie film",
       "World cinema",
       "Drama"
-    ],
-    "name": "Candy",
-    "initial_release_date": "2006-04-27",
-    "id": "/en/candy_2006",
-    "directed_by": [
-      "Neil Armfield"
     ]
   },
   {
+    "id": "/en/caotica_ana",
+    "directed_by": [
+      "Julio Medem"
+    ],
+    "initial_release_date": "2007-08-24",
+    "name": "Ca\u00f3tica Ana",
     "genre": [
       "Romance Film",
       "Mystery",
       "Drama"
-    ],
-    "name": "Ca\u00f3tica Ana",
-    "initial_release_date": "2007-08-24",
-    "id": "/en/caotica_ana",
-    "directed_by": [
-      "Julio Medem"
     ]
   },
   {
+    "id": "/en/capote",
+    "directed_by": [
+      "Bennett Miller"
+    ],
+    "initial_release_date": "2005-09-02",
+    "name": "Capote",
     "genre": [
       "Crime Fiction",
       "Biographical film",
       "Drama"
-    ],
-    "name": "Capote",
-    "initial_release_date": "2005-09-02",
-    "id": "/en/capote",
-    "directed_by": [
-      "Bennett Miller"
     ]
   },
   {
+    "id": "/en/capturing_the_friedmans",
+    "directed_by": [
+      "Andrew Jarecki"
+    ],
+    "initial_release_date": "2003-01-17",
+    "name": "Capturing the Friedmans",
     "genre": [
       "Documentary film",
       "Mystery",
       "Biographical film"
-    ],
-    "name": "Capturing the Friedmans",
-    "initial_release_date": "2003-01-17",
-    "id": "/en/capturing_the_friedmans",
-    "directed_by": [
-      "Andrew Jarecki"
     ]
   },
   {
+    "id": "/en/care_bears_journey_to_joke_a_lot",
+    "directed_by": [
+      "Mike Fallows"
+    ],
+    "initial_release_date": "2004-10-05",
+    "name": "Care Bears: Journey to Joke-a-lot",
     "genre": [
       "Musical",
       "Computer Animation",
@@ -7130,46 +7125,46 @@
       "Musical comedy",
       "Comedy",
       "Family"
-    ],
-    "name": "Care Bears: Journey to Joke-a-lot",
-    "initial_release_date": "2004-10-05",
-    "id": "/en/care_bears_journey_to_joke_a_lot",
-    "directed_by": [
-      "Mike Fallows"
     ]
   },
   {
+    "id": "/en/cargo_2006",
+    "directed_by": [
+      "Clive Gordon"
+    ],
+    "initial_release_date": "2006-01-24",
+    "name": "Cargo",
     "genre": [
       "Thriller",
       "Psychological thriller",
       "Indie film",
       "Adventure Film",
       "Drama"
-    ],
-    "name": "Cargo",
-    "initial_release_date": "2006-01-24",
-    "id": "/en/cargo_2006",
-    "directed_by": [
-      "Clive Gordon"
     ]
   },
   {
+    "id": "/en/cars",
+    "directed_by": [
+      "John Lasseter",
+      "Joe Ranft"
+    ],
+    "initial_release_date": "2006-03-14",
+    "name": "Cars",
     "genre": [
       "Animation",
       "Family",
       "Adventure Film",
       "Sports",
       "Comedy"
-    ],
-    "name": "Cars",
-    "initial_release_date": "2006-03-14",
-    "id": "/en/cars",
-    "directed_by": [
-      "John Lasseter",
-      "Joe Ranft"
     ]
   },
   {
+    "id": "/en/casanova",
+    "directed_by": [
+      "Lasse Hallstr\u00f6m"
+    ],
+    "initial_release_date": "2005-09-03",
+    "name": "Casanova",
     "genre": [
       "Romance Film",
       "Romantic comedy",
@@ -7179,15 +7174,15 @@
       "Swashbuckler film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Casanova",
-    "initial_release_date": "2005-09-03",
-    "id": "/en/casanova",
-    "directed_by": [
-      "Lasse Hallstr\u00f6m"
     ]
   },
   {
+    "id": "/en/case_of_evil",
+    "directed_by": [
+      "Graham Theakston"
+    ],
+    "initial_release_date": "2002-10-25",
+    "name": "Sherlock: Case of Evil",
     "genre": [
       "Mystery",
       "Action Film",
@@ -7195,58 +7190,55 @@
       "Thriller",
       "Crime Fiction",
       "Drama"
-    ],
-    "name": "Sherlock: Case of Evil",
-    "initial_release_date": "2002-10-25",
-    "id": "/en/case_of_evil",
-    "directed_by": [
-      "Graham Theakston"
     ]
   },
   {
     "id": "/en/cast_away",
-    "name": "Cast Away",
     "initial_release_date": "2000-12-07",
+    "name": "Cast Away",
+    "directed_by": [
+      "Robert Zemeckis"
+    ],
     "genre": [
       "Airplanes and airports",
       "Adventure Film",
       "Action/Adventure",
       "Drama"
-    ],
-    "directed_by": [
-      "Robert Zemeckis"
     ]
   },
   {
     "id": "/en/castlevania_2007",
     "name": "Castlevania",
-    "genre": [
-      "Action Film",
-      "Horror"
-    ],
     "directed_by": [
       "Paul W. S. Anderson",
       "Sylvain White"
+    ],
+    "genre": [
+      "Action Film",
+      "Horror"
     ]
   },
   {
     "id": "/en/catch_me_if_you_can",
-    "name": "Catch Me If You Can",
     "initial_release_date": "2002-12-16",
+    "name": "Catch Me If You Can",
+    "directed_by": [
+      "Steven Spielberg"
+    ],
     "genre": [
       "Crime Fiction",
       "Comedy",
       "Biographical film",
       "Drama"
-    ],
-    "directed_by": [
-      "Steven Spielberg"
     ]
   },
   {
     "id": "/en/catch_that_kid",
-    "name": "Catch That Kid",
     "initial_release_date": "2004-02-06",
+    "name": "Catch That Kid",
+    "directed_by": [
+      "Bart Freundlich"
+    ],
     "genre": [
       "Teen film",
       "Adventure Film",
@@ -7257,27 +7249,27 @@
       "Crime Comedy",
       "Family-Oriented Adventure",
       "Comedy"
-    ],
-    "directed_by": [
-      "Bart Freundlich"
     ]
   },
   {
     "id": "/en/caterina_in_the_big_city",
-    "name": "Caterina in the Big City",
     "initial_release_date": "2003-10-24",
+    "name": "Caterina in the Big City",
+    "directed_by": [
+      "Paolo Virz\u00ec"
+    ],
     "genre": [
       "Comedy",
       "Drama"
-    ],
-    "directed_by": [
-      "Paolo Virz\u00ec"
     ]
   },
   {
     "id": "/en/cats_dogs",
-    "name": "Cats &amp; Dogs",
     "initial_release_date": "2001-07-04",
+    "name": "Cats &amp; Dogs",
+    "directed_by": [
+      "Lawrence Guterman"
+    ],
     "genre": [
       "Adventure Film",
       "Family",
@@ -7286,15 +7278,15 @@
       "Fantasy Adventure",
       "Fantasy Comedy",
       "Comedy"
-    ],
-    "directed_by": [
-      "Lawrence Guterman"
     ]
   },
   {
     "id": "/en/catwoman_2004",
-    "name": "Catwoman",
     "initial_release_date": "2004-07-19",
+    "name": "Catwoman",
+    "directed_by": [
+      "Pitof"
+    ],
     "genre": [
       "Action Film",
       "Crime Fiction",
@@ -7302,15 +7294,15 @@
       "Action/Adventure",
       "Thriller",
       "Superhero movie"
-    ],
-    "directed_by": [
-      "Pitof"
     ]
   },
   {
     "id": "/en/caved_in_prehistoric_terror",
-    "name": "Caved In: Prehistoric Terror",
     "initial_release_date": "2006-01-07",
+    "name": "Caved In: Prehistoric Terror",
+    "directed_by": [
+      "Richard Pepin"
+    ],
     "genre": [
       "Science Fiction",
       "Horror",
@@ -7320,29 +7312,29 @@
       "Television film",
       "Creature Film",
       "Sci-Fi Horror"
-    ],
-    "directed_by": [
-      "Richard Pepin"
     ]
   },
   {
     "id": "/en/cellular",
-    "name": "Cellular",
     "initial_release_date": "2004-09-10",
+    "name": "Cellular",
+    "directed_by": [
+      "David R. Ellis"
+    ],
     "genre": [
       "Thriller",
       "Action Film",
       "Crime Thriller",
       "Action/Adventure"
-    ],
-    "directed_by": [
-      "David R. Ellis"
     ]
   },
   {
     "id": "/en/center_stage",
-    "name": "Center Stage",
     "initial_release_date": "2000-05-12",
+    "name": "Center Stage",
+    "directed_by": [
+      "Nicholas Hytner"
+    ],
     "genre": [
       "Teen film",
       "Dance film",
@@ -7350,120 +7342,120 @@
       "Musical Drama",
       "Ensemble Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Nicholas Hytner"
     ]
   },
   {
     "id": "/en/chai_lai",
-    "name": "Chai Lai",
     "initial_release_date": "2006-01-26",
+    "name": "Chai Lai",
+    "directed_by": [
+      "Poj Arnon"
+    ],
     "genre": [
       "Action Film",
       "Martial Arts Film",
       "Comedy"
-    ],
-    "directed_by": [
-      "Poj Arnon"
     ]
   },
   {
     "id": "/en/chain_2004",
     "name": "Chain",
-    "genre": [
-      "Documentary film"
-    ],
     "directed_by": [
       "Jem Cohen"
+    ],
+    "genre": [
+      "Documentary film"
     ]
   },
   {
     "id": "/en/chakram_2005",
-    "name": "Chakram",
     "initial_release_date": "2005-03-25",
+    "name": "Chakram",
+    "directed_by": [
+      "Krishna Vamsi"
+    ],
     "genre": [
       "Romance Film",
       "Drama",
       "Tollywood",
       "World cinema"
-    ],
-    "directed_by": [
-      "Krishna Vamsi"
     ]
   },
   {
     "id": "/en/challenger_2007",
     "name": "Challenger",
-    "genre": [
-      "Drama"
-    ],
     "directed_by": [
       "Philip Kaufman"
+    ],
+    "genre": [
+      "Drama"
     ]
   },
   {
     "id": "/en/chalo_ishq_ladaaye",
-    "name": "Chalo Ishq Ladaaye",
     "initial_release_date": "2002-12-27",
+    "name": "Chalo Ishq Ladaaye",
+    "directed_by": [
+      "Aziz Sejawal"
+    ],
     "genre": [
       "Romance Film",
       "Comedy",
       "Bollywood",
       "World cinema"
-    ],
-    "directed_by": [
-      "Aziz Sejawal"
     ]
   },
   {
     "id": "/en/chalte_chalte",
-    "name": "Chalte Chalte",
     "initial_release_date": "2003-06-12",
+    "name": "Chalte Chalte",
+    "directed_by": [
+      "Aziz Mirza"
+    ],
     "genre": [
       "Romance Film",
       "Musical",
       "Bollywood",
       "Drama",
       "Musical Drama"
-    ],
-    "directed_by": [
-      "Aziz Mirza"
     ]
   },
   {
     "id": "/en/chameli",
-    "name": "Chameli",
     "initial_release_date": "2003-12-31",
+    "name": "Chameli",
+    "directed_by": [
+      "Sudhir Mishra",
+      "Anant Balani"
+    ],
     "genre": [
       "Romance Film",
       "Bollywood",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Sudhir Mishra",
-      "Anant Balani"
     ]
   },
   {
     "id": "/en/chandni_bar",
-    "name": "Chandni Bar",
     "initial_release_date": "2001-09-28",
+    "name": "Chandni Bar",
+    "directed_by": [
+      "Madhur Bhandarkar"
+    ],
     "genre": [
       "Crime Fiction",
       "Bollywood",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Madhur Bhandarkar"
     ]
   },
   {
     "id": "/en/chandramukhi",
-    "name": "Chandramukhi",
     "initial_release_date": "2005-04-13",
+    "name": "Chandramukhi",
+    "directed_by": [
+      "P. Vasu"
+    ],
     "genre": [
       "Horror",
       "World cinema",
@@ -7473,29 +7465,29 @@
       "Comedy",
       "Fantasy",
       "Romance Film"
-    ],
-    "directed_by": [
-      "P. Vasu"
     ]
   },
   {
     "id": "/en/changing_lanes",
-    "name": "Changing Lanes",
     "initial_release_date": "2002-04-07",
+    "name": "Changing Lanes",
+    "directed_by": [
+      "Roger Michell"
+    ],
     "genre": [
       "Thriller",
       "Psychological thriller",
       "Melodrama",
       "Drama"
-    ],
-    "directed_by": [
-      "Roger Michell"
     ]
   },
   {
     "id": "/en/chaos_2007",
-    "name": "Chaos",
     "initial_release_date": "2005-12-15",
+    "name": "Chaos",
+    "directed_by": [
+      "Tony Giglio"
+    ],
     "genre": [
       "Thriller",
       "Action Film",
@@ -7503,68 +7495,68 @@
       "Heist film",
       "Action/Adventure",
       "Drama"
-    ],
-    "directed_by": [
-      "Tony Giglio"
     ]
   },
   {
     "id": "/en/chaos_2005",
-    "name": "Chaos",
     "initial_release_date": "2005-08-10",
+    "name": "Chaos",
+    "directed_by": [
+      "David DeFalco"
+    ],
     "genre": [
       "Horror",
       "Teen film",
       "B movie",
       "Slasher"
-    ],
-    "directed_by": [
-      "David DeFalco"
     ]
   },
   {
     "id": "/en/chaos_and_creation_at_abbey_road",
-    "name": "Chaos and Creation at Abbey Road",
     "initial_release_date": "2006-01-27",
-    "genre": [
-      "Musical"
-    ],
+    "name": "Chaos and Creation at Abbey Road",
     "directed_by": [
       "Simon Hilton"
+    ],
+    "genre": [
+      "Musical"
     ]
   },
   {
     "id": "/en/chaos_theory_2007",
     "name": "Chaos Theory",
+    "directed_by": [
+      "Marcos Siega"
+    ],
     "genre": [
       "Romance Film",
       "Romantic comedy",
       "Comedy-drama",
       "Comedy",
       "Drama"
-    ],
-    "directed_by": [
-      "Marcos Siega"
     ]
   },
   {
     "id": "/en/chapter_27",
-    "name": "Chapter 27",
     "initial_release_date": "2007-01-25",
+    "name": "Chapter 27",
+    "directed_by": [
+      "Jarrett Schaefer"
+    ],
     "genre": [
       "Indie film",
       "Crime Fiction",
       "Biographical film",
       "Drama"
-    ],
-    "directed_by": [
-      "Jarrett Schaefer"
     ]
   },
   {
     "id": "/en/charlie_and_the_chocolate_factory_2005",
-    "name": "Charlie and the Chocolate Factory",
     "initial_release_date": "2005-07-10",
+    "name": "Charlie and the Chocolate Factory",
+    "directed_by": [
+      "Tim Burton"
+    ],
     "genre": [
       "Fantasy",
       "Remake",
@@ -7573,30 +7565,30 @@
       "Children's Fantasy",
       "Children's/Family",
       "Comedy"
-    ],
-    "directed_by": [
-      "Tim Burton"
     ]
   },
   {
     "id": "/en/charlies_angels",
-    "name": "Charlie's Angels",
     "initial_release_date": "2000-10-22",
+    "name": "Charlie's Angels",
+    "directed_by": [
+      "Joseph McGinty Nichol"
+    ],
     "genre": [
       "Action Film",
       "Crime Fiction",
       "Comedy",
       "Adventure Film",
       "Thriller"
-    ],
-    "directed_by": [
-      "Joseph McGinty Nichol"
     ]
   },
   {
     "id": "/en/charlies_angels_full_throttle",
-    "name": "Charlie's Angels: Full Throttle",
     "initial_release_date": "2003-06-18",
+    "name": "Charlie's Angels: Full Throttle",
+    "directed_by": [
+      "Joseph McGinty Nichol"
+    ],
     "genre": [
       "Martial Arts Film",
       "Action Film",
@@ -7605,15 +7597,15 @@
       "Action/Adventure",
       "Action Comedy",
       "Comedy"
-    ],
-    "directed_by": [
-      "Joseph McGinty Nichol"
     ]
   },
   {
     "id": "/en/charlotte_gray",
-    "name": "Charlotte Gray",
     "initial_release_date": "2001-12-17",
+    "name": "Charlotte Gray",
+    "directed_by": [
+      "Gillian Armstrong"
+    ],
     "genre": [
       "Romance Film",
       "War film",
@@ -7621,58 +7613,58 @@
       "Historical period drama",
       "Film adaptation",
       "Drama"
-    ],
-    "directed_by": [
-      "Gillian Armstrong"
     ]
   },
   {
     "id": "/en/charlottes_web",
-    "name": "Charlotte's Web",
     "initial_release_date": "2006-12-07",
+    "name": "Charlotte's Web",
+    "directed_by": [
+      "Gary Winick"
+    ],
     "genre": [
       "Animation",
       "Family",
       "Comedy"
-    ],
-    "directed_by": [
-      "Gary Winick"
     ]
   },
   {
     "id": "/en/chasing_liberty",
-    "name": "Chasing Liberty",
     "initial_release_date": "2004-01-07",
+    "name": "Chasing Liberty",
+    "directed_by": [
+      "Andy Cadiff"
+    ],
     "genre": [
       "Romantic comedy",
       "Teen film",
       "Romance Film",
       "Road movie",
       "Comedy"
-    ],
-    "directed_by": [
-      "Andy Cadiff"
     ]
   },
   {
     "id": "/en/chasing_papi",
-    "name": "Chasing Papi",
     "initial_release_date": "2003-04-16",
+    "name": "Chasing Papi",
+    "directed_by": [
+      "Linda Mendoza"
+    ],
     "genre": [
       "Romance Film",
       "Romantic comedy",
       "Farce",
       "Chase Movie",
       "Comedy"
-    ],
-    "directed_by": [
-      "Linda Mendoza"
     ]
   },
   {
     "id": "/en/chasing_sleep",
-    "name": "Chasing Sleep",
     "initial_release_date": "2001-09-16",
+    "name": "Chasing Sleep",
+    "directed_by": [
+      "Michael Walker"
+    ],
     "genre": [
       "Mystery",
       "Psychological thriller",
@@ -7681,140 +7673,140 @@
       "Indie film",
       "Suspense",
       "Crime Thriller"
-    ],
-    "directed_by": [
-      "Michael Walker"
     ]
   },
   {
     "id": "/en/chasing_the_horizon",
-    "name": "Chasing the Horizon",
     "initial_release_date": "2006-04-26",
-    "genre": [
-      "Documentary film",
-      "Auto racing"
-    ],
+    "name": "Chasing the Horizon",
     "directed_by": [
       "Markus Canter",
       "Mason Canter"
+    ],
+    "genre": [
+      "Documentary film",
+      "Auto racing"
     ]
   },
   {
     "id": "/en/chathikkatha_chanthu",
-    "name": "Chathikkatha Chanthu",
     "initial_release_date": "2004-04-14",
+    "name": "Chathikkatha Chanthu",
+    "directed_by": [
+      "Meccartin"
+    ],
     "genre": [
       "Comedy",
       "Malayalam Cinema",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Meccartin"
     ]
   },
   {
     "id": "/en/chatrapati",
-    "name": "Chhatrapati",
     "initial_release_date": "2005-09-25",
+    "name": "Chhatrapati",
+    "directed_by": [
+      "S. S. Rajamouli"
+    ],
     "genre": [
       "Action Film",
       "Tollywood",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "S. S. Rajamouli"
     ]
   },
   {
     "id": "/en/cheaper_by_the_dozen_2003",
-    "name": "Cheaper by the Dozen",
     "initial_release_date": "2003-12-25",
+    "name": "Cheaper by the Dozen",
+    "directed_by": [
+      "Shawn Levy"
+    ],
     "genre": [
       "Family",
       "Comedy",
       "Drama"
-    ],
-    "directed_by": [
-      "Shawn Levy"
     ]
   },
   {
     "id": "/en/cheaper_by_the_dozen_2",
-    "name": "Cheaper by the Dozen 2",
     "initial_release_date": "2005-12-21",
+    "name": "Cheaper by the Dozen 2",
+    "directed_by": [
+      "Adam Shankman"
+    ],
     "genre": [
       "Family",
       "Adventure Film",
       "Domestic Comedy",
       "Comedy"
-    ],
-    "directed_by": [
-      "Adam Shankman"
     ]
   },
   {
     "id": "/en/checking_out_2005",
-    "name": "Checking Out",
     "initial_release_date": "2005-04-10",
+    "name": "Checking Out",
+    "directed_by": [
+      "Jeff Hare"
+    ],
     "genre": [
       "Black comedy",
       "Comedy"
-    ],
-    "directed_by": [
-      "Jeff Hare"
     ]
   },
   {
     "id": "/en/chellamae",
-    "name": "Chellamae",
     "initial_release_date": "2004-09-10",
+    "name": "Chellamae",
+    "directed_by": [
+      "Gandhi Krishna"
+    ],
     "genre": [
       "Romance Film",
       "Tamil cinema",
       "World cinema"
-    ],
-    "directed_by": [
-      "Gandhi Krishna"
     ]
   },
   {
     "id": "/en/chemman_chaalai",
     "name": "Chemman Chaalai",
+    "directed_by": [
+      "Deepak Kumaran Menon"
+    ],
     "genre": [
       "Tamil cinema",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Deepak Kumaran Menon"
     ]
   },
   {
     "id": "/en/chennaiyil_oru_mazhai_kaalam",
     "name": "Chennaiyil Oru Mazhai Kaalam",
-    "genre": [],
     "directed_by": [
       "Prabhu Deva"
-    ]
+    ],
+    "genre": []
   },
   {
     "id": "/en/cher_the_farewell_tour_live_in_miami",
-    "name": "The Farewell Tour",
     "initial_release_date": "2003-08-26",
-    "genre": [
-      "Music video"
-    ],
+    "name": "The Farewell Tour",
     "directed_by": [
       "Dorina Sanchez",
       "David Mallet"
+    ],
+    "genre": [
+      "Music video"
     ]
   },
   {
     "id": "/en/cherry_falls",
-    "name": "Cherry Falls",
     "initial_release_date": "2000-07-29",
+    "name": "Cherry Falls",
+    "directed_by": [
+      "Geoffrey Wright"
+    ],
     "genre": [
       "Satire",
       "Slasher",
@@ -7822,15 +7814,15 @@
       "Horror",
       "Horror comedy",
       "Comedy"
-    ],
-    "directed_by": [
-      "Geoffrey Wright"
     ]
   },
   {
     "id": "/wikipedia/en_title/Chess_$00282006_film$0029",
-    "name": "Chess",
     "initial_release_date": "2006-07-07",
+    "name": "Chess",
+    "directed_by": [
+      "RajBabu"
+    ],
     "genre": [
       "Crime Fiction",
       "Thriller",
@@ -7838,79 +7830,79 @@
       "Comedy",
       "Malayalam Cinema",
       "World cinema"
-    ],
-    "directed_by": [
-      "RajBabu"
     ]
   },
   {
     "id": "/en/chica_de_rio",
-    "name": "Girl from Rio",
     "initial_release_date": "2003-04-11",
+    "name": "Girl from Rio",
+    "directed_by": [
+      "Christopher Monger"
+    ],
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy"
-    ],
-    "directed_by": [
-      "Christopher Monger"
     ]
   },
   {
     "id": "/en/chicago_2002",
-    "name": "Chicago",
     "initial_release_date": "2002-12-10",
+    "name": "Chicago",
+    "directed_by": [
+      "Rob Marshall"
+    ],
     "genre": [
       "Musical",
       "Crime Fiction",
       "Comedy",
       "Musical comedy"
-    ],
-    "directed_by": [
-      "Rob Marshall"
     ]
   },
   {
     "id": "/en/chicken_little",
-    "name": "Chicken Little",
     "initial_release_date": "2005-10-30",
+    "name": "Chicken Little",
+    "directed_by": [
+      "Mark Dindal"
+    ],
     "genre": [
       "Animation",
       "Adventure Film",
       "Comedy"
-    ],
-    "directed_by": [
-      "Mark Dindal"
     ]
   },
   {
     "id": "/en/chicken_run",
-    "name": "Chicken Run",
     "initial_release_date": "2000-06-21",
+    "name": "Chicken Run",
+    "directed_by": [
+      "Peter Lord",
+      "Nick Park"
+    ],
     "genre": [
       "Family",
       "Animation",
       "Comedy"
-    ],
-    "directed_by": [
-      "Peter Lord",
-      "Nick Park"
     ]
   },
   {
     "id": "/en/child_marriage_2005",
     "name": "Child Marriage",
-    "genre": [
-      "Documentary film"
-    ],
     "directed_by": [
       "Neeraj Kumar"
+    ],
+    "genre": [
+      "Documentary film"
     ]
   },
   {
     "id": "/en/children_of_men",
-    "name": "Children of Men",
     "initial_release_date": "2006-09-03",
+    "name": "Children of Men",
+    "directed_by": [
+      "Alfonso Cuar\u00f3n"
+    ],
     "genre": [
       "Thriller",
       "Action Film",
@@ -7923,42 +7915,42 @@
       "Film adaptation",
       "Action Thriller",
       "Drama"
-    ],
-    "directed_by": [
-      "Alfonso Cuar\u00f3n"
     ]
   },
   {
     "id": "/en/children_of_the_corn_revelation",
-    "name": "Children of the Corn: Revelation",
     "initial_release_date": "2001-10-09",
+    "name": "Children of the Corn: Revelation",
+    "directed_by": [
+      "Guy Magar"
+    ],
     "genre": [
       "Horror",
       "Supernatural",
       "Cult film"
-    ],
-    "directed_by": [
-      "Guy Magar"
     ]
   },
   {
     "id": "/en/children_of_the_living_dead",
     "name": "Children of the Living Dead",
+    "directed_by": [
+      "Tor Ramsey"
+    ],
     "genre": [
       "Indie film",
       "Teen film",
       "Horror",
       "Zombie Film",
       "Horror comedy"
-    ],
-    "directed_by": [
-      "Tor Ramsey"
     ]
   },
   {
     "id": "/en/chinthamani_kolacase",
-    "name": "Chinthamani Kolacase",
     "initial_release_date": "2006-03-31",
+    "name": "Chinthamani Kolacase",
+    "directed_by": [
+      "Shaji Kailas"
+    ],
     "genre": [
       "Horror",
       "Mystery",
@@ -7967,93 +7959,93 @@
       "Thriller",
       "Malayalam Cinema",
       "World cinema"
-    ],
-    "directed_by": [
-      "Shaji Kailas"
     ]
   },
   {
     "id": "/en/chips_2008",
     "name": "CHiPs",
+    "directed_by": [],
     "genre": [
       "Musical",
       "Children's/Family"
-    ],
-    "directed_by": []
+    ]
   },
   {
     "id": "/en/chithiram_pesuthadi",
-    "name": "Chithiram Pesuthadi",
     "initial_release_date": "2006-02-10",
+    "name": "Chithiram Pesuthadi",
+    "directed_by": [
+      "Mysskin"
+    ],
     "genre": [
       "Romance Film",
       "Tamil cinema",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Mysskin"
     ]
   },
   {
     "id": "/en/chocolat_2000",
-    "name": "Chocolat",
     "initial_release_date": "2000-12-15",
+    "name": "Chocolat",
+    "directed_by": [
+      "Lasse Hallstr\u00f6m"
+    ],
     "genre": [
       "Romance Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Lasse Hallstr\u00f6m"
     ]
   },
   {
     "id": "/en/choose_your_own_adventure_the_abominable_snowman",
-    "name": "Choose Your Own Adventure The Abominable Snowman",
     "initial_release_date": "2006-07-25",
+    "name": "Choose Your Own Adventure The Abominable Snowman",
+    "directed_by": [
+      "Bob Doucette"
+    ],
     "genre": [
       "Adventure Film",
       "Family",
       "Children's/Family",
       "Family-Oriented Adventure",
       "Animation"
-    ],
-    "directed_by": [
-      "Bob Doucette"
     ]
   },
   {
     "id": "/en/chopin_desire_for_love",
-    "name": "Chopin: Desire for Love",
     "initial_release_date": "2002-03-01",
+    "name": "Chopin: Desire for Love",
+    "directed_by": [
+      "Jerzy Antczak"
+    ],
     "genre": [
       "Biographical film",
       "Romance Film",
       "Music",
       "Drama"
-    ],
-    "directed_by": [
-      "Jerzy Antczak"
     ]
   },
   {
     "id": "/en/chopper",
-    "name": "Chopper",
     "initial_release_date": "2000-08-03",
+    "name": "Chopper",
+    "directed_by": [
+      "Andrew Dominik"
+    ],
     "genre": [
       "Biographical film",
       "Crime Fiction",
       "Comedy",
       "Drama"
-    ],
-    "directed_by": [
-      "Andrew Dominik"
     ]
   },
   {
     "id": "/en/chori_chori_2003",
-    "name": "Chori Chori",
     "initial_release_date": "2003-08-01",
+    "name": "Chori Chori",
+    "directed_by": [
+      "Milan Luthria"
+    ],
     "genre": [
       "Romance Film",
       "Musical",
@@ -8064,15 +8056,16 @@
       "World cinema",
       "Drama",
       "Musical Drama"
-    ],
-    "directed_by": [
-      "Milan Luthria"
     ]
   },
   {
     "id": "/en/chori_chori_chupke_chupke",
-    "name": "Chori Chori Chupke Chupke",
     "initial_release_date": "2001-03-09",
+    "name": "Chori Chori Chupke Chupke",
+    "directed_by": [
+      "Abbas Burmawalla",
+      "Mustan Burmawalla"
+    ],
     "genre": [
       "Romance Film",
       "Musical",
@@ -8080,16 +8073,15 @@
       "World cinema",
       "Drama",
       "Musical Drama"
-    ],
-    "directed_by": [
-      "Abbas Burmawalla",
-      "Mustan Burmawalla"
     ]
   },
   {
     "id": "/en/christinas_house",
-    "name": "Christina's House",
     "initial_release_date": "2000-02-24",
+    "name": "Christina's House",
+    "directed_by": [
+      "Gavin Wilding"
+    ],
     "genre": [
       "Thriller",
       "Mystery",
@@ -8098,15 +8090,15 @@
       "Slasher",
       "Psychological thriller",
       "Drama"
-    ],
-    "directed_by": [
-      "Gavin Wilding"
     ]
   },
   {
     "id": "/en/christmas_with_the_kranks",
-    "name": "Christmas with the Kranks",
     "initial_release_date": "2004-11-24",
+    "name": "Christmas with the Kranks",
+    "directed_by": [
+      "Joe Roth"
+    ],
     "genre": [
       "Christmas movie",
       "Family",
@@ -8114,93 +8106,93 @@
       "Slapstick",
       "Holiday Film",
       "Comedy"
-    ],
-    "directed_by": [
-      "Joe Roth"
     ]
   },
   {
     "id": "/en/chromophobia",
-    "name": "Chromophobia",
     "initial_release_date": "2005-05-21",
+    "name": "Chromophobia",
+    "directed_by": [
+      "Martha Fiennes"
+    ],
     "genre": [
       "Family Drama",
       "Drama"
-    ],
-    "directed_by": [
-      "Martha Fiennes"
     ]
   },
   {
     "id": "/en/chubby_killer",
     "name": "Chubby Killer",
+    "directed_by": [
+      "Reuben Rox"
+    ],
     "genre": [
       "Slasher",
       "Indie film",
       "Horror"
-    ],
-    "directed_by": [
-      "Reuben Rox"
     ]
   },
   {
     "id": "/en/chukkallo_chandrudu",
-    "name": "Chukkallo Chandrudu",
     "initial_release_date": "2006-01-14",
+    "name": "Chukkallo Chandrudu",
+    "directed_by": [
+      "Siva Kumar"
+    ],
     "genre": [
       "Comedy",
       "Tollywood",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Siva Kumar"
     ]
   },
   {
     "id": "/en/chup_chup_ke",
-    "name": "Chup Chup Ke",
     "initial_release_date": "2006-06-09",
+    "name": "Chup Chup Ke",
+    "directed_by": [
+      "Priyadarshan",
+      "Kookie Gulati"
+    ],
     "genre": [
       "Romantic comedy",
       "Comedy",
       "Romance Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Priyadarshan",
-      "Kookie Gulati"
     ]
   },
   {
     "id": "/en/church_ball",
-    "name": "Church Ball",
     "initial_release_date": "2006-03-17",
+    "name": "Church Ball",
+    "directed_by": [
+      "Kurt Hale"
+    ],
     "genre": [
       "Family",
       "Sports",
       "Comedy"
-    ],
-    "directed_by": [
-      "Kurt Hale"
     ]
   },
   {
     "id": "/en/churchill_the_hollywood_years",
-    "name": "Churchill: The Hollywood Years",
     "initial_release_date": "2004-12-03",
+    "name": "Churchill: The Hollywood Years",
+    "directed_by": [
+      "Peter Richardson"
+    ],
     "genre": [
       "Satire",
       "Comedy"
-    ],
-    "directed_by": [
-      "Peter Richardson"
     ]
   },
   {
     "id": "/en/cinderella_iii",
-    "name": "Cinderella III: A Twist in Time",
     "initial_release_date": "2007-02-06",
+    "name": "Cinderella III: A Twist in Time",
+    "directed_by": [
+      "Frank Nissen"
+    ],
     "genre": [
       "Family",
       "Animated cartoon",
@@ -8208,128 +8200,128 @@
       "Romance Film",
       "Animation",
       "Children's/Family"
-    ],
-    "directed_by": [
-      "Frank Nissen"
     ]
   },
   {
     "id": "/en/cinderella_man",
-    "name": "Cinderella Man",
     "initial_release_date": "2005-05-23",
+    "name": "Cinderella Man",
+    "directed_by": [
+      "Ron Howard"
+    ],
     "genre": [
       "Biographical film",
       "Historical period drama",
       "Romance Film",
       "Sports",
       "Drama"
-    ],
-    "directed_by": [
-      "Ron Howard"
     ]
   },
   {
     "id": "/en/cinemania",
     "name": "Cinemania",
-    "genre": [
-      "Documentary film",
-      "Culture &amp; Society"
-    ],
     "directed_by": [
       "Angela Christlieb",
       "Stephen Kijak"
+    ],
+    "genre": [
+      "Documentary film",
+      "Culture &amp; Society"
     ]
   },
   {
     "id": "/en/city_of_ghosts",
-    "name": "City of Ghosts",
     "initial_release_date": "2003-03-27",
+    "name": "City of Ghosts",
+    "directed_by": [
+      "Matt Dillon"
+    ],
     "genre": [
       "Thriller",
       "Crime Fiction",
       "Crime Thriller",
       "Drama"
-    ],
-    "directed_by": [
-      "Matt Dillon"
     ]
   },
   {
     "id": "/en/city_of_god",
-    "name": "City of God",
     "initial_release_date": "2002-05-18",
+    "name": "City of God",
+    "directed_by": [
+      "Fernando Meirelles"
+    ],
     "genre": [
       "Crime Fiction",
       "Drama"
-    ],
-    "directed_by": [
-      "Fernando Meirelles"
     ]
   },
   {
     "id": "/en/claustrophobia_2003",
     "name": "Claustrophobia",
+    "directed_by": [
+      "Mark Tapio Kines"
+    ],
     "genre": [
       "Slasher",
       "Horror"
-    ],
-    "directed_by": [
-      "Mark Tapio Kines"
     ]
   },
   {
     "id": "/en/clean",
-    "name": "Clean",
     "initial_release_date": "2004-03-27",
+    "name": "Clean",
+    "directed_by": [
+      "Olivier Assayas"
+    ],
     "genre": [
       "Music",
       "Drama"
-    ],
-    "directed_by": [
-      "Olivier Assayas"
     ]
   },
   {
     "id": "/en/clear_cut_the_story_of_philomath_oregon",
-    "name": "Clear Cut: The Story of Philomath, Oregon",
     "initial_release_date": "2006-01-20",
-    "genre": [
-      "Documentary film"
-    ],
+    "name": "Clear Cut: The Story of Philomath, Oregon",
     "directed_by": [
       "Peter Richardson"
+    ],
+    "genre": [
+      "Documentary film"
     ]
   },
   {
     "id": "/en/clerks_ii",
-    "name": "Clerks II",
     "initial_release_date": "2006-05-26",
+    "name": "Clerks II",
+    "directed_by": [
+      "Kevin Smith"
+    ],
     "genre": [
       "Buddy film",
       "Workplace Comedy",
       "Comedy"
-    ],
-    "directed_by": [
-      "Kevin Smith"
     ]
   },
   {
     "id": "/en/click",
-    "name": "Click",
     "initial_release_date": "2006-06-22",
+    "name": "Click",
+    "directed_by": [
+      "Frank Coraci"
+    ],
     "genre": [
       "Comedy",
       "Fantasy",
       "Drama"
-    ],
-    "directed_by": [
-      "Frank Coraci"
     ]
   },
   {
     "id": "/en/clockstoppers",
-    "name": "Clockstoppers",
     "initial_release_date": "2002-03-29",
+    "name": "Clockstoppers",
+    "directed_by": [
+      "Jonathan Frakes"
+    ],
     "genre": [
       "Science Fiction",
       "Teen film",
@@ -8337,40 +8329,40 @@
       "Thriller",
       "Adventure Film",
       "Comedy"
-    ],
-    "directed_by": [
-      "Jonathan Frakes"
     ]
   },
   {
     "id": "/en/closer_2004",
-    "name": "Closer",
     "initial_release_date": "2004-12-03",
+    "name": "Closer",
+    "directed_by": [
+      "Mike Nichols"
+    ],
     "genre": [
       "Romance Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Mike Nichols"
     ]
   },
   {
     "id": "/en/closing_the_ring",
-    "name": "Closing the Ring",
     "initial_release_date": "2007-09-14",
+    "name": "Closing the Ring",
+    "directed_by": [
+      "Richard Attenborough"
+    ],
     "genre": [
       "War film",
       "Romance Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Richard Attenborough"
     ]
   },
   {
     "id": "/en/club_dread",
-    "name": "Club Dread",
     "initial_release_date": "2004-02-27",
+    "name": "Club Dread",
+    "directed_by": [
+      "Jay Chandrasekhar"
+    ],
     "genre": [
       "Parody",
       "Horror",
@@ -8379,59 +8371,59 @@
       "Indie film",
       "Horror comedy",
       "Comedy"
-    ],
-    "directed_by": [
-      "Jay Chandrasekhar"
     ]
   },
   {
     "id": "/en/coach_carter",
-    "name": "Coach Carter",
     "initial_release_date": "2005-01-13",
+    "name": "Coach Carter",
+    "directed_by": [
+      "Thomas Carter"
+    ],
     "genre": [
       "Coming of age",
       "Sports",
       "Docudrama",
       "Biographical film",
       "Drama"
-    ],
-    "directed_by": [
-      "Thomas Carter"
     ]
   },
   {
     "id": "/en/coast_guard_2002",
-    "name": "The Coast Guard",
     "initial_release_date": "2002-11-14",
+    "name": "The Coast Guard",
+    "directed_by": [
+      "Kim Ki-duk"
+    ],
     "genre": [
       "Action Film",
       "War film",
       "East Asian cinema",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Kim Ki-duk"
     ]
   },
   {
     "id": "/en/code_46",
-    "name": "Code 46",
     "initial_release_date": "2004-05-07",
+    "name": "Code 46",
+    "directed_by": [
+      "Michael Winterbottom"
+    ],
     "genre": [
       "Science Fiction",
       "Thriller",
       "Romance Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Michael Winterbottom"
     ]
   },
   {
     "id": "/en/codename_kids_next_door_operation_z_e_r_o",
-    "name": "Codename: Kids Next Door: Operation Z.E.R.O.",
     "initial_release_date": "2006-01-13",
+    "name": "Codename: Kids Next Door: Operation Z.E.R.O.",
+    "directed_by": [
+      "Tom Warburton"
+    ],
     "genre": [
       "Science Fiction",
       "Animation",
@@ -8439,56 +8431,56 @@
       "Family",
       "Comedy",
       "Crime Fiction"
-    ],
-    "directed_by": [
-      "Tom Warburton"
     ]
   },
   {
     "id": "/en/coffee_and_cigarettes",
-    "name": "Coffee and Cigarettes",
     "initial_release_date": "2003-09-05",
+    "name": "Coffee and Cigarettes",
+    "directed_by": [
+      "Jim Jarmusch"
+    ],
     "genre": [
       "Music",
       "Comedy",
       "Drama"
-    ],
-    "directed_by": [
-      "Jim Jarmusch"
     ]
   },
   {
     "id": "/en/cold_creek_manor",
-    "name": "Cold Creek Manor",
     "initial_release_date": "2003-09-19",
+    "name": "Cold Creek Manor",
+    "directed_by": [
+      "Mike Figgis"
+    ],
     "genre": [
       "Thriller",
       "Mystery",
       "Psychological thriller",
       "Crime Thriller",
       "Drama"
-    ],
-    "directed_by": [
-      "Mike Figgis"
     ]
   },
   {
     "id": "/en/cold_mountain",
-    "name": "Cold Mountain",
     "initial_release_date": "2003-12-25",
+    "name": "Cold Mountain",
+    "directed_by": [
+      "Anthony Minghella"
+    ],
     "genre": [
       "War film",
       "Romance Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Anthony Minghella"
     ]
   },
   {
     "id": "/en/cold_showers",
-    "name": "Cold Showers",
     "initial_release_date": "2005-05-22",
+    "name": "Cold Showers",
+    "directed_by": [
+      "Antony Cordier"
+    ],
     "genre": [
       "Coming of age",
       "LGBT",
@@ -8497,94 +8489,94 @@
       "Teen film",
       "Erotic Drama",
       "Drama"
-    ],
-    "directed_by": [
-      "Antony Cordier"
     ]
   },
   {
     "id": "/en/collateral",
-    "name": "Collateral",
     "initial_release_date": "2004-08-05",
+    "name": "Collateral",
+    "directed_by": [
+      "Michael Mann"
+    ],
     "genre": [
       "Thriller",
       "Crime Fiction",
       "Crime Thriller",
       "Film noir",
       "Drama"
-    ],
-    "directed_by": [
-      "Michael Mann"
     ]
   },
   {
     "id": "/en/collateral_damage_2002",
-    "name": "Collateral Damage",
     "initial_release_date": "2002-02-04",
+    "name": "Collateral Damage",
+    "directed_by": [
+      "Andrew Davis"
+    ],
     "genre": [
       "Action Film",
       "Thriller",
       "Drama"
-    ],
-    "directed_by": [
-      "Andrew Davis"
     ]
   },
   {
     "id": "/en/comedian_2002",
-    "name": "Comedian",
     "initial_release_date": "2002-10-11",
+    "name": "Comedian",
+    "directed_by": [
+      "Christian Charles"
+    ],
     "genre": [
       "Indie film",
       "Documentary film",
       "Stand-up comedy",
       "Comedy",
       "Biographical film"
-    ],
-    "directed_by": [
-      "Christian Charles"
     ]
   },
   {
     "id": "/en/coming_out_2006",
     "name": "Coming Out",
+    "directed_by": [
+      "Joel Zwick"
+    ],
     "genre": [
       "Comedy",
       "Drama"
-    ],
-    "directed_by": [
-      "Joel Zwick"
     ]
   },
   {
     "id": "/en/commitments",
-    "name": "Commitments",
     "initial_release_date": "2001-05-04",
+    "name": "Commitments",
+    "directed_by": [
+      "Carol Mayes"
+    ],
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Drama"
-    ],
-    "directed_by": [
-      "Carol Mayes"
     ]
   },
   {
     "id": "/en/common_ground_2000",
-    "name": "Common Ground",
     "initial_release_date": "2000-01-29",
+    "name": "Common Ground",
+    "directed_by": [
+      "Donna Deitch"
+    ],
     "genre": [
       "LGBT",
       "Drama"
-    ],
-    "directed_by": [
-      "Donna Deitch"
     ]
   },
   {
     "id": "/en/company_2002",
-    "name": "Company",
     "initial_release_date": "2002-04-15",
+    "name": "Company",
+    "directed_by": [
+      "Ram Gopal Varma"
+    ],
     "genre": [
       "Thriller",
       "Action Film",
@@ -8592,28 +8584,24 @@
       "Bollywood",
       "World cinema",
       "Drama"
-    ],
-    "directed_by": [
-      "Ram Gopal Varma"
     ]
   },
   {
     "id": "/en/confessions_of_a_dangerous_mind",
     "name": "Confessions of a Dangerous Mind",
+    "directed_by": [
+      "George Clooney"
+    ],
     "genre": [
       "Biographical film",
       "Thriller",
       "Crime Fiction",
       "Comedy",
       "Drama"
-    ],
-    "directed_by": [
-      "George Clooney"
     ]
   },
   {
     "id": "/en/confessions_of_a_teenage_drama_queen",
-    "name": "Confessions of a Teenage Drama Queen",
     "initial_release_date": "2004-02-17",
     "genre": [
       "Family",
@@ -8623,11 +8611,11 @@
     ],
     "directed_by": [
       "Sara Sugarman"
-    ]
+    ],
+    "name": "Confessions of a Teenage Drama Queen"
   },
   {
     "id": "/en/confetti_2006",
-    "name": "Confetti",
     "initial_release_date": "2006-05-05",
     "genre": [
       "Mockumentary",
@@ -8639,11 +8627,11 @@
     ],
     "directed_by": [
       "Debbie Isitt"
-    ]
+    ],
+    "name": "Confetti"
   },
   {
     "id": "/en/confidence_2004",
-    "name": "Confidence",
     "initial_release_date": "2003-01-20",
     "genre": [
       "Thriller",
@@ -8652,11 +8640,11 @@
     ],
     "directed_by": [
       "James Foley"
-    ]
+    ],
+    "name": "Confidence"
   },
   {
     "id": "/en/connie_and_carla",
-    "name": "Connie and Carla",
     "initial_release_date": "2004-04-16",
     "genre": [
       "LGBT",
@@ -8666,11 +8654,11 @@
     ],
     "directed_by": [
       "Michael Lembeck"
-    ]
+    ],
+    "name": "Connie and Carla"
   },
   {
     "id": "/en/conspiracy_2001",
-    "name": "Conspiracy",
     "initial_release_date": "2001-05-19",
     "genre": [
       "History",
@@ -8681,11 +8669,11 @@
     ],
     "directed_by": [
       "Frank Pierson"
-    ]
+    ],
+    "name": "Conspiracy"
   },
   {
     "id": "/en/constantine_2005",
-    "name": "Constantine",
     "initial_release_date": "2005-02-08",
     "genre": [
       "Horror",
@@ -8694,11 +8682,11 @@
     ],
     "directed_by": [
       "Francis Lawrence"
-    ]
+    ],
+    "name": "Constantine"
   },
   {
     "id": "/en/control_room",
-    "name": "Control Room",
     "genre": [
       "Documentary film",
       "Political cinema",
@@ -8709,11 +8697,11 @@
     ],
     "directed_by": [
       "Jehane Noujaim"
-    ]
+    ],
+    "name": "Control Room"
   },
   {
     "id": "/en/control_the_ian_curtis_film",
-    "name": "Control",
     "initial_release_date": "2007-05-17",
     "genre": [
       "Biographical film",
@@ -8725,11 +8713,11 @@
     ],
     "directed_by": [
       "Anton Corbijn"
-    ]
+    ],
+    "name": "Control"
   },
   {
     "id": "/en/cope_2005",
-    "name": "Cope",
     "initial_release_date": "2007-01-23",
     "genre": [
       "Horror",
@@ -8738,11 +8726,11 @@
     "directed_by": [
       "Ronald Jackson",
       "Ronald Jerry"
-    ]
+    ],
+    "name": "Cope"
   },
   {
     "id": "/en/copying_beethoven",
-    "name": "Copying Beethoven",
     "initial_release_date": "2006-07-30",
     "genre": [
       "Biographical film",
@@ -8752,22 +8740,22 @@
     ],
     "directed_by": [
       "Agnieszka Holland"
-    ]
+    ],
+    "name": "Copying Beethoven"
   },
   {
     "id": "/en/corporate",
-    "name": "Corporate",
     "initial_release_date": "2006-07-07",
     "genre": [
       "Drama"
     ],
     "directed_by": [
       "Madhur Bhandarkar"
-    ]
+    ],
+    "name": "Corporate"
   },
   {
     "id": "/en/corpse_bride",
-    "name": "Corpse Bride",
     "initial_release_date": "2005-09-07",
     "genre": [
       "Fantasy",
@@ -8778,11 +8766,11 @@
     "directed_by": [
       "Tim Burton",
       "Mike Johnson"
-    ]
+    ],
+    "name": "Corpse Bride"
   },
   {
     "id": "/en/covert_one_the_hades_factor",
-    "name": "Covert One: The Hades Factor",
     "genre": [
       "Thriller",
       "Action Film",
@@ -8790,11 +8778,11 @@
     ],
     "directed_by": [
       "Mick Jackson"
-    ]
+    ],
+    "name": "Covert One: The Hades Factor"
   },
   {
     "id": "/en/cow_belles",
-    "name": "Cow Belles",
     "initial_release_date": "2006-03-24",
     "genre": [
       "Family",
@@ -8805,11 +8793,11 @@
     ],
     "directed_by": [
       "Francine McDougall"
-    ]
+    ],
+    "name": "Cow Belles"
   },
   {
     "id": "/en/cowards_bend_the_knee",
-    "name": "Cowards Bend the Knee",
     "initial_release_date": "2003-02-26",
     "genre": [
       "Silent film",
@@ -8823,11 +8811,11 @@
     ],
     "directed_by": [
       "Guy Maddin"
-    ]
+    ],
+    "name": "Cowards Bend the Knee"
   },
   {
     "id": "/en/cowboy_bebop_the_movie",
-    "name": "Cowboy Bebop: The Movie",
     "initial_release_date": "2001-09-01",
     "genre": [
       "Anime",
@@ -8839,11 +8827,11 @@
     ],
     "directed_by": [
       "Shinichir\u014d Watanabe"
-    ]
+    ],
+    "name": "Cowboy Bebop: The Movie"
   },
   {
     "id": "/en/coyote_ugly",
-    "name": "Coyote Ugly",
     "initial_release_date": "2000-07-31",
     "genre": [
       "Musical",
@@ -8855,22 +8843,22 @@
     ],
     "directed_by": [
       "David McNally"
-    ]
+    ],
+    "name": "Coyote Ugly"
   },
   {
     "id": "/en/crackerjack_2002",
-    "name": "Crackerjack",
     "initial_release_date": "2002-11-07",
     "genre": [
       "Comedy"
     ],
     "directed_by": [
       "Paul Moloney"
-    ]
+    ],
+    "name": "Crackerjack"
   },
   {
     "id": "/en/cradle_2_the_grave",
-    "name": "Cradle 2 the Grave",
     "initial_release_date": "2003-02-28",
     "genre": [
       "Martial Arts Film",
@@ -8884,11 +8872,11 @@
     ],
     "directed_by": [
       "Andrzej Bartkowiak"
-    ]
+    ],
+    "name": "Cradle 2 the Grave"
   },
   {
     "id": "/en/cradle_of_fear",
-    "name": "Cradle of Fear",
     "genre": [
       "Horror",
       "B movie",
@@ -8896,11 +8884,11 @@
     ],
     "directed_by": [
       "Alex Chandon"
-    ]
+    ],
+    "name": "Cradle of Fear"
   },
   {
     "id": "/en/crank",
-    "name": "Crank",
     "initial_release_date": "2006-08-31",
     "genre": [
       "Thriller",
@@ -8912,11 +8900,11 @@
     ],
     "directed_by": [
       "Neveldine/Taylor"
-    ]
+    ],
+    "name": "Crank"
   },
   {
     "id": "/en/crash_2004",
-    "name": "Crash",
     "initial_release_date": "2004-09-10",
     "genre": [
       "Crime Fiction",
@@ -8925,11 +8913,11 @@
     ],
     "directed_by": [
       "Paul Haggis"
-    ]
+    ],
+    "name": "Crash"
   },
   {
     "id": "/en/crazy_beautiful",
-    "name": "Crazy/Beautiful",
     "initial_release_date": "2001-06-28",
     "genre": [
       "Teen film",
@@ -8938,11 +8926,11 @@
     ],
     "directed_by": [
       "John Stockwell"
-    ]
+    ],
+    "name": "Crazy/Beautiful"
   },
   {
     "id": "/en/creep_2005",
-    "name": "Creep",
     "initial_release_date": "2004-08-10",
     "genre": [
       "Horror",
@@ -8951,11 +8939,11 @@
     ],
     "directed_by": [
       "Christopher Smith"
-    ]
+    ],
+    "name": "Creep"
   },
   {
     "id": "/en/criminal",
-    "name": "Criminal",
     "initial_release_date": "2004-09-10",
     "genre": [
       "Thriller",
@@ -8968,11 +8956,11 @@
     ],
     "directed_by": [
       "Gregory Jacobs"
-    ]
+    ],
+    "name": "Criminal"
   },
   {
     "id": "/en/crimson_gold",
-    "name": "Crimson Gold",
     "genre": [
       "World cinema",
       "Thriller",
@@ -8980,11 +8968,11 @@
     ],
     "directed_by": [
       "Jafar Panahi"
-    ]
+    ],
+    "name": "Crimson Gold"
   },
   {
     "id": "/en/crimson_rivers_ii_angels_of_the_apocalypse",
-    "name": "Crimson Rivers II: Angels of the Apocalypse",
     "initial_release_date": "2004-02-18",
     "genre": [
       "Action Film",
@@ -8993,11 +8981,11 @@
     ],
     "directed_by": [
       "Olivier Dahan"
-    ]
+    ],
+    "name": "Crimson Rivers II: Angels of the Apocalypse"
   },
   {
     "id": "/en/crocodile_2000",
-    "name": "Crocodile",
     "initial_release_date": "2000-12-26",
     "genre": [
       "Horror",
@@ -9009,11 +8997,11 @@
     ],
     "directed_by": [
       "Tobe Hooper"
-    ]
+    ],
+    "name": "Crocodile"
   },
   {
     "id": "/en/crocodile_2_death_swamp",
-    "name": "Crocodile 2: Death Swamp",
     "initial_release_date": "2002-08-01",
     "genre": [
       "Horror",
@@ -9028,11 +9016,11 @@
     ],
     "directed_by": [
       "Gary Jones"
-    ]
+    ],
+    "name": "Crocodile 2: Death Swamp"
   },
   {
     "id": "/en/crocodile_dundee_in_los_angeles",
-    "name": "Crocodile Dundee in Los Angeles",
     "initial_release_date": "2001-04-12",
     "genre": [
       "Action Film",
@@ -9045,11 +9033,11 @@
     ],
     "directed_by": [
       "Simon Wincer"
-    ]
+    ],
+    "name": "Crocodile Dundee in Los Angeles"
   },
   {
     "id": "/en/crossing_the_bridge_the_sound_of_istanbul",
-    "name": "Crossing the Bridge: The Sound of Istanbul",
     "initial_release_date": "2005-06-09",
     "genre": [
       "Musical",
@@ -9059,11 +9047,11 @@
     ],
     "directed_by": [
       "Fatih Ak\u0131n"
-    ]
+    ],
+    "name": "Crossing the Bridge: The Sound of Istanbul"
   },
   {
     "id": "/en/crossover_2006",
-    "name": "Crossover",
     "initial_release_date": "2006-09-01",
     "genre": [
       "Action Film",
@@ -9076,11 +9064,11 @@
     ],
     "directed_by": [
       "Preston A. Whitmore II"
-    ]
+    ],
+    "name": "Crossover"
   },
   {
     "id": "/en/crossroads_2002",
-    "name": "Crossroads",
     "initial_release_date": "2002-02-11",
     "genre": [
       "Coming of age",
@@ -9097,11 +9085,11 @@
     ],
     "directed_by": [
       "Tamra Davis"
-    ]
+    ],
+    "name": "Crossroads"
   },
   {
     "id": "/en/crouching_tiger_hidden_dragon",
-    "name": "Crouching Tiger, Hidden Dragon",
     "initial_release_date": "2000-05-16",
     "genre": [
       "Romance Film",
@@ -9111,11 +9099,11 @@
     ],
     "directed_by": [
       "Ang Lee"
-    ]
+    ],
+    "name": "Crouching Tiger, Hidden Dragon"
   },
   {
     "id": "/en/cruel_intentions_3",
-    "name": "Cruel Intentions 3",
     "initial_release_date": "2004-05-25",
     "genre": [
       "Erotica",
@@ -9130,11 +9118,11 @@
     ],
     "directed_by": [
       "Scott Ziehl"
-    ]
+    ],
+    "name": "Cruel Intentions 3"
   },
   {
     "id": "/en/crustaces_et_coquillages",
-    "name": "Crustac\u00e9s et Coquillages",
     "initial_release_date": "2005-02-12",
     "genre": [
       "Musical",
@@ -9150,11 +9138,11 @@
     "directed_by": [
       "Jacques Martineau",
       "Olivier Ducastel"
-    ]
+    ],
+    "name": "Crustac\u00e9s et Coquillages"
   },
   {
     "id": "/en/cry_wolf",
-    "name": "Cry_Wolf",
     "initial_release_date": "2005-09-16",
     "genre": [
       "Slasher",
@@ -9165,11 +9153,11 @@
     ],
     "directed_by": [
       "Jeff Wadlow"
-    ]
+    ],
+    "name": "Cry_Wolf"
   },
   {
     "id": "/en/cube_2_hypercube",
-    "name": "Cube 2: Hypercube",
     "initial_release_date": "2002-04-15",
     "genre": [
       "Science Fiction",
@@ -9180,11 +9168,11 @@
     ],
     "directed_by": [
       "Andrzej Seku\u0142a"
-    ]
+    ],
+    "name": "Cube 2: Hypercube"
   },
   {
     "id": "/en/curious_george_2006",
-    "name": "Curious George",
     "initial_release_date": "2006-02-10",
     "genre": [
       "Animation",
@@ -9194,11 +9182,11 @@
     ],
     "directed_by": [
       "Matthew O'Callaghan"
-    ]
+    ],
+    "name": "Curious George"
   },
   {
     "id": "/en/curse_of_the_golden_flower",
-    "name": "Curse of the Golden Flower",
     "initial_release_date": "2006-12-21",
     "genre": [
       "Romance Film",
@@ -9207,11 +9195,11 @@
     ],
     "directed_by": [
       "Zhang Yimou"
-    ]
+    ],
+    "name": "Curse of the Golden Flower"
   },
   {
     "id": "/en/cursed",
-    "name": "Cursed",
     "initial_release_date": "2004-11-07",
     "genre": [
       "Horror",
@@ -9221,11 +9209,11 @@
     ],
     "directed_by": [
       "Wes Craven"
-    ]
+    ],
+    "name": "Cursed"
   },
   {
     "id": "/en/d-tox",
-    "name": "D-Tox",
     "initial_release_date": "2002-01-04",
     "genre": [
       "Thriller",
@@ -9235,11 +9223,11 @@
     ],
     "directed_by": [
       "Jim Gillespie"
-    ]
+    ],
+    "name": "D-Tox"
   },
   {
     "id": "/en/daddy",
-    "name": "Daddy",
     "initial_release_date": "2001-10-04",
     "genre": [
       "Family",
@@ -9249,11 +9237,11 @@
     ],
     "directed_by": [
       "Suresh Krissna"
-    ]
+    ],
+    "name": "Daddy"
   },
   {
     "id": "/en/daddy_day_care",
-    "name": "Daddy Day Care",
     "initial_release_date": "2003-05-04",
     "genre": [
       "Family",
@@ -9261,11 +9249,11 @@
     ],
     "directed_by": [
       "Steve Carr"
-    ]
+    ],
+    "name": "Daddy Day Care"
   },
   {
     "id": "/en/daddy_long-legs",
-    "name": "Daddy-Long-Legs",
     "initial_release_date": "2005-01-13",
     "genre": [
       "Romantic comedy",
@@ -9275,11 +9263,11 @@
     ],
     "directed_by": [
       "Gong Jeong-shik"
-    ]
+    ],
+    "name": "Daddy-Long-Legs"
   },
   {
     "id": "/en/dahmer_2002",
-    "name": "Dahmer",
     "initial_release_date": "2002-06-21",
     "genre": [
       "Thriller",
@@ -9295,11 +9283,11 @@
     ],
     "directed_by": [
       "David Jacobson"
-    ]
+    ],
+    "name": "Dahmer"
   },
   {
     "id": "/en/daisy_2006",
-    "name": "Daisy",
     "initial_release_date": "2006-03-09",
     "genre": [
       "Chinese Movies",
@@ -9309,11 +9297,11 @@
     ],
     "directed_by": [
       "Andrew Lau"
-    ]
+    ],
+    "name": "Daisy"
   },
   {
     "id": "/en/daivanamathil",
-    "name": "Daivanamathil",
     "genre": [
       "Drama",
       "Malayalam Cinema",
@@ -9321,11 +9309,11 @@
     ],
     "directed_by": [
       "Jayaraj"
-    ]
+    ],
+    "name": "Daivanamathil"
   },
   {
     "id": "/en/daltry_calhoun",
-    "name": "Daltry Calhoun",
     "initial_release_date": "2005-09-25",
     "genre": [
       "Black comedy",
@@ -9335,11 +9323,11 @@
     ],
     "directed_by": [
       "Katrina Holden Bronson"
-    ]
+    ],
+    "name": "Daltry Calhoun"
   },
   {
     "id": "/en/dan_in_real_life",
-    "name": "Dan in Real Life",
     "initial_release_date": "2007-10-26",
     "genre": [
       "Romance Film",
@@ -9351,11 +9339,11 @@
     ],
     "directed_by": [
       "Peter Hedges"
-    ]
+    ],
+    "name": "Dan in Real Life"
   },
   {
     "id": "/en/dancer_in_the_dark",
-    "name": "Dancer in the Dark",
     "initial_release_date": "2000-05-17",
     "genre": [
       "Musical",
@@ -9366,21 +9354,21 @@
     ],
     "directed_by": [
       "Lars von Trier"
-    ]
+    ],
+    "name": "Dancer in the Dark"
   },
   {
     "id": "/en/daniel_amos_live_in_anaheim_1985",
-    "name": "Daniel Amos Live in Anaheim 1985",
     "genre": [
       "Music video"
     ],
     "directed_by": [
       "Dave Perry"
-    ]
+    ],
+    "name": "Daniel Amos Live in Anaheim 1985"
   },
   {
     "id": "/en/danny_deckchair",
-    "name": "Danny Deckchair",
     "genre": [
       "Romantic comedy",
       "Indie film",
@@ -9391,11 +9379,11 @@
     ],
     "directed_by": [
       "Jeff Balsmeyer"
-    ]
+    ],
+    "name": "Danny Deckchair"
   },
   {
     "id": "/en/daredevil_2003",
-    "name": "Daredevil",
     "initial_release_date": "2003-02-09",
     "genre": [
       "Action Film",
@@ -9406,11 +9394,11 @@
     ],
     "directed_by": [
       "Mark Steven Johnson"
-    ]
+    ],
+    "name": "Daredevil"
   },
   {
     "id": "/en/dark_blue",
-    "name": "Dark Blue",
     "initial_release_date": "2002-12-14",
     "genre": [
       "Action Film",
@@ -9420,22 +9408,22 @@
     ],
     "directed_by": [
       "Ron Shelton"
-    ]
+    ],
+    "name": "Dark Blue"
   },
   {
     "id": "/en/dark_harvest",
-    "name": "Dark Harvest",
     "genre": [
       "Horror",
       "Slasher"
     ],
     "directed_by": [
       "Paul Moore, Jr."
-    ]
+    ],
+    "name": "Dark Harvest"
   },
   {
     "id": "/en/dark_water",
-    "name": "Dark Water",
     "initial_release_date": "2005-06-27",
     "genre": [
       "Thriller",
@@ -9444,11 +9432,11 @@
     ],
     "directed_by": [
       "Walter Salles"
-    ]
+    ],
+    "name": "Dark Water"
   },
   {
     "id": "/en/dark_water_2002",
-    "name": "Dark Water",
     "initial_release_date": "2002-01-19",
     "genre": [
       "Thriller",
@@ -9458,22 +9446,22 @@
     ],
     "directed_by": [
       "Hideo Nakata"
-    ]
+    ],
+    "name": "Dark Water"
   },
   {
     "id": "/en/darkness_2002",
-    "name": "Darkness",
     "initial_release_date": "2002-10-03",
     "genre": [
       "Horror"
     ],
     "directed_by": [
       "Jaume Balaguer\u00f3"
-    ]
+    ],
+    "name": "Darkness"
   },
   {
     "id": "/en/darna_mana_hai",
-    "name": "Darna Mana Hai",
     "initial_release_date": "2003-07-25",
     "genre": [
       "Horror",
@@ -9483,11 +9471,11 @@
     ],
     "directed_by": [
       "Prawaal Raman"
-    ]
+    ],
+    "name": "Darna Mana Hai"
   },
   {
     "id": "/en/darna_zaroori_hai",
-    "name": "Darna Zaroori Hai",
     "initial_release_date": "2006-04-28",
     "genre": [
       "Horror",
@@ -9504,11 +9492,11 @@
       "J. D. Chakravarthy",
       "Sajid Khan",
       "Manish Gupta"
-    ]
+    ],
+    "name": "Darna Zaroori Hai"
   },
   {
     "id": "/en/darth_vaders_psychic_hotline",
-    "name": "Darth Vader's Psychic Hotline",
     "initial_release_date": "2002-04-16",
     "genre": [
       "Indie film",
@@ -9517,11 +9505,11 @@
     ],
     "directed_by": [
       "John E. Hudgens"
-    ]
+    ],
+    "name": "Darth Vader's Psychic Hotline"
   },
   {
     "id": "/en/darwins_nightmare",
-    "name": "Darwin's Nightmare",
     "initial_release_date": "2004-09-01",
     "genre": [
       "Documentary film",
@@ -9530,11 +9518,11 @@
     ],
     "directed_by": [
       "Hubert Sauper"
-    ]
+    ],
+    "name": "Darwin's Nightmare"
   },
   {
     "id": "/en/das_experiment",
-    "name": "The Experiment",
     "initial_release_date": "2010-07-15",
     "genre": [
       "Thriller",
@@ -9543,11 +9531,11 @@
     ],
     "directed_by": [
       "Paul Scheuring"
-    ]
+    ],
+    "name": "The Experiment"
   },
   {
     "id": "/en/dasavatharam",
-    "name": "Dasavathaaram",
     "initial_release_date": "2008-06-12",
     "genre": [
       "Science Fiction",
@@ -9556,11 +9544,11 @@
     ],
     "directed_by": [
       "K. S. Ravikumar"
-    ]
+    ],
+    "name": "Dasavathaaram"
   },
   {
     "id": "/en/date_movie",
-    "name": "Date Movie",
     "initial_release_date": "2006-02-17",
     "genre": [
       "Romantic comedy",
@@ -9571,11 +9559,11 @@
     "directed_by": [
       "Aaron Seltzer",
       "Jason Friedberg"
-    ]
+    ],
+    "name": "Date Movie"
   },
   {
     "id": "/en/dave_attells_insomniac_tour",
-    "name": "Dave Attell's Insomniac Tour",
     "initial_release_date": "2006-04-11",
     "genre": [
       "Stand-up comedy",
@@ -9583,11 +9571,11 @@
     ],
     "directed_by": [
       "Joel Gallen"
-    ]
+    ],
+    "name": "Dave Attell's Insomniac Tour"
   },
   {
     "id": "/en/dave_chappelles_block_party",
-    "name": "Dave Chappelle's Block Party",
     "initial_release_date": "2006-03-03",
     "genre": [
       "Documentary film",
@@ -9599,11 +9587,11 @@
     ],
     "directed_by": [
       "Michel Gondry"
-    ]
+    ],
+    "name": "Dave Chappelle's Block Party"
   },
   {
     "id": "/en/david_layla",
-    "name": "David &amp; Layla",
     "initial_release_date": "2005-10-21",
     "genre": [
       "Romantic comedy",
@@ -9615,22 +9603,22 @@
     ],
     "directed_by": [
       "Jay Jonroy"
-    ]
+    ],
+    "name": "David &amp; Layla"
   },
   {
     "id": "/en/david_gilmour_in_concert",
-    "name": "David Gilmour in Concert",
     "genre": [
       "Music video",
       "Concert film"
     ],
     "directed_by": [
       "David Mallet"
-    ]
+    ],
+    "name": "David Gilmour in Concert"
   },
   {
     "id": "/en/dawn_of_the_dead_2004",
-    "name": "Dawn of the Dead",
     "initial_release_date": "2004-03-10",
     "genre": [
       "Horror",
@@ -9641,11 +9629,11 @@
     ],
     "directed_by": [
       "Zack Snyder"
-    ]
+    ],
+    "name": "Dawn of the Dead"
   },
   {
     "id": "/en/day_of_the_dead_2007",
-    "name": "Day of the Dead",
     "initial_release_date": "2008-04-08",
     "genre": [
       "Splatter film",
@@ -9657,11 +9645,11 @@
     ],
     "directed_by": [
       "Steve Miner"
-    ]
+    ],
+    "name": "Day of the Dead"
   },
   {
     "id": "/en/day_of_the_dead_2_contagium",
-    "name": "Day of the Dead 2: Contagium",
     "initial_release_date": "2005-10-18",
     "genre": [
       "Horror",
@@ -9670,11 +9658,11 @@
     "directed_by": [
       "Ana Clavell",
       "James Glenn Dudelson"
-    ]
+    ],
+    "name": "Day of the Dead 2: Contagium"
   },
   {
     "id": "/en/day_watch",
-    "name": "Day Watch",
     "initial_release_date": "2006-01-01",
     "genre": [
       "Thriller",
@@ -9683,11 +9671,11 @@
     ],
     "directed_by": [
       "Timur Bekmambetov"
-    ]
+    ],
+    "name": "Day Watch"
   },
   {
     "id": "/en/day_zero",
-    "name": "Day Zero",
     "initial_release_date": "2007-11-02",
     "genre": [
       "Indie film",
@@ -9696,11 +9684,11 @@
     ],
     "directed_by": [
       "Bryan Gunnar Cole"
-    ]
+    ],
+    "name": "Day Zero"
   },
   {
     "id": "/en/de-lovely",
-    "name": "De-Lovely",
     "initial_release_date": "2004-05-22",
     "genre": [
       "Musical",
@@ -9710,11 +9698,11 @@
     ],
     "directed_by": [
       "Irwin Winkler"
-    ]
+    ],
+    "name": "De-Lovely"
   },
   {
     "id": "/en/dead_breakfast",
-    "name": "Dead &amp; Breakfast",
     "initial_release_date": "2004-03-19",
     "genre": [
       "Horror",
@@ -9726,22 +9714,22 @@
     ],
     "directed_by": [
       "Matthew Leutwyler"
-    ]
+    ],
+    "name": "Dead &amp; Breakfast"
   },
   {
     "id": "/en/dead_birds_2005",
-    "name": "Dead Birds",
     "initial_release_date": "2005-03-15",
     "genre": [
       "Horror"
     ],
     "directed_by": [
       "Alex Turner"
-    ]
+    ],
+    "name": "Dead Birds"
   },
   {
     "id": "/en/dead_end_2003",
-    "name": "Dead End",
     "initial_release_date": "2003-01-30",
     "genre": [
       "Horror",
@@ -9752,11 +9740,11 @@
     "directed_by": [
       "Jean-Baptiste Andrea",
       "Fabrice Canepa"
-    ]
+    ],
+    "name": "Dead End"
   },
   {
     "id": "/en/dead_friend",
-    "name": "Dead Friend",
     "initial_release_date": "2004-06-18",
     "genre": [
       "Horror",
@@ -9765,11 +9753,11 @@
     ],
     "directed_by": [
       "Kim Tae-kyeong"
-    ]
+    ],
+    "name": "Dead Friend"
   },
   {
     "id": "/en/dead_mans_shoes",
-    "name": "Dead Man's Shoes",
     "initial_release_date": "2004-10-01",
     "genre": [
       "Psychological thriller",
@@ -9779,11 +9767,11 @@
     ],
     "directed_by": [
       "Shane Meadows"
-    ]
+    ],
+    "name": "Dead Man's Shoes"
   },
   {
     "id": "/en/dear_frankie",
-    "name": "Dear Frankie",
     "initial_release_date": "2004-05-04",
     "genre": [
       "Indie film",
@@ -9792,11 +9780,11 @@
     ],
     "directed_by": [
       "Shona Auerbach"
-    ]
+    ],
+    "name": "Dear Frankie"
   },
   {
     "id": "/en/dear_wendy",
-    "name": "Dear Wendy",
     "initial_release_date": "2004-05-16",
     "genre": [
       "Indie film",
@@ -9808,11 +9796,11 @@
     ],
     "directed_by": [
       "Thomas Vinterberg"
-    ]
+    ],
+    "name": "Dear Wendy"
   },
   {
     "id": "/en/death_in_gaza",
-    "name": "Death in Gaza",
     "initial_release_date": "2004-02-11",
     "genre": [
       "Documentary film",
@@ -9823,11 +9811,11 @@
     ],
     "directed_by": [
       "James Miller"
-    ]
+    ],
+    "name": "Death in Gaza"
   },
   {
     "id": "/en/death_to_smoochy",
-    "name": "Death to Smoochy",
     "initial_release_date": "2002-03-29",
     "genre": [
       "Comedy",
@@ -9837,11 +9825,11 @@
     ],
     "directed_by": [
       "Danny DeVito"
-    ]
+    ],
+    "name": "Death to Smoochy"
   },
   {
     "id": "/en/death_trance",
-    "name": "Death Trance",
     "initial_release_date": "2005-05-12",
     "genre": [
       "Action Film",
@@ -9855,11 +9843,11 @@
     ],
     "directed_by": [
       "Yuji Shimomura"
-    ]
+    ],
+    "name": "Death Trance"
   },
   {
     "id": "/en/death_walks_the_streets",
-    "name": "Death Walks the Streets",
     "initial_release_date": "2008-06-26",
     "genre": [
       "Indie film",
@@ -9868,11 +9856,11 @@
     ],
     "directed_by": [
       "James Zahn"
-    ]
+    ],
+    "name": "Death Walks the Streets"
   },
   {
     "id": "/en/deathwatch",
-    "name": "Deathwatch",
     "initial_release_date": "2002-10-06",
     "genre": [
       "Horror",
@@ -9882,11 +9870,11 @@
     ],
     "directed_by": [
       "Michael J. Bassett"
-    ]
+    ],
+    "name": "Deathwatch"
   },
   {
     "id": "/en/december_boys",
-    "name": "December Boys",
     "genre": [
       "Coming of age",
       "Film adaptation",
@@ -9897,11 +9885,11 @@
     ],
     "directed_by": [
       "Rod Hardy"
-    ]
+    ],
+    "name": "December Boys"
   },
   {
     "id": "/en/decoys",
-    "name": "Decoys",
     "genre": [
       "Science Fiction",
       "Horror",
@@ -9911,11 +9899,11 @@
     ],
     "directed_by": [
       "Matthew Hastings"
-    ]
+    ],
+    "name": "Decoys"
   },
   {
     "id": "/en/deepavali",
-    "name": "Deepavali",
     "initial_release_date": "2007-02-09",
     "genre": [
       "Romance Film",
@@ -9924,11 +9912,11 @@
     ],
     "directed_by": [
       "Ezhil"
-    ]
+    ],
+    "name": "Deepavali"
   },
   {
     "id": "/en/deewane_huye_pagal",
-    "name": "Deewane Huye Paagal",
     "initial_release_date": "2005-11-25",
     "genre": [
       "Romance Film",
@@ -9940,11 +9928,11 @@
     ],
     "directed_by": [
       "Vikram Bhatt"
-    ]
+    ],
+    "name": "Deewane Huye Paagal"
   },
   {
     "id": "/wikipedia/ja_id/980449",
-    "name": "D\u00e9j\u00e0 Vu",
     "initial_release_date": "2006-11-20",
     "genre": [
       "Thriller",
@@ -9957,11 +9945,11 @@
     ],
     "directed_by": [
       "Tony Scott"
-    ]
+    ],
+    "name": "D\u00e9j\u00e0 Vu"
   },
   {
     "id": "/en/democrazy_2005",
-    "name": "Democrazy",
     "genre": [
       "Parody",
       "Action/Adventure",
@@ -9972,11 +9960,11 @@
     ],
     "directed_by": [
       "Michael Legge"
-    ]
+    ],
+    "name": "Democrazy"
   },
   {
     "id": "/en/demonium",
-    "name": "Demonium",
     "initial_release_date": "2001-08-25",
     "genre": [
       "Horror",
@@ -9984,11 +9972,11 @@
     ],
     "directed_by": [
       "Andreas Schnaas"
-    ]
+    ],
+    "name": "Demonium"
   },
   {
     "id": "/en/der_schuh_des_manitu",
-    "name": "Der Schuh des Manitu",
     "initial_release_date": "2001-07-13",
     "genre": [
       "Western",
@@ -9997,11 +9985,11 @@
     ],
     "directed_by": [
       "Michael Herbig"
-    ]
+    ],
+    "name": "Der Schuh des Manitu"
   },
   {
     "id": "/en/der_tunnel",
-    "name": "The Tunnel",
     "initial_release_date": "2001-01-21",
     "genre": [
       "World cinema",
@@ -10012,11 +10000,11 @@
     ],
     "directed_by": [
       "Roland Suso Richter"
-    ]
+    ],
+    "name": "The Tunnel"
   },
   {
     "id": "/en/derailed",
-    "name": "Derailed",
     "initial_release_date": "2005-11-11",
     "genre": [
       "Thriller",
@@ -10026,11 +10014,11 @@
     ],
     "directed_by": [
       "Mikael H\u00e5fstr\u00f6m"
-    ]
+    ],
+    "name": "Derailed"
   },
   {
     "id": "/en/derailed_2002",
-    "name": "Derailed",
     "genre": [
       "Thriller",
       "Action Film",
@@ -10040,11 +10028,11 @@
     ],
     "directed_by": [
       "Bob Misiorowski"
-    ]
+    ],
+    "name": "Derailed"
   },
   {
     "id": "/en/destinys_child_live_in_atlana",
-    "name": "Destiny's Child: Live In Atlana",
     "initial_release_date": "2006-03-27",
     "genre": [
       "Music",
@@ -10052,10 +10040,12 @@
     ],
     "directed_by": [
       "Julia Knowles"
-    ]
+    ],
+    "name": "Destiny's Child: Live In Atlana"
   },
   {
     "id": "/en/deuce_bigalow_european_gigolo",
+    "initial_release_date": "2005-08-06",
     "name": "Deuce Bigalow: European Gigolo",
     "directed_by": [
       "Mike Bigelow"
@@ -10066,11 +10056,11 @@
       "Gross out",
       "Gross-out film",
       "Comedy"
-    ],
-    "initial_release_date": "2005-08-06"
+    ]
   },
   {
     "id": "/en/dev",
+    "initial_release_date": "2004-06-11",
     "name": "Dev",
     "directed_by": [
       "Govind Nihalani"
@@ -10078,11 +10068,11 @@
     "genre": [
       "Drama",
       "Bollywood"
-    ],
-    "initial_release_date": "2004-06-11"
+    ]
   },
   {
     "id": "/en/devadasu",
+    "initial_release_date": "2006-01-11",
     "name": "Devadasu",
     "directed_by": [
       "YVS Chowdary",
@@ -10093,11 +10083,11 @@
       "Drama",
       "Tollywood",
       "World cinema"
-    ],
-    "initial_release_date": "2006-01-11"
+    ]
   },
   {
     "id": "/en/devdas_2002",
+    "initial_release_date": "2002-05-23",
     "name": "Devdas",
     "directed_by": [
       "Sanjay Leela Bhansali"
@@ -10109,22 +10099,22 @@
       "Bollywood",
       "World cinema",
       "Musical Drama"
-    ],
-    "initial_release_date": "2002-05-23"
+    ]
   },
   {
     "id": "/en/devils_playground_2003",
+    "initial_release_date": "2003-02-04",
     "name": "Devil's Playground",
     "directed_by": [
       "Lucy Walker"
     ],
     "genre": [
       "Documentary film"
-    ],
-    "initial_release_date": "2003-02-04"
+    ]
   },
   {
     "id": "/en/the_devils_pond",
+    "initial_release_date": "2003-10-21",
     "name": "Devil's Pond",
     "directed_by": [
       "Joel Viertel"
@@ -10132,11 +10122,11 @@
     "genre": [
       "Thriller",
       "Suspense"
-    ],
-    "initial_release_date": "2003-10-21"
+    ]
   },
   {
     "id": "/en/dhadkan",
+    "initial_release_date": "2000-08-11",
     "name": "Dhadkan",
     "directed_by": [
       "Dharmesh Darshan"
@@ -10149,11 +10139,11 @@
       "World cinema",
       "Drama",
       "Musical Drama"
-    ],
-    "initial_release_date": "2000-08-11"
+    ]
   },
   {
     "id": "/en/dhool",
+    "initial_release_date": "2003-01-10",
     "name": "Dhool",
     "directed_by": [
       "Dharani"
@@ -10166,11 +10156,11 @@
       "World cinema",
       "Drama",
       "Musical Drama"
-    ],
-    "initial_release_date": "2003-01-10"
+    ]
   },
   {
     "id": "/en/dhoom_2",
+    "initial_release_date": "2006-11-23",
     "name": "Dhoom 2",
     "directed_by": [
       "Sanjay Gadhvi"
@@ -10186,8 +10176,7 @@
       "Action Thriller",
       "Musical comedy",
       "Comedy"
-    ],
-    "initial_release_date": "2006-11-23"
+    ]
   },
   {
     "id": "/en/dhyaas_parva",
@@ -10215,6 +10204,7 @@
   },
   {
     "id": "/en/diary_of_a_mad_black_woman",
+    "initial_release_date": "2005-02-25",
     "name": "Diary of a Mad Black Woman",
     "directed_by": [
       "Darren Grant"
@@ -10225,11 +10215,11 @@
       "Romantic comedy",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2005-02-25"
+    ]
   },
   {
     "id": "/en/dickie_roberts_former_child_star",
+    "initial_release_date": "2003-09-03",
     "name": "Dickie Roberts: Former Child Star",
     "directed_by": [
       "Sam Weisman"
@@ -10238,11 +10228,11 @@
       "Parody",
       "Slapstick",
       "Comedy"
-    ],
-    "initial_release_date": "2003-09-03"
+    ]
   },
   {
     "id": "/en/die_bad",
+    "initial_release_date": "2000-07-15",
     "name": "Die Bad",
     "directed_by": [
       "Ryoo Seung-wan"
@@ -10250,22 +10240,22 @@
     "genre": [
       "Crime Fiction",
       "Drama"
-    ],
-    "initial_release_date": "2000-07-15"
+    ]
   },
   {
     "id": "/en/die_mommie_die",
+    "initial_release_date": "2003-01-20",
     "name": "Die Mommie Die!",
     "directed_by": [
       "Mark Rucker"
     ],
     "genre": [
       "Comedy"
-    ],
-    "initial_release_date": "2003-01-20"
+    ]
   },
   {
     "id": "/en/dieu_est_grand_je_suis_toute_petite",
+    "initial_release_date": "2001-09-26",
     "name": "God Is Great and I'm Not",
     "directed_by": [
       "Pascale Bailly"
@@ -10278,11 +10268,11 @@
       "Comedy of manners",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2001-09-26"
+    ]
   },
   {
     "id": "/en/digimon_the_movie",
+    "initial_release_date": "2000-03-17",
     "name": "Digimon: The Movie",
     "directed_by": [
       "Mamoru Hosoda",
@@ -10296,11 +10286,11 @@
       "Adventure Film",
       "Action Film",
       "Thriller"
-    ],
-    "initial_release_date": "2000-03-17"
+    ]
   },
   {
     "id": "/en/digital_monster_x-evolution",
+    "initial_release_date": "2005-01-03",
     "name": "Digital Monster X-Evolution",
     "directed_by": [
       "Hiroyuki Kakud\u014d"
@@ -10309,11 +10299,11 @@
       "Computer Animation",
       "Animation",
       "Japanese Movies"
-    ],
-    "initial_release_date": "2005-01-03"
+    ]
   },
   {
     "id": "/en/digna_hasta_el_ultimo_aliento",
+    "initial_release_date": "2004-12-17",
     "name": "Digna... hasta el \u00faltimo aliento",
     "directed_by": [
       "Felipe Cazals"
@@ -10323,11 +10313,11 @@
       "Culture &amp; Society",
       "Law &amp; Crime",
       "Biographical film"
-    ],
-    "initial_release_date": "2004-12-17"
+    ]
   },
   {
     "id": "/en/dil_chahta_hai",
+    "initial_release_date": "2001-07-24",
     "name": "Dil Chahta Hai",
     "directed_by": [
       "Farhan Akhtar"
@@ -10342,11 +10332,11 @@
       "Musical comedy",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2001-07-24"
+    ]
   },
   {
     "id": "/en/dil_diya_hai",
+    "initial_release_date": "2006-09-08",
     "name": "Dil Diya Hai",
     "directed_by": [
       "Aditya Datt",
@@ -10357,11 +10347,11 @@
       "Bollywood",
       "World cinema",
       "Drama"
-    ],
-    "initial_release_date": "2006-09-08"
+    ]
   },
   {
     "id": "/en/dil_hai_tumhaara",
+    "initial_release_date": "2002-09-06",
     "name": "Dil Hai Tumhara",
     "directed_by": [
       "Kundan Shah"
@@ -10374,11 +10364,11 @@
       "World cinema",
       "Drama",
       "Musical Drama"
-    ],
-    "initial_release_date": "2002-09-06"
+    ]
   },
   {
     "id": "/en/dil_ka_rishta",
+    "initial_release_date": "2003-01-17",
     "name": "Dil Ka Rishta",
     "directed_by": [
       "Naresh Malhotra"
@@ -10386,11 +10376,11 @@
     "genre": [
       "Romance Film",
       "Bollywood"
-    ],
-    "initial_release_date": "2003-01-17"
+    ]
   },
   {
     "id": "/en/dil_ne_jise_apna_kahaa",
+    "initial_release_date": "2004-09-10",
     "name": "Dil Ne Jise Apna Kahaa",
     "directed_by": [
       "Atul Agnihotri"
@@ -10404,11 +10394,11 @@
       "Comedy",
       "Bollywood",
       "Drama"
-    ],
-    "initial_release_date": "2004-09-10"
+    ]
   },
   {
     "id": "/en/dinosaur_2000",
+    "initial_release_date": "2000-05-13",
     "name": "Dinosaur",
     "directed_by": [
       "Eric Leighton",
@@ -10422,11 +10412,11 @@
       "Family",
       "Adventure Film",
       "Thriller"
-    ],
-    "initial_release_date": "2000-05-13"
+    ]
   },
   {
     "id": "/en/dirty_dancing_2004",
+    "initial_release_date": "2004-02-27",
     "name": "Dirty Dancing: Havana Nights",
     "directed_by": [
       "Guy Ferland"
@@ -10441,11 +10431,11 @@
       "Dance film",
       "Musical Drama",
       "Drama"
-    ],
-    "initial_release_date": "2004-02-27"
+    ]
   },
   {
     "id": "/en/dirty_deeds",
+    "initial_release_date": "2002-07-18",
     "name": "Dirty Deeds",
     "directed_by": [
       "David Caesar"
@@ -10459,22 +10449,22 @@
       "World cinema",
       "Gangster Film",
       "Drama"
-    ],
-    "initial_release_date": "2002-07-18"
+    ]
   },
   {
     "id": "/en/dirty_deeds_2005",
+    "initial_release_date": "2005-08-26",
     "name": "Dirty Deeds",
     "directed_by": [
       "David Kendall"
     ],
     "genre": [
       "Comedy"
-    ],
-    "initial_release_date": "2005-08-26"
+    ]
   },
   {
     "id": "/en/dirty_love",
+    "initial_release_date": "2005-09-23",
     "name": "Dirty Love",
     "directed_by": [
       "John Mallory Asher"
@@ -10485,11 +10475,11 @@
       "Romantic comedy",
       "Romance Film",
       "Comedy"
-    ],
-    "initial_release_date": "2005-09-23"
+    ]
   },
   {
     "id": "/en/disappearing_acts",
+    "initial_release_date": "2000-12-09",
     "name": "Disappearing Acts",
     "directed_by": [
       "Gina Prince-Bythewood"
@@ -10500,11 +10490,11 @@
       "Film adaptation",
       "Comedy-drama",
       "Drama"
-    ],
-    "initial_release_date": "2000-12-09"
+    ]
   },
   {
     "id": "/en/dishyum",
+    "initial_release_date": "2006-02-02",
     "name": "Dishyum",
     "directed_by": [
       "Sasi"
@@ -10515,22 +10505,22 @@
       "Drama",
       "Tamil cinema",
       "World cinema"
-    ],
-    "initial_release_date": "2006-02-02"
+    ]
   },
   {
     "id": "/en/distant_lights",
+    "initial_release_date": "2003-02-11",
     "name": "Distant Lights",
     "directed_by": [
       "Hans-Christian Schmid"
     ],
     "genre": [
       "Drama"
-    ],
-    "initial_release_date": "2003-02-11"
+    ]
   },
   {
     "id": "/en/district_b13",
+    "initial_release_date": "2004-11-10",
     "name": "District 13",
     "directed_by": [
       "Pierre Morel"
@@ -10541,11 +10531,11 @@
       "Action Film",
       "Science Fiction",
       "Crime Fiction"
-    ],
-    "initial_release_date": "2004-11-10"
+    ]
   },
   {
     "id": "/en/disturbia",
+    "initial_release_date": "2007-04-04",
     "name": "Disturbia",
     "directed_by": [
       "D. J. Caruso"
@@ -10555,11 +10545,11 @@
       "Mystery",
       "Teen film",
       "Drama"
-    ],
-    "initial_release_date": "2007-04-04"
+    ]
   },
   {
     "id": "/en/ditto_2000",
+    "initial_release_date": "2000-05-27",
     "name": "Ditto",
     "directed_by": [
       "Jeong-kwon Kim"
@@ -10569,11 +10559,11 @@
       "Science Fiction",
       "East Asian cinema",
       "World cinema"
-    ],
-    "initial_release_date": "2000-05-27"
+    ]
   },
   {
     "id": "/en/divine_intervention_2002",
+    "initial_release_date": "2002-05-19",
     "name": "Divine Intervention",
     "directed_by": [
       "Elia Suleiman"
@@ -10584,11 +10574,11 @@
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2002-05-19"
+    ]
   },
   {
     "id": "/en/divine_secrets_of_the_ya_ya_sisterhood",
+    "initial_release_date": "2002-06-03",
     "name": "Divine Secrets of the Ya-Ya Sisterhood",
     "directed_by": [
       "Callie Khouri"
@@ -10601,11 +10591,11 @@
       "Ensemble Film",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2002-06-03"
+    ]
   },
   {
     "id": "/en/doa_dead_or_alive",
+    "initial_release_date": "2006-09-07",
     "name": "DOA: Dead or Alive",
     "directed_by": [
       "Corey Yuen"
@@ -10613,11 +10603,11 @@
     "genre": [
       "Action Film",
       "Adventure Film"
-    ],
-    "initial_release_date": "2006-09-07"
+    ]
   },
   {
     "id": "/en/dodgeball_a_true_underdog_story",
+    "initial_release_date": "2004-06-18",
     "name": "DodgeBall: A True Underdog Story",
     "directed_by": [
       "Rawson Marshall Thurber"
@@ -10625,11 +10615,11 @@
     "genre": [
       "Sports",
       "Comedy"
-    ],
-    "initial_release_date": "2004-06-18"
+    ]
   },
   {
     "id": "/en/dog_soldiers",
+    "initial_release_date": "2002-03-22",
     "name": "Dog Soldiers",
     "directed_by": [
       "Neil Marshall"
@@ -10638,11 +10628,11 @@
       "Horror",
       "Action Film",
       "Creature Film"
-    ],
-    "initial_release_date": "2002-03-22"
+    ]
   },
   {
     "id": "/en/dogtown_and_z-boys",
+    "initial_release_date": "2001-01-19",
     "name": "Dogtown and Z-Boys",
     "directed_by": [
       "Stacy Peralta"
@@ -10652,22 +10642,22 @@
       "Sports",
       "Extreme Sports",
       "Biographical film"
-    ],
-    "initial_release_date": "2001-01-19"
+    ]
   },
   {
     "id": "/en/dogville",
+    "initial_release_date": "2003-05-19",
     "name": "Dogville",
     "directed_by": [
       "Lars von Trier"
     ],
     "genre": [
       "Drama"
-    ],
-    "initial_release_date": "2003-05-19"
+    ]
   },
   {
     "id": "/en/doll_master",
+    "initial_release_date": "2004-07-30",
     "name": "The Doll Master",
     "directed_by": [
       "Jeong Yong-Gi"
@@ -10677,11 +10667,11 @@
       "Thriller",
       "East Asian cinema",
       "World cinema"
-    ],
-    "initial_release_date": "2004-07-30"
+    ]
   },
   {
     "id": "/en/dolls",
+    "initial_release_date": "2002-09-05",
     "name": "Dolls",
     "directed_by": [
       "Takeshi Kitano"
@@ -10689,11 +10679,11 @@
     "genre": [
       "Romance Film",
       "Drama"
-    ],
-    "initial_release_date": "2002-09-05"
+    ]
   },
   {
     "id": "/en/dominion_prequel_to_the_exorcist",
+    "initial_release_date": "2005-05-20",
     "name": "Dominion: Prequel to the Exorcist",
     "directed_by": [
       "Paul Schrader"
@@ -10703,11 +10693,11 @@
       "Supernatural",
       "Psychological thriller",
       "Cult film"
-    ],
-    "initial_release_date": "2005-05-20"
+    ]
   },
   {
     "id": "/en/domino_2005",
+    "initial_release_date": "2005-09-25",
     "name": "Domino",
     "directed_by": [
       "Tony Scott"
@@ -10720,11 +10710,11 @@
       "Comedy",
       "Adventure Film",
       "Drama"
-    ],
-    "initial_release_date": "2005-09-25"
+    ]
   },
   {
     "id": "/en/don_2006",
+    "initial_release_date": "2006-10-20",
     "name": "Don: The Chase Begins Again",
     "directed_by": [
       "Farhan Akhtar"
@@ -10738,11 +10728,11 @@
       "Comedy",
       "Bollywood",
       "World cinema"
-    ],
-    "initial_release_date": "2006-10-20"
+    ]
   },
   {
     "id": "/en/dons_plum",
+    "initial_release_date": "2001-02-10",
     "name": "Don's Plum",
     "directed_by": [
       "R.D. Robb"
@@ -10752,11 +10742,11 @@
       "Ensemble Film",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2001-02-10"
+    ]
   },
   {
     "id": "/en/dont_come_knocking",
+    "initial_release_date": "2005-05-19",
     "name": "Don't Come Knocking",
     "directed_by": [
       "Wim Wenders"
@@ -10768,11 +10758,11 @@
       "Drama",
       "Music",
       "Musical Drama"
-    ],
-    "initial_release_date": "2005-05-19"
+    ]
   },
   {
     "id": "/en/dont_move",
+    "initial_release_date": "2004-03-12",
     "name": "Don't Move",
     "directed_by": [
       "Sergio Castellitto"
@@ -10780,11 +10770,11 @@
     "genre": [
       "Romance Film",
       "Drama"
-    ],
-    "initial_release_date": "2004-03-12"
+    ]
   },
   {
     "id": "/en/dont_say_a_word_2001",
+    "initial_release_date": "2001-09-24",
     "name": "Don't Say a Word",
     "directed_by": [
       "Gary Fleder"
@@ -10794,11 +10784,11 @@
       "Psychological thriller",
       "Crime Fiction",
       "Suspense"
-    ],
-    "initial_release_date": "2001-09-24"
+    ]
   },
   {
     "id": "/en/donnie_darko",
+    "initial_release_date": "2001-01-19",
     "name": "Donnie Darko",
     "directed_by": [
       "Richard Kelly"
@@ -10807,11 +10797,11 @@
       "Science Fiction",
       "Mystery",
       "Drama"
-    ],
-    "initial_release_date": "2001-01-19"
+    ]
   },
   {
     "id": "/en/doomsday_2008",
+    "initial_release_date": "2008-03-14",
     "name": "Doomsday",
     "directed_by": [
       "Neil Marshall"
@@ -10819,11 +10809,11 @@
     "genre": [
       "Science Fiction",
       "Action Film"
-    ],
-    "initial_release_date": "2008-03-14"
+    ]
   },
   {
     "id": "/en/dopamine_2003",
+    "initial_release_date": "2003-01-23",
     "name": "Dopamine",
     "directed_by": [
       "Mark Decena"
@@ -10835,11 +10825,11 @@
       "Romantic comedy",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2003-01-23"
+    ]
   },
   {
     "id": "/en/dosti_friends_forever",
+    "initial_release_date": "2005-12-23",
     "name": "Dosti: Friends Forever",
     "directed_by": [
       "Suneel Darshan"
@@ -10847,11 +10837,11 @@
     "genre": [
       "Romance Film",
       "Drama"
-    ],
-    "initial_release_date": "2005-12-23"
+    ]
   },
   {
     "id": "/en/double_take",
+    "initial_release_date": "2001-01-12",
     "name": "Double Take",
     "directed_by": [
       "George Gallo"
@@ -10861,11 +10851,11 @@
       "Action/Adventure",
       "Action Film",
       "Comedy"
-    ],
-    "initial_release_date": "2001-01-12"
+    ]
   },
   {
     "id": "/en/double_teamed",
+    "initial_release_date": "2002-01-18",
     "name": "Double Teamed",
     "directed_by": [
       "Duwayne Dunham"
@@ -10876,11 +10866,11 @@
       "Family Drama",
       "Children's/Family",
       "Sports"
-    ],
-    "initial_release_date": "2002-01-18"
+    ]
   },
   {
     "id": "/en/double_vision_2002",
+    "initial_release_date": "2002-05-20",
     "name": "Double Vision",
     "directed_by": [
       "Chen Kuo-Fu"
@@ -10897,11 +10887,11 @@
       "Crime Thriller",
       "Action/Adventure",
       "Chinese Movies"
-    ],
-    "initial_release_date": "2002-05-20"
+    ]
   },
   {
     "id": "/en/double_whammy",
+    "initial_release_date": "2001-01-20",
     "name": "Double Whammy",
     "directed_by": [
       "Tom DiCillo"
@@ -10916,11 +10906,11 @@
       "Romantic comedy",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2001-01-20"
+    ]
   },
   {
     "id": "/en/down_and_derby",
+    "initial_release_date": "2005-04-15",
     "name": "Down and Derby",
     "directed_by": [
       "Eric Hendershot"
@@ -10929,11 +10919,11 @@
       "Family",
       "Sports",
       "Comedy"
-    ],
-    "initial_release_date": "2005-04-15"
+    ]
   },
   {
     "id": "/en/down_in_the_valley",
+    "initial_release_date": "2005-05-13",
     "name": "Down in the Valley",
     "directed_by": [
       "David Jacobson"
@@ -10944,11 +10934,11 @@
       "Family Drama",
       "Psychological thriller",
       "Drama"
-    ],
-    "initial_release_date": "2005-05-13"
+    ]
   },
   {
     "id": "/en/down_to_earth",
+    "initial_release_date": "2001-02-12",
     "name": "Down to Earth",
     "directed_by": [
       "Chris Weitz",
@@ -10957,11 +10947,11 @@
     "genre": [
       "Fantasy",
       "Comedy"
-    ],
-    "initial_release_date": "2001-02-12"
+    ]
   },
   {
     "id": "/en/down_with_love",
+    "initial_release_date": "2003-05-09",
     "name": "Down with Love",
     "directed_by": [
       "Peyton Reed"
@@ -10972,11 +10962,11 @@
       "Screwball comedy",
       "Parody",
       "Comedy"
-    ],
-    "initial_release_date": "2003-05-09"
+    ]
   },
   {
     "id": "/en/downfall",
+    "initial_release_date": "2004-09-08",
     "name": "Downfall",
     "directed_by": [
       "Oliver Hirschbiegel"
@@ -10986,11 +10976,11 @@
       "War film",
       "Historical drama",
       "Drama"
-    ],
-    "initial_release_date": "2004-09-08"
+    ]
   },
   {
     "id": "/en/dr_dolittle_2",
+    "initial_release_date": "2001-06-19",
     "name": "Dr. Dolittle 2",
     "directed_by": [
       "Steve Carr"
@@ -11000,11 +10990,11 @@
       "Fantasy Comedy",
       "Comedy",
       "Romance Film"
-    ],
-    "initial_release_date": "2001-06-19"
+    ]
   },
   {
     "id": "/en/dr_dolittle_3",
+    "initial_release_date": "2006-04-25",
     "name": "Dr. Dolittle 3",
     "directed_by": [
       "Rich Thorne"
@@ -11012,11 +11002,11 @@
     "genre": [
       "Family",
       "Comedy"
-    ],
-    "initial_release_date": "2006-04-25"
+    ]
   },
   {
     "id": "/en/dracula_pages_from_a_virgins_diary",
+    "initial_release_date": "2002-02-28",
     "name": "Dracula: Pages from a Virgin's Diary",
     "directed_by": [
       "Guy Maddin"
@@ -11031,8 +11021,7 @@
       "Horror comedy",
       "Musical comedy",
       "Comedy"
-    ],
-    "initial_release_date": "2002-02-28"
+    ]
   },
   {
     "id": "/en/dragon_boys",
@@ -11048,6 +11037,7 @@
   },
   {
     "id": "/en/dragon_tiger_gate",
+    "initial_release_date": "2006-07-27",
     "name": "Dragon Tiger Gate",
     "directed_by": [
       "Wilson Yip"
@@ -11062,11 +11052,11 @@
       "World cinema",
       "Action Thriller",
       "Chinese Movies"
-    ],
-    "initial_release_date": "2006-07-27"
+    ]
   },
   {
     "id": "/en/dragonfly_2002",
+    "initial_release_date": "2002-02-18",
     "name": "Dragonfly",
     "directed_by": [
       "Tom Shadyac"
@@ -11077,11 +11067,11 @@
       "Romance Film",
       "Fantasy",
       "Drama"
-    ],
-    "initial_release_date": "2002-02-18"
+    ]
   },
   {
     "id": "/en/dragonlance_dragons_of_autumn_twilight",
+    "initial_release_date": "2008-01-15",
     "name": "Dragonlance: Dragons of Autumn Twilight",
     "directed_by": [
       "Will Meugniot"
@@ -11092,11 +11082,11 @@
       "Fantasy",
       "Adventure Film",
       "Science Fiction"
-    ],
-    "initial_release_date": "2008-01-15"
+    ]
   },
   {
     "id": "/en/drake_josh_go_hollywood",
+    "initial_release_date": "2006-01-06",
     "name": "Drake &amp; Josh Go Hollywood",
     "directed_by": [
       "Adam Weissman",
@@ -11106,11 +11096,11 @@
       "Family",
       "Adventure Film",
       "Comedy"
-    ],
-    "initial_release_date": "2006-01-06"
+    ]
   },
   {
     "id": "/en/drawing_restraint_9",
+    "initial_release_date": "2005-07-01",
     "name": "Drawing Restraint 9",
     "directed_by": [
       "Matthew Barney"
@@ -11122,11 +11112,11 @@
       "Avant-garde",
       "Experimental film",
       "Japanese Movies"
-    ],
-    "initial_release_date": "2005-07-01"
+    ]
   },
   {
     "id": "/en/dreamcatcher",
+    "initial_release_date": "2003-03-06",
     "name": "Dreamcatcher",
     "directed_by": [
       "Lawrence Kasdan"
@@ -11136,11 +11126,11 @@
       "Horror",
       "Thriller",
       "Drama"
-    ],
-    "initial_release_date": "2003-03-06"
+    ]
   },
   {
     "id": "/en/dreamer_2005",
+    "initial_release_date": "2005-09-10",
     "name": "Dreamer",
     "directed_by": [
       "John Gatins"
@@ -11149,11 +11139,11 @@
       "Family",
       "Sports",
       "Drama"
-    ],
-    "initial_release_date": "2005-09-10"
+    ]
   },
   {
     "id": "/en/dreaming_of_julia",
+    "initial_release_date": "2003-10-24",
     "name": "Dreaming of Julia",
     "directed_by": [
       "Juan Gerard"
@@ -11165,11 +11155,11 @@
       "Action/Adventure",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2003-10-24"
+    ]
   },
   {
     "id": "/en/driving_miss_wealthy_juet_sai_ho_bun",
+    "initial_release_date": "2004-05-03",
     "name": "Driving Miss Wealthy",
     "directed_by": [
       "James Yuen"
@@ -11181,11 +11171,11 @@
       "Chinese Movies",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2004-05-03"
+    ]
   },
   {
     "id": "/en/drowning_mona",
+    "initial_release_date": "2000-01-02",
     "name": "Drowning Mona",
     "directed_by": [
       "Nick Gomez"
@@ -11197,8 +11187,7 @@
       "Crime Comedy",
       "Crime Fiction",
       "Comedy"
-    ],
-    "initial_release_date": "2000-01-02"
+    ]
   },
   {
     "id": "/en/drugstore_girl",
@@ -11213,6 +11202,7 @@
   },
   {
     "id": "/en/druids",
+    "initial_release_date": "2001-08-31",
     "name": "Druids",
     "directed_by": [
       "Jacques Dorfmann"
@@ -11227,11 +11217,11 @@
       "Historical fiction",
       "Biographical film",
       "Drama"
-    ],
-    "initial_release_date": "2001-08-31"
+    ]
   },
   {
     "id": "/en/duck_the_carbine_high_massacre",
+    "initial_release_date": "2000-04-20",
     "name": "Duck! The Carbine High Massacre",
     "directed_by": [
       "William Hellfire",
@@ -11244,11 +11234,11 @@
       "Indie film",
       "Teen film",
       "Comedy"
-    ],
-    "initial_release_date": "2000-04-20"
+    ]
   },
   {
     "id": "/en/dude_wheres_my_car",
+    "initial_release_date": "2000-12-10",
     "name": "Dude, Where's My Car?",
     "directed_by": [
       "Danny Leiner"
@@ -11257,8 +11247,7 @@
       "Mystery",
       "Comedy",
       "Science Fiction"
-    ],
-    "initial_release_date": "2000-12-10"
+    ]
   },
   {
     "id": "/en/dude_wheres_the_party",
@@ -11274,6 +11263,7 @@
   },
   {
     "id": "/en/duets",
+    "initial_release_date": "2000-09-09",
     "name": "Duets",
     "directed_by": [
       "Bruce Paltrow"
@@ -11284,11 +11274,11 @@
       "Musical comedy",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2000-09-09"
+    ]
   },
   {
     "id": "/en/dumb_dumberer",
+    "initial_release_date": "2003-06-13",
     "name": "Dumb &amp; Dumberer: When Harry Met Lloyd",
     "directed_by": [
       "Troy Miller"
@@ -11299,11 +11289,11 @@
       "Screwball comedy",
       "Slapstick",
       "Comedy"
-    ],
-    "initial_release_date": "2003-06-13"
+    ]
   },
   {
     "id": "/en/dumm_dumm_dumm",
+    "initial_release_date": "2001-04-13",
     "name": "Dumm Dumm Dumm",
     "directed_by": [
       "Azhagam Perumal"
@@ -11312,11 +11302,11 @@
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "initial_release_date": "2001-04-13"
+    ]
   },
   {
     "id": "/en/dummy_2003",
+    "initial_release_date": "2003-09-12",
     "name": "Dummy",
     "directed_by": [
       "Greg Pritikin"
@@ -11328,11 +11318,11 @@
       "Comedy",
       "Comedy-drama",
       "Drama"
-    ],
-    "initial_release_date": "2003-09-12"
+    ]
   },
   {
     "id": "/en/dumplings",
+    "initial_release_date": "2004-08-04",
     "name": "Dumplings",
     "directed_by": [
       "Fruit Chan"
@@ -11340,11 +11330,11 @@
     "genre": [
       "Horror",
       "Drama"
-    ],
-    "initial_release_date": "2004-08-04"
+    ]
   },
   {
     "id": "/en/duplex",
+    "initial_release_date": "2003-09-26",
     "name": "Duplex",
     "directed_by": [
       "Danny DeVito"
@@ -11352,11 +11342,11 @@
     "genre": [
       "Black comedy",
       "Comedy"
-    ],
-    "initial_release_date": "2003-09-26"
+    ]
   },
   {
     "id": "/en/dus",
+    "initial_release_date": "2005-07-08",
     "name": "Dus",
     "directed_by": [
       "Anubhav Sinha"
@@ -11366,11 +11356,11 @@
       "Action Film",
       "Crime Fiction",
       "Bollywood"
-    ],
-    "initial_release_date": "2005-07-08"
+    ]
   },
   {
     "id": "/en/dust_2001",
+    "initial_release_date": "2001-08-29",
     "name": "Dust",
     "directed_by": [
       "Milcho Manchevski"
@@ -11378,11 +11368,11 @@
     "genre": [
       "Western",
       "Drama"
-    ],
-    "initial_release_date": "2001-08-29"
+    ]
   },
   {
     "id": "/wikipedia/en_title/E_$0028film$0029",
+    "initial_release_date": "2006-10-21",
     "name": "E",
     "directed_by": [
       "S. P. Jananathan"
@@ -11391,8 +11381,7 @@
       "Action Film",
       "Thriller",
       "Drama"
-    ],
-    "initial_release_date": "2006-10-21"
+    ]
   },
   {
     "id": "/en/earthlings",
@@ -11409,6 +11398,7 @@
   },
   {
     "id": "/en/eastern_promises",
+    "initial_release_date": "2007-09-08",
     "name": "Eastern Promises",
     "directed_by": [
       "David Cronenberg"
@@ -11418,8 +11408,7 @@
       "Crime Fiction",
       "Mystery",
       "Drama"
-    ],
-    "initial_release_date": "2007-09-08"
+    ]
   },
   {
     "id": "/en/eating_out",
@@ -11439,6 +11428,7 @@
   },
   {
     "id": "/en/echoes_of_innocence",
+    "initial_release_date": "2005-09-09",
     "name": "Echoes of Innocence",
     "directed_by": [
       "Nathan Todd Sims"
@@ -11450,22 +11440,22 @@
       "Mystery",
       "Supernatural",
       "Drama"
-    ],
-    "initial_release_date": "2005-09-09"
+    ]
   },
   {
     "id": "/en/eddies_million_dollar_cook_off",
+    "initial_release_date": "2003-07-18",
     "name": "Eddie's Million Dollar Cook-Off",
     "directed_by": [
       "Paul Hoen"
     ],
     "genre": [
       "Teen film"
-    ],
-    "initial_release_date": "2003-07-18"
+    ]
   },
   {
     "id": "/en/edison_2006",
+    "initial_release_date": "2005-03-05",
     "name": "Edison",
     "directed_by": [
       "David J. Burke"
@@ -11476,11 +11466,11 @@
       "Mystery",
       "Crime Thriller",
       "Drama"
-    ],
-    "initial_release_date": "2005-03-05"
+    ]
   },
   {
     "id": "/en/edmond_2006",
+    "initial_release_date": "2005-09-02",
     "name": "Edmond",
     "directed_by": [
       "Stuart Gordon"
@@ -11491,11 +11481,11 @@
       "Indie film",
       "Crime Fiction",
       "Drama"
-    ],
-    "initial_release_date": "2005-09-02"
+    ]
   },
   {
     "id": "/en/eight_below",
+    "initial_release_date": "2006-02-17",
     "name": "Eight Below",
     "directed_by": [
       "Frank Marshall"
@@ -11504,12 +11494,13 @@
       "Adventure Film",
       "Family",
       "Drama"
-    ],
-    "initial_release_date": "2006-02-17"
+    ]
   },
   {
     "id": "/en/eight_crazy_nights",
-    "name": "Eight Crazy Nights",
+    "directed_by": [
+      "Seth Kearsley"
+    ],
     "initial_release_date": "2002-11-27",
     "genre": [
       "Christmas movie",
@@ -11518,13 +11509,13 @@
       "Musical comedy",
       "Comedy"
     ],
-    "directed_by": [
-      "Seth Kearsley"
-    ]
+    "name": "Eight Crazy Nights"
   },
   {
     "id": "/en/eight_legged_freaks",
-    "name": "Eight Legged Freaks",
+    "directed_by": [
+      "Ellory Elkayem"
+    ],
     "initial_release_date": "2002-05-30",
     "genre": [
       "Horror",
@@ -11537,13 +11528,13 @@
       "Thriller",
       "Horror comedy"
     ],
-    "directed_by": [
-      "Ellory Elkayem"
-    ]
+    "name": "Eight Legged Freaks"
   },
   {
     "id": "/en/ek_ajnabee",
-    "name": "Ek Ajnabee",
+    "directed_by": [
+      "Apoorva Lakhia"
+    ],
     "initial_release_date": "2005-12-09",
     "genre": [
       "Action Film",
@@ -11553,13 +11544,13 @@
       "Drama",
       "Bollywood"
     ],
-    "directed_by": [
-      "Apoorva Lakhia"
-    ]
+    "name": "Ek Ajnabee"
   },
   {
     "id": "/en/eklavya_the_royal_guard",
-    "name": "Eklavya: The Royal Guard",
+    "directed_by": [
+      "Vidhu Vinod Chopra"
+    ],
     "initial_release_date": "2007-02-16",
     "genre": [
       "Historical drama",
@@ -11570,13 +11561,13 @@
       "Bollywood",
       "World cinema"
     ],
-    "directed_by": [
-      "Vidhu Vinod Chopra"
-    ]
+    "name": "Eklavya: The Royal Guard"
   },
   {
     "id": "/en/el_abrazo_partido",
-    "name": "Lost Embrace",
+    "directed_by": [
+      "Daniel Burman"
+    ],
     "initial_release_date": "2004-02-09",
     "genre": [
       "Indie film",
@@ -11584,38 +11575,38 @@
       "Comedy-drama",
       "Drama"
     ],
-    "directed_by": [
-      "Daniel Burman"
-    ]
+    "name": "Lost Embrace"
   },
   {
     "id": "/en/el_aura",
-    "name": "El Aura",
+    "directed_by": [
+      "Fabi\u00e1n Bielinsky"
+    ],
     "initial_release_date": "2005-09-15",
     "genre": [
       "Thriller",
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Fabi\u00e1n Bielinsky"
-    ]
+    "name": "El Aura"
   },
   {
     "id": "/en/el_crimen_del_padre_amaro",
-    "name": "The Crime of Father Amaro",
+    "directed_by": [
+      "Carlos Carrera"
+    ],
     "initial_release_date": "2002-08-16",
     "genre": [
       "Romance Film",
       "Drama"
     ],
-    "directed_by": [
-      "Carlos Carrera"
-    ]
+    "name": "The Crime of Father Amaro"
   },
   {
     "id": "/en/el_juego_de_arcibel",
-    "name": "El juego de Arcibel",
+    "directed_by": [
+      "Alberto Lecchi"
+    ],
     "initial_release_date": "2003-05-29",
     "genre": [
       "Indie film",
@@ -11623,13 +11614,13 @@
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Alberto Lecchi"
-    ]
+    "name": "El juego de Arcibel"
   },
   {
     "id": "/wikipedia/en_title/El_Muerto_$0028film$0029",
-    "name": "El Muerto",
+    "directed_by": [
+      "Brian Cox"
+    ],
     "genre": [
       "Indie film",
       "Supernatural",
@@ -11637,24 +11628,24 @@
       "Superhero movie",
       "Action/Adventure"
     ],
-    "directed_by": [
-      "Brian Cox"
-    ]
+    "name": "El Muerto"
   },
   {
     "id": "/en/el_principio_de_arquimedes",
-    "name": "The Archimedes Principle",
+    "directed_by": [
+      "Gerardo Herrero"
+    ],
     "initial_release_date": "2004-03-26",
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Gerardo Herrero"
-    ]
+    "name": "The Archimedes Principle"
   },
   {
     "id": "/en/el_raton_perez",
-    "name": "The Hairy Tooth Fairy",
+    "directed_by": [
+      "Juan Pablo Buscarini"
+    ],
     "initial_release_date": "2006-07-13",
     "genre": [
       "Fantasy",
@@ -11662,39 +11653,40 @@
       "Comedy",
       "Family"
     ],
-    "directed_by": [
-      "Juan Pablo Buscarini"
-    ]
+    "name": "The Hairy Tooth Fairy"
   },
   {
     "id": "/en/election_2005",
-    "name": "Election",
+    "directed_by": [
+      "Johnnie To"
+    ],
     "initial_release_date": "2005-05-14",
     "genre": [
       "Crime Fiction",
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Johnnie To"
-    ]
+    "name": "Election"
   },
   {
     "id": "/en/election_2",
-    "name": "Election 2",
+    "directed_by": [
+      "Johnnie To"
+    ],
     "initial_release_date": "2006-04-04",
     "genre": [
       "Thriller",
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Johnnie To"
-    ]
+    "name": "Election 2"
   },
   {
     "id": "/en/daft_punks_electroma",
-    "name": "Daft Punk's Electroma",
+    "directed_by": [
+      "Thomas Bangalter",
+      "Guy-Manuel de Homem-Christo"
+    ],
     "initial_release_date": "2006-05-21",
     "genre": [
       "Indie film",
@@ -11706,14 +11698,13 @@
       "Road movie",
       "Drama"
     ],
-    "directed_by": [
-      "Thomas Bangalter",
-      "Guy-Manuel de Homem-Christo"
-    ]
+    "name": "Daft Punk's Electroma"
   },
   {
     "id": "/en/elektra_2005",
-    "name": "Elektra",
+    "directed_by": [
+      "Rob Bowman"
+    ],
     "initial_release_date": "2005-01-08",
     "genre": [
       "Action Film",
@@ -11724,13 +11715,13 @@
       "Fantasy",
       "Crime Fiction"
     ],
-    "directed_by": [
-      "Rob Bowman"
-    ]
+    "name": "Elektra"
   },
   {
     "id": "/en/elephant_2003",
-    "name": "Elephant",
+    "directed_by": [
+      "Gus Van Sant"
+    ],
     "initial_release_date": "2003-05-18",
     "genre": [
       "Teen film",
@@ -11739,25 +11730,25 @@
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Gus Van Sant"
-    ]
+    "name": "Elephant"
   },
   {
     "id": "/en/elephants_dream",
-    "name": "Elephants Dream",
+    "directed_by": [
+      "Bassam Kurdali"
+    ],
     "initial_release_date": "2006-03-24",
     "genre": [
       "Short Film",
       "Computer Animation"
     ],
-    "directed_by": [
-      "Bassam Kurdali"
-    ]
+    "name": "Elephants Dream"
   },
   {
     "id": "/en/elf_2003",
-    "name": "Elf",
+    "directed_by": [
+      "Jon Favreau"
+    ],
     "initial_release_date": "2003-10-09",
     "genre": [
       "Family",
@@ -11765,13 +11756,13 @@
       "Comedy",
       "Fantasy"
     ],
-    "directed_by": [
-      "Jon Favreau"
-    ]
+    "name": "Elf"
   },
   {
     "id": "/en/elizabethtown_2005",
-    "name": "Elizabethtown",
+    "directed_by": [
+      "Cameron Crowe"
+    ],
     "initial_release_date": "2005-09-04",
     "genre": [
       "Romantic comedy",
@@ -11781,13 +11772,13 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Cameron Crowe"
-    ]
+    "name": "Elizabethtown"
   },
   {
     "id": "/en/elviras_haunted_hills",
-    "name": "Elvira's Haunted Hills",
+    "directed_by": [
+      "Sam Irvin"
+    ],
     "initial_release_date": "2001-06-23",
     "genre": [
       "Parody",
@@ -11797,13 +11788,13 @@
       "Horror comedy",
       "Comedy"
     ],
-    "directed_by": [
-      "Sam Irvin"
-    ]
+    "name": "Elvira's Haunted Hills"
   },
   {
     "id": "/en/elvis_has_left_the_building_2004",
-    "name": "Elvis Has Left the Building",
+    "directed_by": [
+      "Joel Zwick"
+    ],
     "genre": [
       "Action Film",
       "Action/Adventure",
@@ -11812,13 +11803,13 @@
       "Crime Fiction",
       "Comedy"
     ],
-    "directed_by": [
-      "Joel Zwick"
-    ]
+    "name": "Elvis Has Left the Building"
   },
   {
     "id": "/en/empire_2002",
-    "name": "Empire",
+    "directed_by": [
+      "Franc. Reyes"
+    ],
     "genre": [
       "Thriller",
       "Crime Fiction",
@@ -11827,13 +11818,13 @@
       "Drama",
       "Action Thriller"
     ],
-    "directed_by": [
-      "Franc. Reyes"
-    ]
+    "name": "Empire"
   },
   {
     "id": "/en/employee_of_the_month_2004",
-    "name": "Employee of the Month",
+    "directed_by": [
+      "Mitch Rouse"
+    ],
     "initial_release_date": "2004-01-17",
     "genre": [
       "Black comedy",
@@ -11841,26 +11832,26 @@
       "Heist film",
       "Comedy"
     ],
-    "directed_by": [
-      "Mitch Rouse"
-    ]
+    "name": "Employee of the Month"
   },
   {
     "id": "/en/employee_of_the_month",
-    "name": "Employee of the Month",
+    "directed_by": [
+      "Greg Coolidge"
+    ],
     "initial_release_date": "2006-10-06",
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy"
     ],
-    "directed_by": [
-      "Greg Coolidge"
-    ]
+    "name": "Employee of the Month"
   },
   {
     "id": "/en/empress_chung",
-    "name": "Empress Chung",
+    "directed_by": [
+      "Nelson Shin"
+    ],
     "initial_release_date": "2005-08-12",
     "genre": [
       "Animation",
@@ -11868,38 +11859,38 @@
       "East Asian cinema",
       "World cinema"
     ],
-    "directed_by": [
-      "Nelson Shin"
-    ]
+    "name": "Empress Chung"
   },
   {
     "id": "/en/emr",
-    "name": "EMR",
+    "directed_by": [
+      "Danny McCullough",
+      "James Erskine"
+    ],
     "initial_release_date": "2004-03-08",
     "genre": [
       "Thriller",
       "Mystery",
       "Psychological thriller"
     ],
-    "directed_by": [
-      "Danny McCullough",
-      "James Erskine"
-    ]
+    "name": "EMR"
   },
   {
     "id": "/en/en_route",
-    "name": "En Route",
+    "directed_by": [
+      "Jan Kr\u00fcger"
+    ],
     "initial_release_date": "2004-06-17",
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Jan Kr\u00fcger"
-    ]
+    "name": "En Route"
   },
   {
     "id": "/en/enakku_20_unakku_18",
-    "name": "Enakku 20 Unakku 18",
+    "directed_by": [
+      "Jyothi Krishna"
+    ],
     "initial_release_date": "2003-12-19",
     "genre": [
       "Musical",
@@ -11907,13 +11898,13 @@
       "Drama",
       "Musical Drama"
     ],
-    "directed_by": [
-      "Jyothi Krishna"
-    ]
+    "name": "Enakku 20 Unakku 18"
   },
   {
     "id": "/en/enchanted_2007",
-    "name": "Enchanted",
+    "directed_by": [
+      "Kevin Lima"
+    ],
     "initial_release_date": "2007-10-20",
     "genre": [
       "Musical",
@@ -11927,13 +11918,13 @@
       "Musical comedy",
       "Musical Drama"
     ],
-    "directed_by": [
-      "Kevin Lima"
-    ]
+    "name": "Enchanted"
   },
   {
     "id": "/en/end_of_the_spear",
-    "name": "End of the Spear",
+    "directed_by": [
+      "Jim Hanon"
+    ],
     "genre": [
       "Docudrama",
       "Christian film",
@@ -11944,13 +11935,13 @@
       "Inspirational Drama",
       "Drama"
     ],
-    "directed_by": [
-      "Jim Hanon"
-    ]
+    "name": "End of the Spear"
   },
   {
     "id": "/en/enduring_love",
-    "name": "Enduring Love",
+    "directed_by": [
+      "Roger Michell"
+    ],
     "initial_release_date": "2004-09-04",
     "genre": [
       "Thriller",
@@ -11961,13 +11952,13 @@
       "Psychological thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Roger Michell"
-    ]
+    "name": "Enduring Love"
   },
   {
     "id": "/en/enemy_at_the_gates",
-    "name": "Enemy at the Gates",
+    "directed_by": [
+      "Jean-Jacques Annaud"
+    ],
     "initial_release_date": "2001-02-07",
     "genre": [
       "War film",
@@ -11977,13 +11968,13 @@
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Jean-Jacques Annaud"
-    ]
+    "name": "Enemy at the Gates"
   },
   {
     "id": "/en/enigma_2001",
-    "name": "Enigma",
+    "directed_by": [
+      "Michael Apted"
+    ],
     "initial_release_date": "2001-01-22",
     "genre": [
       "Thriller",
@@ -11993,25 +11984,25 @@
       "Mystery",
       "Drama"
     ],
-    "directed_by": [
-      "Michael Apted"
-    ]
+    "name": "Enigma"
   },
   {
     "id": "/en/enigma_the_best_of_jeff_hardy",
-    "name": "Enigma: The Best of Jeff Hardy",
+    "directed_by": [
+      "Craig Leathers"
+    ],
     "initial_release_date": "2005-10-04",
     "genre": [
       "Sports",
       "Action Film"
     ],
-    "directed_by": [
-      "Craig Leathers"
-    ]
+    "name": "Enigma: The Best of Jeff Hardy"
   },
   {
     "id": "/en/enron_the_smartest_guys_in_the_room",
-    "name": "Enron: The Smartest Guys in the Room",
+    "directed_by": [
+      "Alex Gibney"
+    ],
     "initial_release_date": "2005-04-22",
     "genre": [
       "Documentary film",
@@ -12023,26 +12014,26 @@
       "Law &amp; Crime",
       "Biographical film"
     ],
-    "directed_by": [
-      "Alex Gibney"
-    ]
+    "name": "Enron: The Smartest Guys in the Room"
   },
   {
     "id": "/en/envy_2004",
-    "name": "Envy",
+    "directed_by": [
+      "Barry Levinson"
+    ],
     "initial_release_date": "2004-04-30",
     "genre": [
       "Black comedy",
       "Cult film",
       "Comedy"
     ],
-    "directed_by": [
-      "Barry Levinson"
-    ]
+    "name": "Envy"
   },
   {
     "id": "/en/equilibrium_2002",
-    "name": "Equilibrium",
+    "directed_by": [
+      "Kurt Wimmer"
+    ],
     "initial_release_date": "2002-12-06",
     "genre": [
       "Science Fiction",
@@ -12052,13 +12043,13 @@
       "Action Film",
       "Drama"
     ],
-    "directed_by": [
-      "Kurt Wimmer"
-    ]
+    "name": "Equilibrium"
   },
   {
     "id": "/en/eragon_2006",
-    "name": "Eragon",
+    "directed_by": [
+      "Stefen Fangmeier"
+    ],
     "initial_release_date": "2006-12-13",
     "genre": [
       "Family",
@@ -12068,13 +12059,13 @@
       "Action Film",
       "Drama"
     ],
-    "directed_by": [
-      "Stefen Fangmeier"
-    ]
+    "name": "Eragon"
   },
   {
     "id": "/en/erin_brockovich_2000",
-    "name": "Erin Brockovich",
+    "directed_by": [
+      "Steven Soderbergh"
+    ],
     "initial_release_date": "2000-03-14",
     "genre": [
       "Biographical film",
@@ -12087,28 +12078,28 @@
       "Drama",
       "Drama film"
     ],
-    "directed_by": [
-      "Steven Soderbergh"
-    ]
+    "name": "Erin Brockovich"
   },
   {
     "id": "/en/eros_2004",
-    "name": "Eros",
+    "directed_by": [
+      "Michelangelo Antonioni",
+      "Steven Soderbergh",
+      "Wong Kar-wai"
+    ],
     "initial_release_date": "2004-09-10",
     "genre": [
       "Romance Film",
       "Erotica",
       "Drama"
     ],
-    "directed_by": [
-      "Michelangelo Antonioni",
-      "Steven Soderbergh",
-      "Wong Kar-wai"
-    ]
+    "name": "Eros"
   },
   {
     "id": "/en/escaflowne",
-    "name": "Escaflowne",
+    "directed_by": [
+      "Kazuki Akane"
+    ],
     "initial_release_date": "2000-06-24",
     "genre": [
       "Adventure Film",
@@ -12120,36 +12111,36 @@
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Kazuki Akane"
-    ]
+    "name": "Escaflowne"
   },
   {
     "id": "/en/escape_2006",
-    "name": "A Few Days Later",
+    "directed_by": [
+      "Niki Karimi"
+    ],
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Niki Karimi"
-    ]
+    "name": "A Few Days Later"
   },
   {
     "id": "/en/eternal_sunshine_of_the_spotless_mind",
-    "name": "Eternal Sunshine of the Spotless Mind",
+    "directed_by": [
+      "Michel Gondry"
+    ],
     "initial_release_date": "2004-03-19",
     "genre": [
       "Romance Film",
       "Science Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Michel Gondry"
-    ]
+    "name": "Eternal Sunshine of the Spotless Mind"
   },
   {
     "id": "/en/eulogy_2004",
-    "name": "Eulogy",
+    "directed_by": [
+      "Michael Clancy"
+    ],
     "initial_release_date": "2004-10-15",
     "genre": [
       "LGBT",
@@ -12157,13 +12148,15 @@
       "Indie film",
       "Comedy"
     ],
-    "directed_by": [
-      "Michael Clancy"
-    ]
+    "name": "Eulogy"
   },
   {
     "id": "/en/eurotrip",
-    "name": "EuroTrip",
+    "directed_by": [
+      "Jeff Schaffer",
+      "Alec Berg",
+      "David Mandel"
+    ],
     "initial_release_date": "2004-02-20",
     "genre": [
       "Sex comedy",
@@ -12171,15 +12164,13 @@
       "Teen film",
       "Comedy"
     ],
-    "directed_by": [
-      "Jeff Schaffer",
-      "Alec Berg",
-      "David Mandel"
-    ]
+    "name": "EuroTrip"
   },
   {
     "id": "/en/evan_almighty",
-    "name": "Evan Almighty",
+    "directed_by": [
+      "Tom Shadyac"
+    ],
     "initial_release_date": "2007-06-21",
     "genre": [
       "Religious Film",
@@ -12190,50 +12181,52 @@
       "Heavenly Comedy",
       "Comedy"
     ],
-    "directed_by": [
-      "Tom Shadyac"
-    ]
+    "name": "Evan Almighty"
   },
   {
     "id": "/en/everlasting_regret",
-    "name": "Everlasting Regret",
+    "directed_by": [
+      "Stanley Kwan"
+    ],
     "initial_release_date": "2005-09-08",
     "genre": [
       "Romance Film",
       "Chinese Movies",
       "Drama"
     ],
-    "directed_by": [
-      "Stanley Kwan"
-    ]
+    "name": "Everlasting Regret"
   },
   {
     "id": "/en/everybody_famous",
-    "name": "Everybody's Famous!",
+    "directed_by": [
+      "Dominique Deruddere"
+    ],
     "initial_release_date": "2000-04-12",
     "genre": [
       "World cinema",
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Dominique Deruddere"
-    ]
+    "name": "Everybody's Famous!"
   },
   {
     "id": "/en/everymans_feast",
-    "name": "Everyman's Feast",
+    "directed_by": [
+      "Fritz Lehner"
+    ],
     "initial_release_date": "2002-01-25",
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Fritz Lehner"
-    ]
+    "name": "Everyman's Feast"
   },
   {
     "id": "/en/everyones_hero",
-    "name": "Everyone's Hero",
+    "directed_by": [
+      "Christopher Reeve",
+      "Daniel St. Pierre",
+      "Colin Brady"
+    ],
     "initial_release_date": "2006-09-15",
     "genre": [
       "Computer Animation",
@@ -12244,36 +12237,34 @@
       "Children's/Family",
       "Family-Oriented Adventure"
     ],
-    "directed_by": [
-      "Christopher Reeve",
-      "Daniel St. Pierre",
-      "Colin Brady"
-    ]
+    "name": "Everyone's Hero"
   },
   {
     "id": "/en/everything_2005",
-    "name": "Everything",
+    "directed_by": [],
     "initial_release_date": "2005-11-22",
     "genre": [
       "Music video"
     ],
-    "directed_by": []
+    "name": "Everything"
   },
   {
     "id": "/en/everything_goes",
-    "name": "Everything Goes",
+    "directed_by": [
+      "Andrew Kotatko"
+    ],
     "initial_release_date": "2004-06-14",
     "genre": [
       "Short Film",
       "Drama"
     ],
-    "directed_by": [
-      "Andrew Kotatko"
-    ]
+    "name": "Everything Goes"
   },
   {
     "id": "/en/everything_is_illuminated_2005",
-    "name": "Everything Is Illuminated",
+    "directed_by": [
+      "Liev Schreiber"
+    ],
     "initial_release_date": "2005-09-16",
     "genre": [
       "Adventure Film",
@@ -12284,26 +12275,26 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Liev Schreiber"
-    ]
+    "name": "Everything Is Illuminated"
   },
   {
     "id": "/en/evilenko",
-    "name": "Evilenko",
+    "directed_by": [
+      "David Grieco"
+    ],
     "initial_release_date": "2004-04-16",
     "genre": [
       "Thriller",
       "Horror",
       "Crime Fiction"
     ],
-    "directed_by": [
-      "David Grieco"
-    ]
+    "name": "Evilenko"
   },
   {
     "id": "/en/evolution_2001",
-    "name": "Evolution",
+    "directed_by": [
+      "Ivan Reitman"
+    ],
     "initial_release_date": "2001-06-08",
     "genre": [
       "Science Fiction",
@@ -12312,13 +12303,13 @@
       "Action/Adventure",
       "Comedy"
     ],
-    "directed_by": [
-      "Ivan Reitman"
-    ]
+    "name": "Evolution"
   },
   {
     "id": "/en/exit_wounds",
-    "name": "Exit Wounds",
+    "directed_by": [
+      "Andrzej Bartkowiak"
+    ],
     "initial_release_date": "2001-03-16",
     "genre": [
       "Action Film",
@@ -12328,13 +12319,13 @@
       "Thriller",
       "Crime Fiction"
     ],
-    "directed_by": [
-      "Andrzej Bartkowiak"
-    ]
+    "name": "Exit Wounds"
   },
   {
     "id": "/en/exorcist_the_beginning",
-    "name": "Exorcist: The Beginning",
+    "directed_by": [
+      "Renny Harlin"
+    ],
     "initial_release_date": "2004-08-18",
     "genre": [
       "Horror",
@@ -12343,13 +12334,13 @@
       "Cult film",
       "Historical period drama"
     ],
-    "directed_by": [
-      "Renny Harlin"
-    ]
+    "name": "Exorcist: The Beginning"
   },
   {
     "id": "/en/extreme_days",
-    "name": "Extreme Days",
+    "directed_by": [
+      "Eric Hannah"
+    ],
     "initial_release_date": "2001-09-28",
     "genre": [
       "Comedy-drama",
@@ -12360,13 +12351,13 @@
       "Teen film",
       "Sports"
     ],
-    "directed_by": [
-      "Eric Hannah"
-    ]
+    "name": "Extreme Days"
   },
   {
     "id": "/en/extreme_ops",
-    "name": "Extreme Ops",
+    "directed_by": [
+      "Christian Duguay"
+    ],
     "initial_release_date": "2002-11-27",
     "genre": [
       "Action Film",
@@ -12377,13 +12368,13 @@
       "Action Thriller",
       "Chase Movie"
     ],
-    "directed_by": [
-      "Christian Duguay"
-    ]
+    "name": "Extreme Ops"
   },
   {
     "id": "/en/face_2004",
-    "name": "Face",
+    "directed_by": [
+      "Yoo Sang-gon"
+    ],
     "initial_release_date": "2004-06-11",
     "genre": [
       "Horror",
@@ -12392,25 +12383,25 @@
       "East Asian cinema",
       "World cinema"
     ],
-    "directed_by": [
-      "Yoo Sang-gon"
-    ]
+    "name": "Face"
   },
   {
     "id": "/en/la_finestra_di_fronte",
-    "name": "Facing Windows",
+    "directed_by": [
+      "Ferzan \u00d6zpetek"
+    ],
     "initial_release_date": "2003-02-28",
     "genre": [
       "Romance Film",
       "Drama"
     ],
-    "directed_by": [
-      "Ferzan \u00d6zpetek"
-    ]
+    "name": "Facing Windows"
   },
   {
     "id": "/en/factory_girl",
-    "name": "Factory Girl",
+    "directed_by": [
+      "George Hickenlooper"
+    ],
     "initial_release_date": "2006-12-29",
     "genre": [
       "Biographical film",
@@ -12418,13 +12409,13 @@
       "Historical period drama",
       "Drama"
     ],
-    "directed_by": [
-      "George Hickenlooper"
-    ]
+    "name": "Factory Girl"
   },
   {
     "id": "/en/fahrenheit_9_11",
-    "name": "Fahrenheit 9/11",
+    "directed_by": [
+      "Michael Moore"
+    ],
     "initial_release_date": "2004-05-17",
     "genre": [
       "Indie film",
@@ -12434,23 +12425,23 @@
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Michael Moore"
-    ]
+    "name": "Fahrenheit 9/11"
   },
   {
     "id": "/en/fahrenheit_9_111_2",
-    "name": "Fahrenheit 9/11\u00bd",
+    "directed_by": [
+      "Michael Moore"
+    ],
     "genre": [
       "Documentary film"
     ],
-    "directed_by": [
-      "Michael Moore"
-    ]
+    "name": "Fahrenheit 9/11\u00bd"
   },
   {
     "id": "/en/fail_safe_2000",
-    "name": "Fail Safe",
+    "directed_by": [
+      "Stephen Frears"
+    ],
     "initial_release_date": "2000-04-09",
     "genre": [
       "Thriller",
@@ -12462,74 +12453,74 @@
       "Political drama",
       "Drama"
     ],
-    "directed_by": [
-      "Stephen Frears"
-    ]
+    "name": "Fail Safe"
   },
   {
     "id": "/en/failan",
-    "name": "Failan",
+    "directed_by": [
+      "Song Hae-sung"
+    ],
     "initial_release_date": "2001-04-28",
     "genre": [
       "Romance Film",
       "World cinema",
       "Drama"
     ],
-    "directed_by": [
-      "Song Hae-sung"
-    ]
+    "name": "Failan"
   },
   {
     "id": "/en/failure_to_launch",
-    "name": "Failure to Launch",
+    "directed_by": [
+      "Tom Dey"
+    ],
     "initial_release_date": "2006-03-10",
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy"
     ],
-    "directed_by": [
-      "Tom Dey"
-    ]
+    "name": "Failure to Launch"
   },
   {
     "id": "/en/fake_2003",
-    "name": "Fake",
+    "directed_by": [
+      "Thanakorn Pongsuwan"
+    ],
     "initial_release_date": "2003-04-28",
     "genre": [
       "Romance Film"
     ],
-    "directed_by": [
-      "Thanakorn Pongsuwan"
-    ]
+    "name": "Fake"
   },
   {
     "id": "/en/falcons_2002",
-    "name": "Falcons",
+    "directed_by": [
+      "Fri\u00f0rik \u00de\u00f3r Fri\u00f0riksson"
+    ],
     "genre": [
       "Drama"
     ],
-    "directed_by": [
-      "Fri\u00f0rik \u00de\u00f3r Fri\u00f0riksson"
-    ]
+    "name": "Falcons"
   },
   {
     "id": "/en/fallen_2006",
-    "name": "Fallen",
+    "directed_by": [
+      "Mikael Salomon",
+      "Kevin Kerslake"
+    ],
     "genre": [
       "Science Fiction",
       "Fantasy",
       "Action/Adventure",
       "Drama"
     ],
-    "directed_by": [
-      "Mikael Salomon",
-      "Kevin Kerslake"
-    ]
+    "name": "Fallen"
   },
   {
     "id": "/en/family_-_ties_of_blood",
-    "name": "Family",
+    "directed_by": [
+      "Rajkumar Santoshi"
+    ],
     "initial_release_date": "2006-01-11",
     "genre": [
       "Musical",
@@ -12540,31 +12531,23 @@
       "Drama",
       "Musical Drama"
     ],
-    "directed_by": [
-      "Rajkumar Santoshi"
-    ]
+    "name": "Family"
   },
   {
     "id": "/en/familywala",
-    "name": "Familywala",
+    "directed_by": [
+      "Neeraj Vora"
+    ],
     "genre": [
       "Comedy",
       "Drama",
       "Bollywood",
       "World cinema"
     ],
-    "directed_by": [
-      "Neeraj Vora"
-    ]
+    "name": "Familywala"
   },
   {
     "id": "/en/fan_chan",
-    "name": "Fan Chan",
-    "initial_release_date": "2003-10-03",
-    "genre": [
-      "Comedy",
-      "Romance Film"
-    ],
     "directed_by": [
       "Vitcha Gojiew",
       "Witthaya Thongyooyong",
@@ -12572,11 +12555,19 @@
       "Nithiwat Tharathorn",
       "Songyos Sugmakanan",
       "Adisorn Tresirikasem"
-    ]
+    ],
+    "initial_release_date": "2003-10-03",
+    "genre": [
+      "Comedy",
+      "Romance Film"
+    ],
+    "name": "Fan Chan"
   },
   {
     "id": "/en/fanaa",
-    "name": "Fanaa",
+    "directed_by": [
+      "Kunal Kohli"
+    ],
     "initial_release_date": "2006-05-26",
     "genre": [
       "Thriller",
@@ -12586,13 +12577,13 @@
       "Musical Drama",
       "Drama"
     ],
-    "directed_by": [
-      "Kunal Kohli"
-    ]
+    "name": "Fanaa"
   },
   {
     "id": "/en/fantastic_four_2005",
-    "name": "Fantastic Four",
+    "directed_by": [
+      "Tim Story"
+    ],
     "initial_release_date": "2005-06-29",
     "genre": [
       "Fantasy",
@@ -12600,13 +12591,13 @@
       "Adventure Film",
       "Action Film"
     ],
-    "directed_by": [
-      "Tim Story"
-    ]
+    "name": "Fantastic Four"
   },
   {
     "id": "/en/fantastic_four_and_the_silver_surfer",
-    "name": "Fantastic Four: Rise of the Silver Surfer",
+    "directed_by": [
+      "Tim Story"
+    ],
     "initial_release_date": "2007-06-12",
     "genre": [
       "Fantasy",
@@ -12614,13 +12605,13 @@
       "Action Film",
       "Thriller"
     ],
-    "directed_by": [
-      "Tim Story"
-    ]
+    "name": "Fantastic Four: Rise of the Silver Surfer"
   },
   {
     "id": "/en/fantastic_mr_fox_2007",
-    "name": "Fantastic Mr. Fox",
+    "directed_by": [
+      "Wes Anderson"
+    ],
     "initial_release_date": "2009-10-14",
     "genre": [
       "Animation",
@@ -12628,24 +12619,24 @@
       "Comedy",
       "Family"
     ],
-    "directed_by": [
-      "Wes Anderson"
-    ]
+    "name": "Fantastic Mr. Fox"
   },
   {
     "id": "/en/faq_frequently_asked_questions",
-    "name": "FAQ: Frequently Asked Questions",
+    "directed_by": [
+      "Carlos Atanes"
+    ],
     "initial_release_date": "2004-10-12",
     "genre": [
       "Science Fiction"
     ],
-    "directed_by": [
-      "Carlos Atanes"
-    ]
+    "name": "FAQ: Frequently Asked Questions"
   },
   {
     "id": "/en/far_cry_2008",
-    "name": "Far Cry",
+    "directed_by": [
+      "Uwe Boll"
+    ],
     "initial_release_date": "2008-10-02",
     "genre": [
       "Action Film",
@@ -12653,50 +12644,50 @@
       "Thriller",
       "Adventure Film"
     ],
-    "directed_by": [
-      "Uwe Boll"
-    ]
+    "name": "Far Cry"
   },
   {
     "id": "/en/far_from_heaven",
-    "name": "Far from Heaven",
+    "directed_by": [
+      "Todd Haynes"
+    ],
     "initial_release_date": "2002-09-01",
     "genre": [
       "Romance Film",
       "Melodrama",
       "Drama"
     ],
-    "directed_by": [
-      "Todd Haynes"
-    ]
+    "name": "Far from Heaven"
   },
   {
     "id": "/en/farce_of_the_penguins",
-    "name": "Farce of the Penguins",
+    "directed_by": [
+      "Bob Saget"
+    ],
     "genre": [
       "Parody",
       "Mockumentary",
       "Adventure Comedy",
       "Comedy"
     ],
-    "directed_by": [
-      "Bob Saget"
-    ]
+    "name": "Farce of the Penguins"
   },
   {
     "id": "/en/eagles_farewell_1_tour_live_from_melbourne",
-    "name": "Eagles: Farewell 1 Tour-Live from Melbourne",
+    "directed_by": [
+      "Carol Dodds"
+    ],
     "initial_release_date": "2005-06-14",
     "genre": [
       "Music video"
     ],
-    "directed_by": [
-      "Carol Dodds"
-    ]
+    "name": "Eagles: Farewell 1 Tour-Live from Melbourne"
   },
   {
     "id": "/en/fat_albert",
-    "name": "Fat Albert",
+    "directed_by": [
+      "Joel Zwick"
+    ],
     "initial_release_date": "2004-12-12",
     "genre": [
       "Family",
@@ -12704,48 +12695,48 @@
       "Romance Film",
       "Comedy"
     ],
-    "directed_by": [
-      "Joel Zwick"
-    ]
+    "name": "Fat Albert"
   },
   {
     "id": "/en/fat_pizza_the_movie",
-    "name": "Fat Pizza",
+    "directed_by": [
+      "Paul Fenech"
+    ],
     "genre": [
       "Comedy"
     ],
-    "directed_by": [
-      "Paul Fenech"
-    ]
+    "name": "Fat Pizza"
   },
   {
     "id": "/en/fatwa_2006",
-    "name": "Fatwa",
+    "directed_by": [
+      "John Carter"
+    ],
     "initial_release_date": "2006-03-24",
     "genre": [
       "Thriller",
       "Political thriller",
       "Drama"
     ],
-    "directed_by": [
-      "John Carter"
-    ]
+    "name": "Fatwa"
   },
   {
     "id": "/en/faust_love_of_the_damned",
-    "name": "Faust: Love of the Damned",
+    "directed_by": [
+      "Brian Yuzna"
+    ],
     "initial_release_date": "2000-10-12",
     "genre": [
       "Horror",
       "Supernatural"
     ],
-    "directed_by": [
-      "Brian Yuzna"
-    ]
+    "name": "Faust: Love of the Damned"
   },
   {
     "id": "/en/fay_grim",
-    "name": "Fay Grim",
+    "directed_by": [
+      "Hal Hartley"
+    ],
     "initial_release_date": "2006-09-11",
     "genre": [
       "Thriller",
@@ -12757,26 +12748,26 @@
       "Crime Fiction",
       "Drama"
     ],
-    "directed_by": [
-      "Hal Hartley"
-    ]
+    "name": "Fay Grim"
   },
   {
     "id": "/en/fear_and_trembling_2003",
-    "name": "Fear and Trembling",
+    "directed_by": [
+      "Alain Corneau"
+    ],
     "genre": [
       "World cinema",
       "Japanese Movies",
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Alain Corneau"
-    ]
+    "name": "Fear and Trembling"
   },
   {
     "id": "/en/fear_of_the_dark_2006",
-    "name": "Fear of the Dark",
+    "directed_by": [
+      "Glen Baisley"
+    ],
     "initial_release_date": "2001-10-06",
     "genre": [
       "Horror",
@@ -12785,25 +12776,25 @@
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Glen Baisley"
-    ]
+    "name": "Fear of the Dark"
   },
   {
     "id": "/en/fear_x",
-    "name": "Fear X",
+    "directed_by": [
+      "Nicolas Winding Refn"
+    ],
     "initial_release_date": "2003-01-19",
     "genre": [
       "Psychological thriller",
       "Thriller"
     ],
-    "directed_by": [
-      "Nicolas Winding Refn"
-    ]
+    "name": "Fear X"
   },
   {
     "id": "/en/feardotcom",
-    "name": "FeardotCom",
+    "directed_by": [
+      "William Malone"
+    ],
     "initial_release_date": "2002-08-09",
     "genre": [
       "Horror",
@@ -12811,13 +12802,13 @@
       "Thriller",
       "Mystery"
     ],
-    "directed_by": [
-      "William Malone"
-    ]
+    "name": "FeardotCom"
   },
   {
     "id": "/en/fearless",
-    "name": "Fearless",
+    "directed_by": [
+      "Ronny Yu"
+    ],
     "initial_release_date": "2006-01-26",
     "genre": [
       "Biographical film",
@@ -12825,13 +12816,13 @@
       "Sports",
       "Drama"
     ],
-    "directed_by": [
-      "Ronny Yu"
-    ]
+    "name": "Fearless"
   },
   {
     "id": "/en/feast",
-    "name": "Feast",
+    "directed_by": [
+      "John Gulager"
+    ],
     "initial_release_date": "2006-09-22",
     "genre": [
       "Horror",
@@ -12840,13 +12831,13 @@
       "Horror comedy",
       "Comedy"
     ],
-    "directed_by": [
-      "John Gulager"
-    ]
+    "name": "Feast"
   },
   {
     "id": "/en/femme_fatale_2002",
-    "name": "Femme Fatale",
+    "directed_by": [
+      "Brian De Palma"
+    ],
     "initial_release_date": "2002-04-30",
     "genre": [
       "Thriller",
@@ -12854,26 +12845,26 @@
       "Crime Fiction",
       "Erotic thriller"
     ],
-    "directed_by": [
-      "Brian De Palma"
-    ]
+    "name": "Femme Fatale"
   },
   {
     "id": "/en/festival_2005",
-    "name": "Festival",
+    "directed_by": [
+      "Annie Griffin"
+    ],
     "initial_release_date": "2005-07-15",
     "genre": [
       "Black comedy",
       "Parody",
       "Comedy"
     ],
-    "directed_by": [
-      "Annie Griffin"
-    ]
+    "name": "Festival"
   },
   {
     "id": "/en/festival_express",
-    "name": "Festival Express",
+    "directed_by": [
+      "Bob Smeaton"
+    ],
     "genre": [
       "Documentary film",
       "Concert film",
@@ -12883,13 +12874,13 @@
       "Rockumentary",
       "Music"
     ],
-    "directed_by": [
-      "Bob Smeaton"
-    ]
+    "name": "Festival Express"
   },
   {
     "id": "/en/festival_in_cannes",
-    "name": "Festival in Cannes",
+    "directed_by": [
+      "Henry Jaglom"
+    ],
     "initial_release_date": "2001-11-03",
     "genre": [
       "Mockumentary",
@@ -12899,13 +12890,14 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Henry Jaglom"
-    ]
+    "name": "Festival in Cannes"
   },
   {
     "id": "/en/fever_pitch_2005",
-    "name": "Fever Pitch",
+    "directed_by": [
+      "Bobby Farrelly",
+      "Peter Farrelly"
+    ],
     "initial_release_date": "2005-04-06",
     "genre": [
       "Romance Film",
@@ -12913,14 +12905,13 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Bobby Farrelly",
-      "Peter Farrelly"
-    ]
+    "name": "Fever Pitch"
   },
   {
     "id": "/en/fida",
-    "name": "Fida",
+    "directed_by": [
+      "Ken Ghosh"
+    ],
     "initial_release_date": "2004-08-20",
     "genre": [
       "Romance Film",
@@ -12928,13 +12919,13 @@
       "Thriller",
       "Drama"
     ],
-    "directed_by": [
-      "Ken Ghosh"
-    ]
+    "name": "Fida"
   },
   {
     "id": "/en/fido_2006",
-    "name": "Fido",
+    "directed_by": [
+      "Andrew Currie"
+    ],
     "initial_release_date": "2006-09-07",
     "genre": [
       "Horror",
@@ -12944,83 +12935,57 @@
       "Comedy",
       "Drama"
     ],
-    "directed_by": [
-      "Andrew Currie"
-    ]
+    "name": "Fido"
   },
   {
+    "id": "/en/fighter_in_the_wind",
+    "initial_release_date": "2004-08-06",
+    "name": "Fighter in the Wind",
+    "directed_by": [
+      "Yang Yun-ho",
+      "Yang Yun-ho"
+    ],
     "genre": [
       "Action/Adventure",
       "Action Film",
       "War film",
       "Biographical film",
       "Drama"
-    ],
-    "name": "Fighter in the Wind",
-    "initial_release_date": "2004-08-06",
-    "id": "/en/fighter_in_the_wind",
-    "directed_by": [
-      "Yang Yun-ho",
-      "Yang Yun-ho"
     ]
   },
   {
+    "id": "/en/filantropica",
+    "initial_release_date": "2002-03-15",
+    "name": "Filantropica",
+    "directed_by": [
+      "Nae Caranfil"
+    ],
     "genre": [
       "Comedy",
       "Black comedy",
       "Drama"
-    ],
-    "name": "Filantropica",
-    "initial_release_date": "2002-03-15",
-    "id": "/en/filantropica",
-    "directed_by": [
-      "Nae Caranfil"
     ]
   },
   {
+    "id": "/en/film_geek",
+    "initial_release_date": "2006-02-10",
+    "name": "Film Geek",
+    "directed_by": [
+      "James Westby"
+    ],
     "genre": [
       "Indie film",
       "Workplace Comedy",
       "Comedy"
-    ],
-    "name": "Film Geek",
-    "initial_release_date": "2006-02-10",
-    "id": "/en/film_geek",
-    "directed_by": [
-      "James Westby"
     ]
   },
   {
-    "genre": [
-      "Slasher",
-      "Teen film",
-      "Supernatural",
-      "Horror",
-      "Cult film",
-      "Thriller"
-    ],
-    "name": "Final Destination",
-    "initial_release_date": "2000-03-16",
     "id": "/en/final_destination",
+    "initial_release_date": "2000-03-16",
+    "name": "Final Destination",
     "directed_by": [
       "James Wong"
-    ]
-  },
-  {
-    "genre": [
-      "Slasher",
-      "Teen film",
-      "Horror",
-      "Thriller"
     ],
-    "name": "Final Destination 3",
-    "initial_release_date": "2006-02-09",
-    "id": "/en/final_destination_3",
-    "directed_by": [
-      "James Wong"
-    ]
-  },
-  {
     "genre": [
       "Slasher",
       "Teen film",
@@ -13028,31 +12993,62 @@
       "Horror",
       "Cult film",
       "Thriller"
+    ]
+  },
+  {
+    "id": "/en/final_destination_3",
+    "initial_release_date": "2006-02-09",
+    "name": "Final Destination 3",
+    "directed_by": [
+      "James Wong"
     ],
-    "name": "Final Destination 2",
-    "initial_release_date": "2003-01-30",
+    "genre": [
+      "Slasher",
+      "Teen film",
+      "Horror",
+      "Thriller"
+    ]
+  },
+  {
     "id": "/en/final_destination_2",
+    "initial_release_date": "2003-01-30",
+    "name": "Final Destination 2",
     "directed_by": [
       "David R. Ellis"
+    ],
+    "genre": [
+      "Slasher",
+      "Teen film",
+      "Supernatural",
+      "Horror",
+      "Cult film",
+      "Thriller"
     ]
   },
   {
+    "id": "/en/final_fantasy_vii_advent_children",
+    "initial_release_date": "2005-08-31",
+    "name": "Final Fantasy VII: Advent Children",
+    "directed_by": [
+      "Tetsuya Nomura",
+      "Takeshi Nozue"
+    ],
     "genre": [
       "Anime",
       "Science Fiction",
       "Animation",
       "Action Film",
       "Thriller"
-    ],
-    "name": "Final Fantasy VII: Advent Children",
-    "initial_release_date": "2005-08-31",
-    "id": "/en/final_fantasy_vii_advent_children",
-    "directed_by": [
-      "Tetsuya Nomura",
-      "Takeshi Nozue"
     ]
   },
   {
+    "id": "/en/final_fantasy_the_spirits_within",
+    "initial_release_date": "2001-07-02",
+    "name": "Final Fantasy: The Spirits Within",
+    "directed_by": [
+      "Hironobu Sakaguchi",
+      "Motonori Sakakibara"
+    ],
     "genre": [
       "Science Fiction",
       "Anime",
@@ -13060,28 +13056,27 @@
       "Fantasy",
       "Action Film",
       "Adventure Film"
-    ],
-    "name": "Final Fantasy: The Spirits Within",
-    "initial_release_date": "2001-07-02",
-    "id": "/en/final_fantasy_the_spirits_within",
-    "directed_by": [
-      "Hironobu Sakaguchi",
-      "Motonori Sakakibara"
     ]
   },
   {
+    "id": "/en/final_stab",
+    "name": "Final Stab",
+    "directed_by": [
+      "David DeCoteau"
+    ],
     "genre": [
       "Horror",
       "Slasher",
       "Teen film"
-    ],
-    "name": "Final Stab",
-    "id": "/en/final_stab",
-    "directed_by": [
-      "David DeCoteau"
     ]
   },
   {
+    "id": "/en/find_me_guilty",
+    "initial_release_date": "2006-02-16",
+    "name": "Find Me Guilty",
+    "directed_by": [
+      "Sidney Lumet"
+    ],
     "genre": [
       "Crime Fiction",
       "Trial drama",
@@ -13092,72 +13087,72 @@
       "Gangster Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Find Me Guilty",
-    "initial_release_date": "2006-02-16",
-    "id": "/en/find_me_guilty",
-    "directed_by": [
-      "Sidney Lumet"
     ]
   },
   {
+    "id": "/en/finders_fee",
+    "initial_release_date": "2001-06-16",
+    "name": "Finder's Fee",
+    "directed_by": [
+      "Jeff Probst"
+    ],
     "genre": [
       "Thriller",
       "Psychological thriller",
       "Indie film",
       "Suspense",
       "Drama"
-    ],
-    "name": "Finder's Fee",
-    "initial_release_date": "2001-06-16",
-    "id": "/en/finders_fee",
-    "directed_by": [
-      "Jeff Probst"
     ]
   },
   {
+    "id": "/en/finding_nemo",
+    "initial_release_date": "2003-05-30",
+    "name": "Finding Nemo",
+    "directed_by": [
+      "Andrew Stanton",
+      "Lee Unkrich"
+    ],
     "genre": [
       "Animation",
       "Adventure Film",
       "Comedy",
       "Family"
-    ],
-    "name": "Finding Nemo",
-    "initial_release_date": "2003-05-30",
-    "id": "/en/finding_nemo",
-    "directed_by": [
-      "Andrew Stanton",
-      "Lee Unkrich"
     ]
   },
   {
+    "id": "/en/finding_neverland",
+    "initial_release_date": "2004-09-04",
+    "name": "Finding Neverland",
+    "directed_by": [
+      "Marc Forster"
+    ],
     "genre": [
       "Costume drama",
       "Historical period drama",
       "Family",
       "Biographical film",
       "Drama"
-    ],
-    "name": "Finding Neverland",
-    "initial_release_date": "2004-09-04",
-    "id": "/en/finding_neverland",
-    "directed_by": [
-      "Marc Forster"
     ]
   },
   {
+    "id": "/en/fingerprints",
+    "name": "Fingerprints",
+    "directed_by": [
+      "Harry Basil"
+    ],
     "genre": [
       "Thriller",
       "Horror",
       "Mystery"
-    ],
-    "name": "Fingerprints",
-    "id": "/en/fingerprints",
-    "directed_by": [
-      "Harry Basil"
     ]
   },
   {
+    "id": "/en/firewall_2006",
+    "initial_release_date": "2006-02-02",
+    "name": "Firewall",
+    "directed_by": [
+      "Richard Loncraine"
+    ],
     "genre": [
       "Thriller",
       "Action Film",
@@ -13165,57 +13160,57 @@
       "Action/Adventure",
       "Crime Thriller",
       "Action Thriller"
-    ],
-    "name": "Firewall",
-    "initial_release_date": "2006-02-02",
-    "id": "/en/firewall_2006",
-    "directed_by": [
-      "Richard Loncraine"
     ]
   },
   {
+    "id": "/en/first_daughter",
+    "initial_release_date": "2004-09-24",
+    "name": "First Daughter",
+    "directed_by": [
+      "Forest Whitaker"
+    ],
     "genre": [
       "Romantic comedy",
       "Teen film",
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "First Daughter",
-    "initial_release_date": "2004-09-24",
-    "id": "/en/first_daughter",
-    "directed_by": [
-      "Forest Whitaker"
     ]
   },
   {
+    "id": "/en/first_descent",
+    "initial_release_date": "2005-12-02",
+    "name": "First Descent",
+    "directed_by": [
+      "Kemp Curly",
+      "Kevin Harrison"
+    ],
     "genre": [
       "Documentary film",
       "Sports",
       "Extreme Sports",
       "Biographical film"
-    ],
-    "name": "First Descent",
-    "initial_release_date": "2005-12-02",
-    "id": "/en/first_descent",
-    "directed_by": [
-      "Kemp Curly",
-      "Kevin Harrison"
     ]
   },
   {
+    "id": "/en/fiza",
+    "initial_release_date": "2000-09-08",
+    "name": "Fiza",
+    "directed_by": [
+      "Khalid Mohamed"
+    ],
     "genre": [
       "Romance Film",
       "Drama"
-    ],
-    "name": "Fiza",
-    "initial_release_date": "2000-09-08",
-    "id": "/en/fiza",
-    "directed_by": [
-      "Khalid Mohamed"
     ]
   },
   {
+    "id": "/en/flags_of_our_fathers_2006",
+    "initial_release_date": "2006-10-20",
+    "name": "Flags of Our Fathers",
+    "directed_by": [
+      "Clint Eastwood"
+    ],
     "genre": [
       "War film",
       "History",
@@ -13223,26 +13218,26 @@
       "Film adaptation",
       "Historical drama",
       "Drama"
-    ],
-    "name": "Flags of Our Fathers",
-    "initial_release_date": "2006-10-20",
-    "id": "/en/flags_of_our_fathers_2006",
-    "directed_by": [
-      "Clint Eastwood"
     ]
   },
   {
-    "genre": [
-      "Documentary film"
-    ],
-    "name": "Flight from Death",
-    "initial_release_date": "2006-09-06",
     "id": "/en/flight_from_death",
+    "initial_release_date": "2006-09-06",
+    "name": "Flight from Death",
     "directed_by": [
       "Patrick Shen"
+    ],
+    "genre": [
+      "Documentary film"
     ]
   },
   {
+    "id": "/en/flight_of_the_phoenix",
+    "initial_release_date": "2004-12-17",
+    "name": "Flight of the Phoenix",
+    "directed_by": [
+      "John Moore"
+    ],
     "genre": [
       "Airplanes and airports",
       "Disaster Film",
@@ -13251,52 +13246,53 @@
       "Action/Adventure",
       "Film adaptation",
       "Drama"
-    ],
-    "name": "Flight of the Phoenix",
-    "initial_release_date": "2004-12-17",
-    "id": "/en/flight_of_the_phoenix",
-    "directed_by": [
-      "John Moore"
     ]
   },
   {
+    "id": "/en/flightplan",
+    "initial_release_date": "2005-09-22",
+    "name": "Flightplan",
+    "directed_by": [
+      "Robert Schwentke"
+    ],
     "genre": [
       "Thriller",
       "Mystery",
       "Drama"
-    ],
-    "name": "Flightplan",
-    "initial_release_date": "2005-09-22",
-    "id": "/en/flightplan",
-    "directed_by": [
-      "Robert Schwentke"
     ]
   },
   {
+    "id": "/en/flock_of_dodos",
+    "name": "Flock of Dodos",
+    "directed_by": [
+      "Randy Olson"
+    ],
     "genre": [
       "Documentary film",
       "History"
-    ],
-    "name": "Flock of Dodos",
-    "id": "/en/flock_of_dodos",
-    "directed_by": [
-      "Randy Olson"
     ]
   },
   {
+    "id": "/en/fluffy_the_english_vampire_slayer",
+    "name": "Fluffy the English Vampire Slayer",
+    "directed_by": [
+      "Henry Burrows"
+    ],
     "genre": [
       "Horror comedy",
       "Short Film",
       "Fan film",
       "Parody"
-    ],
-    "name": "Fluffy the English Vampire Slayer",
-    "id": "/en/fluffy_the_english_vampire_slayer",
-    "directed_by": [
-      "Henry Burrows"
     ]
   },
   {
+    "id": "/en/flushed_away",
+    "initial_release_date": "2006-10-22",
+    "name": "Flushed Away",
+    "directed_by": [
+      "David Bowers",
+      "Sam Fell"
+    ],
     "genre": [
       "Animation",
       "Family",
@@ -13304,31 +13300,30 @@
       "Children's/Family",
       "Family-Oriented Adventure",
       "Comedy"
-    ],
-    "name": "Flushed Away",
-    "initial_release_date": "2006-10-22",
-    "id": "/en/flushed_away",
-    "directed_by": [
-      "David Bowers",
-      "Sam Fell"
     ]
   },
   {
+    "id": "/en/fool_and_final",
+    "initial_release_date": "2007-06-01",
+    "name": "Fool &amp; Final",
+    "directed_by": [
+      "Ahmed Khan"
+    ],
     "genre": [
       "Comedy",
       "Action Film",
       "Romance Film",
       "Bollywood",
       "World cinema"
-    ],
-    "name": "Fool &amp; Final",
-    "initial_release_date": "2007-06-01",
-    "id": "/en/fool_and_final",
-    "directed_by": [
-      "Ahmed Khan"
     ]
   },
   {
+    "id": "/en/foolproof",
+    "initial_release_date": "2003-10-03",
+    "name": "Foolproof",
+    "directed_by": [
+      "William Phillips"
+    ],
     "genre": [
       "Action Film",
       "Thriller",
@@ -13337,79 +13332,79 @@
       "Caper story",
       "Crime Fiction",
       "Comedy"
-    ],
-    "name": "Foolproof",
-    "initial_release_date": "2003-10-03",
-    "id": "/en/foolproof",
-    "directed_by": [
-      "William Phillips"
     ]
   },
   {
+    "id": "/en/for_the_birds",
+    "initial_release_date": "2000-06-05",
+    "name": "For the Birds",
+    "directed_by": [
+      "Ralph Eggleston"
+    ],
     "genre": [
       "Short Film",
       "Animation",
       "Comedy",
       "Family"
-    ],
-    "name": "For the Birds",
-    "initial_release_date": "2000-06-05",
-    "id": "/en/for_the_birds",
-    "directed_by": [
-      "Ralph Eggleston"
     ]
   },
   {
+    "id": "/en/for_your_consideration_2006",
+    "initial_release_date": "2006-11-17",
+    "name": "For Your Consideration",
+    "directed_by": [
+      "Christopher Guest"
+    ],
     "genre": [
       "Mockumentary",
       "Parody",
       "Comedy"
-    ],
-    "name": "For Your Consideration",
-    "initial_release_date": "2006-11-17",
-    "id": "/en/for_your_consideration_2006",
-    "directed_by": [
-      "Christopher Guest"
     ]
   },
   {
+    "id": "/en/diev_mi_kas",
+    "initial_release_date": "2005-09-23",
+    "name": "Forest of the Gods",
+    "directed_by": [
+      "Algimantas Puipa"
+    ],
     "genre": [
       "War film",
       "Drama"
-    ],
-    "name": "Forest of the Gods",
-    "initial_release_date": "2005-09-23",
-    "id": "/en/diev_mi_kas",
-    "directed_by": [
-      "Algimantas Puipa"
     ]
   },
   {
+    "id": "/en/formula_17",
+    "initial_release_date": "2004-04-02",
+    "name": "Formula 17",
+    "directed_by": [
+      "Chen Yin-jung"
+    ],
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy"
-    ],
-    "name": "Formula 17",
-    "initial_release_date": "2004-04-02",
-    "id": "/en/formula_17",
-    "directed_by": [
-      "Chen Yin-jung"
     ]
   },
   {
+    "id": "/en/forty_shades_of_blue",
+    "name": "Forty Shades of Blue",
+    "directed_by": [
+      "Ira Sachs"
+    ],
     "genre": [
       "Indie film",
       "Romance Film",
       "Drama"
-    ],
-    "name": "Forty Shades of Blue",
-    "id": "/en/forty_shades_of_blue",
-    "directed_by": [
-      "Ira Sachs"
     ]
   },
   {
+    "id": "/en/four_brothers_2005",
+    "initial_release_date": "2005-08-12",
+    "name": "Four Brothers",
+    "directed_by": [
+      "John Singleton"
+    ],
     "genre": [
       "Action Film",
       "Crime Fiction",
@@ -13418,108 +13413,108 @@
       "Family Drama",
       "Crime Drama",
       "Drama"
-    ],
-    "name": "Four Brothers",
-    "initial_release_date": "2005-08-12",
-    "id": "/en/four_brothers_2005",
-    "directed_by": [
-      "John Singleton"
     ]
   },
   {
+    "id": "/en/frailty",
+    "initial_release_date": "2001-11-17",
+    "name": "Frailty",
+    "directed_by": [
+      "Bill Paxton"
+    ],
     "genre": [
       "Psychological thriller",
       "Thriller",
       "Crime Fiction",
       "Drama"
-    ],
-    "name": "Frailty",
-    "initial_release_date": "2001-11-17",
-    "id": "/en/frailty",
-    "directed_by": [
-      "Bill Paxton"
     ]
   },
   {
+    "id": "/en/frankenfish",
+    "initial_release_date": "2004-10-09",
+    "name": "Frankenfish",
+    "directed_by": [
+      "Mark A.Z. Dipp\u00e9"
+    ],
     "genre": [
       "Action Film",
       "Horror",
       "Natural horror film",
       "Monster",
       "Science Fiction"
-    ],
-    "name": "Frankenfish",
-    "initial_release_date": "2004-10-09",
-    "id": "/en/frankenfish",
-    "directed_by": [
-      "Mark A.Z. Dipp\u00e9"
     ]
   },
   {
-    "genre": [
-      "Family",
-      "Animation"
-    ],
-    "name": "Franklin and the Turtle Lake Treasure",
-    "initial_release_date": "2006-12-20",
     "id": "/en/franklin_and_grannys_secret",
+    "initial_release_date": "2006-12-20",
+    "name": "Franklin and the Turtle Lake Treasure",
     "directed_by": [
       "Dominique Monf\u00e9ry"
-    ]
-  },
-  {
+    ],
     "genre": [
       "Family",
       "Animation"
-    ],
-    "name": "Franklin and the Green Knight",
-    "initial_release_date": "2000-10-17",
+    ]
+  },
+  {
     "id": "/en/franklin_and_the_green_knight",
+    "initial_release_date": "2000-10-17",
+    "name": "Franklin and the Green Knight",
     "directed_by": [
       "John van Bruggen"
-    ]
-  },
-  {
+    ],
     "genre": [
       "Family",
       "Animation"
-    ],
-    "name": "Franklin's Magic Christmas",
-    "initial_release_date": "2001-11-06",
-    "id": "/en/franklins_magic_christmas",
-    "directed_by": [
-      "John van Bruggen"
     ]
   },
   {
+    "id": "/en/franklins_magic_christmas",
+    "initial_release_date": "2001-11-06",
+    "name": "Franklin's Magic Christmas",
+    "directed_by": [
+      "John van Bruggen"
+    ],
+    "genre": [
+      "Family",
+      "Animation"
+    ]
+  },
+  {
+    "id": "/en/freaky_friday_2003",
+    "initial_release_date": "2003-08-04",
+    "name": "Freaky Friday",
+    "directed_by": [
+      "Mark Waters"
+    ],
     "genre": [
       "Family",
       "Fantasy",
       "Comedy"
-    ],
-    "name": "Freaky Friday",
-    "initial_release_date": "2003-08-04",
-    "id": "/en/freaky_friday_2003",
-    "directed_by": [
-      "Mark Waters"
     ]
   },
   {
+    "id": "/en/freddy_vs_jason",
+    "initial_release_date": "2003-08-13",
+    "name": "Freddy vs. Jason",
+    "directed_by": [
+      "Ronny Yu"
+    ],
     "genre": [
       "Horror",
       "Thriller",
       "Slasher",
       "Action Film",
       "Crime Fiction"
-    ],
-    "name": "Freddy vs. Jason",
-    "initial_release_date": "2003-08-13",
-    "id": "/en/freddy_vs_jason",
-    "directed_by": [
-      "Ronny Yu"
     ]
   },
   {
+    "id": "/en/free_jimmy",
+    "initial_release_date": "2006-04-21",
+    "name": "Free Jimmy",
+    "directed_by": [
+      "Christopher Nielsen"
+    ],
     "genre": [
       "Anime",
       "Animation",
@@ -13527,27 +13522,27 @@
       "Satire",
       "Stoner film",
       "Comedy"
-    ],
-    "name": "Free Jimmy",
-    "initial_release_date": "2006-04-21",
-    "id": "/en/free_jimmy",
-    "directed_by": [
-      "Christopher Nielsen"
     ]
   },
   {
+    "id": "/en/free_zone",
+    "initial_release_date": "2005-05-19",
+    "name": "Free Zone",
+    "directed_by": [
+      "Amos Gitai"
+    ],
     "genre": [
       "Comedy",
       "Drama"
-    ],
-    "name": "Free Zone",
-    "initial_release_date": "2005-05-19",
-    "id": "/en/free_zone",
-    "directed_by": [
-      "Amos Gitai"
     ]
   },
   {
+    "id": "/en/freedomland",
+    "initial_release_date": "2006-02-17",
+    "name": "Freedomland",
+    "directed_by": [
+      "Joe Roth"
+    ],
     "genre": [
       "Mystery",
       "Thriller",
@@ -13556,28 +13551,28 @@
       "Crime Thriller",
       "Crime Drama",
       "Drama"
-    ],
-    "name": "Freedomland",
-    "initial_release_date": "2006-02-17",
-    "id": "/en/freedomland",
-    "directed_by": [
-      "Joe Roth"
     ]
   },
   {
+    "id": "/en/french_bean",
+    "initial_release_date": "2007-03-22",
+    "name": "Mr. Bean's Holiday",
+    "directed_by": [
+      "Steve Bendelack"
+    ],
     "genre": [
       "Family",
       "Comedy",
       "Road movie"
-    ],
-    "name": "Mr. Bean's Holiday",
-    "initial_release_date": "2007-03-22",
-    "id": "/en/french_bean",
-    "directed_by": [
-      "Steve Bendelack"
     ]
   },
   {
+    "id": "/en/frequency_2000",
+    "initial_release_date": "2000-04-28",
+    "name": "Frequency",
+    "directed_by": [
+      "Gregory Hoblit"
+    ],
     "genre": [
       "Thriller",
       "Time travel",
@@ -13587,69 +13582,69 @@
       "Crime Fiction",
       "Family Drama",
       "Drama"
-    ],
-    "name": "Frequency",
-    "initial_release_date": "2000-04-28",
-    "id": "/en/frequency_2000",
-    "directed_by": [
-      "Gregory Hoblit"
     ]
   },
   {
+    "id": "/en/frida",
+    "initial_release_date": "2002-08-29",
+    "name": "Frida",
+    "directed_by": [
+      "Julie Taymor"
+    ],
     "genre": [
       "Biographical film",
       "Romance Film",
       "Political drama",
       "Drama"
-    ],
-    "name": "Frida",
-    "initial_release_date": "2002-08-29",
-    "id": "/en/frida",
-    "directed_by": [
-      "Julie Taymor"
     ]
   },
   {
+    "id": "/en/friday_after_next",
+    "initial_release_date": "2002-11-22",
+    "name": "Friday After Next",
+    "directed_by": [
+      "Marcus Raboy"
+    ],
     "genre": [
       "Buddy film",
       "Comedy"
-    ],
-    "name": "Friday After Next",
-    "initial_release_date": "2002-11-22",
-    "id": "/en/friday_after_next",
-    "directed_by": [
-      "Marcus Raboy"
     ]
   },
   {
+    "id": "/en/friday_night_lights",
+    "initial_release_date": "2004-10-06",
+    "name": "Friday Night Lights",
+    "directed_by": [
+      "Peter Berg"
+    ],
     "genre": [
       "Action Film",
       "Sports",
       "Drama"
-    ],
-    "name": "Friday Night Lights",
-    "initial_release_date": "2004-10-06",
-    "id": "/en/friday_night_lights",
-    "directed_by": [
-      "Peter Berg"
     ]
   },
   {
+    "id": "/en/friends_2001",
+    "initial_release_date": "2001-01-14",
+    "name": "Friends",
+    "directed_by": [
+      "Siddique"
+    ],
     "genre": [
       "Romance Film",
       "Comedy",
       "Drama",
       "Tamil cinema",
       "World cinema"
-    ],
-    "name": "Friends",
-    "initial_release_date": "2001-01-14",
-    "id": "/en/friends_2001",
-    "directed_by": [
-      "Siddique"
     ]
   },
   {
+    "id": "/en/friends_with_money",
+    "initial_release_date": "2006-04-07",
+    "name": "Friends with Money",
+    "directed_by": [
+      "Nicole Holofcener"
+    ],
     "genre": [
       "Romance Film",
       "Indie film",
@@ -13658,26 +13653,27 @@
       "Ensemble Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Friends with Money",
-    "initial_release_date": "2006-04-07",
-    "id": "/en/friends_with_money",
-    "directed_by": [
-      "Nicole Holofcener"
     ]
   },
   {
-    "genre": [
-      "Comedy-drama"
-    ],
-    "name": "FRO - The Movie",
     "id": "/en/fro_the_movie",
+    "name": "FRO - The Movie",
     "directed_by": [
       "Brad Gashler",
       "Michael J. Brooks"
+    ],
+    "genre": [
+      "Comedy-drama"
     ]
   },
   {
+    "id": "/en/from_hell_2001",
+    "initial_release_date": "2001-09-08",
+    "name": "From Hell",
+    "directed_by": [
+      "Allen Hughes",
+      "Albert Hughes"
+    ],
     "genre": [
       "Thriller",
       "Mystery",
@@ -13687,29 +13683,28 @@
       "Film adaptation",
       "Horror",
       "Drama"
-    ],
-    "name": "From Hell",
-    "initial_release_date": "2001-09-08",
-    "id": "/en/from_hell_2001",
-    "directed_by": [
-      "Allen Hughes",
-      "Albert Hughes"
     ]
   },
   {
-    "genre": [
-      "Music video"
-    ],
-    "name": "From Janet to Damita Jo: The Videos",
-    "initial_release_date": "2004-09-07",
     "id": "/en/from_janet_to_damita_jo_the_videos",
+    "initial_release_date": "2004-09-07",
+    "name": "From Janet to Damita Jo: The Videos",
     "directed_by": [
       "Jonathan Dayton",
       "Mark Romanek",
       "Paul Hunter"
+    ],
+    "genre": [
+      "Music video"
     ]
   },
   {
+    "id": "/en/from_justin_to_kelly",
+    "initial_release_date": "2003-06-20",
+    "name": "From Justin to Kelly",
+    "directed_by": [
+      "Robert Iscove"
+    ],
     "genre": [
       "Musical",
       "Romantic comedy",
@@ -13718,54 +13713,54 @@
       "Beach Film",
       "Musical comedy",
       "Comedy"
-    ],
-    "name": "From Justin to Kelly",
-    "initial_release_date": "2003-06-20",
-    "id": "/en/from_justin_to_kelly",
-    "directed_by": [
-      "Robert Iscove"
     ]
   },
   {
+    "id": "/en/frostbite_2005",
+    "name": "Frostbite",
+    "directed_by": [
+      "Jonathan Schwartz"
+    ],
     "genre": [
       "Sports",
       "Comedy"
-    ],
-    "name": "Frostbite",
-    "id": "/en/frostbite_2005",
-    "directed_by": [
-      "Jonathan Schwartz"
     ]
   },
   {
+    "id": "/en/fubar_2002",
+    "initial_release_date": "2002-01-01",
+    "name": "FUBAR",
+    "directed_by": [
+      "Michael Dowse"
+    ],
     "genre": [
       "Mockumentary",
       "Indie film",
       "Buddy film",
       "Comedy",
       "Drama"
-    ],
-    "name": "FUBAR",
-    "initial_release_date": "2002-01-01",
-    "id": "/en/fubar_2002",
-    "directed_by": [
-      "Michael Dowse"
     ]
   },
   {
+    "id": "/en/fuck_2005",
+    "initial_release_date": "2005-11-07",
+    "name": "Fuck",
+    "directed_by": [
+      "Steve Anderson"
+    ],
     "genre": [
       "Documentary film",
       "Indie film",
       "Political cinema"
-    ],
-    "name": "Fuck",
-    "initial_release_date": "2005-11-07",
-    "id": "/en/fuck_2005",
-    "directed_by": [
-      "Steve Anderson"
     ]
   },
   {
+    "id": "/en/fuckland",
+    "initial_release_date": "2000-09-21",
+    "name": "Fuckland",
+    "directed_by": [
+      "Jos\u00e9 Luis M\u00e1rques"
+    ],
     "genre": [
       "Indie film",
       "Dogme 95",
@@ -13774,41 +13769,41 @@
       "Comedy of manners",
       "Comedy",
       "Drama"
-    ],
-    "name": "Fuckland",
-    "initial_release_date": "2000-09-21",
-    "id": "/en/fuckland",
-    "directed_by": [
-      "Jos\u00e9 Luis M\u00e1rques"
     ]
   },
   {
+    "id": "/en/full_court_miracle",
+    "initial_release_date": "2003-11-21",
+    "name": "Full-Court Miracle",
+    "directed_by": [
+      "Stuart Gillard"
+    ],
     "genre": [
       "Family",
       "Drama"
-    ],
-    "name": "Full-Court Miracle",
-    "initial_release_date": "2003-11-21",
-    "id": "/en/full_court_miracle",
-    "directed_by": [
-      "Stuart Gillard"
     ]
   },
   {
+    "id": "/en/full_disclosure_2001",
+    "initial_release_date": "2001-05-15",
+    "name": "Full Disclosure",
+    "directed_by": [
+      "John Bradshaw"
+    ],
     "genre": [
       "Thriller",
       "Action/Adventure",
       "Action Film",
       "Political thriller"
-    ],
-    "name": "Full Disclosure",
-    "initial_release_date": "2001-05-15",
-    "id": "/en/full_disclosure_2001",
-    "directed_by": [
-      "John Bradshaw"
     ]
   },
   {
+    "id": "/en/full_frontal",
+    "initial_release_date": "2002-08-02",
+    "name": "Full Frontal",
+    "directed_by": [
+      "Steven Soderbergh"
+    ],
     "genre": [
       "Romantic comedy",
       "Indie film",
@@ -13817,15 +13812,15 @@
       "Ensemble Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Full Frontal",
-    "initial_release_date": "2002-08-02",
-    "id": "/en/full_frontal",
-    "directed_by": [
-      "Steven Soderbergh"
     ]
   },
   {
+    "id": "/wikipedia/ja/$5287$5834$7248_$92FC$306E$932C$91D1$8853$5E2B_$30B7$30E3$30F3$30D0$30E9$3092$5F81$304F$8005",
+    "initial_release_date": "2005-07-23",
+    "name": "Fullmetal Alchemist the Movie: Conqueror of Shamballa",
+    "directed_by": [
+      "Seiji Mizushima"
+    ],
     "genre": [
       "Anime",
       "Fantasy",
@@ -13833,15 +13828,16 @@
       "Animation",
       "Adventure Film",
       "Drama"
-    ],
-    "name": "Fullmetal Alchemist the Movie: Conqueror of Shamballa",
-    "initial_release_date": "2005-07-23",
-    "id": "/wikipedia/ja/$5287$5834$7248_$92FC$306E$932C$91D1$8853$5E2B_$30B7$30E3$30F3$30D0$30E9$3092$5F81$304F$8005",
-    "directed_by": [
-      "Seiji Mizushima"
     ]
   },
   {
+    "id": "/en/fulltime_killer",
+    "initial_release_date": "2001-08-03",
+    "name": "Fulltime Killer",
+    "directed_by": [
+      "Johnnie To",
+      "Wai Ka-fai"
+    ],
     "genre": [
       "Action Film",
       "Thriller",
@@ -13849,28 +13845,26 @@
       "Martial Arts Film",
       "Action Thriller",
       "Drama"
-    ],
-    "name": "Fulltime Killer",
-    "initial_release_date": "2001-08-03",
-    "id": "/en/fulltime_killer",
-    "directed_by": [
-      "Johnnie To",
-      "Wai Ka-fai"
     ]
   },
   {
+    "id": "/en/fun_with_dick_and_jane_2005",
+    "initial_release_date": "2005-12-21",
+    "name": "Fun with Dick and Jane",
+    "directed_by": [
+      "Dean Parisot"
+    ],
     "genre": [
       "Crime Fiction",
       "Comedy"
-    ],
-    "name": "Fun with Dick and Jane",
-    "initial_release_date": "2005-12-21",
-    "id": "/en/fun_with_dick_and_jane_2005",
-    "directed_by": [
-      "Dean Parisot"
     ]
   },
   {
+    "id": "/en/funny_ha_ha",
+    "name": "Funny Ha Ha",
+    "directed_by": [
+      "Andrew Bujalski"
+    ],
     "genre": [
       "Indie film",
       "Romantic comedy",
@@ -13879,131 +13873,132 @@
       "Comedy-drama",
       "Comedy of manners",
       "Comedy"
-    ],
-    "name": "Funny Ha Ha",
-    "id": "/en/funny_ha_ha",
-    "directed_by": [
-      "Andrew Bujalski"
     ]
   },
   {
+    "id": "/en/g-sale",
+    "initial_release_date": "2005-11-15",
+    "name": "G-Sale",
+    "directed_by": [
+      "Randy Nargi"
+    ],
     "genre": [
       "Mockumentary",
       "Comedy of manners",
       "Comedy"
-    ],
-    "name": "G-Sale",
-    "initial_release_date": "2005-11-15",
-    "id": "/en/g-sale",
-    "directed_by": [
-      "Randy Nargi"
     ]
   },
   {
+    "id": "/en/gabrielle_2006",
+    "initial_release_date": "2005-09-05",
+    "name": "Gabrielle",
+    "directed_by": [
+      "Patrice Ch\u00e9reau"
+    ],
     "genre": [
       "Romance Film",
       "Drama"
-    ],
-    "name": "Gabrielle",
-    "initial_release_date": "2005-09-05",
-    "id": "/en/gabrielle_2006",
-    "directed_by": [
-      "Patrice Ch\u00e9reau"
     ]
   },
   {
+    "id": "/en/gagamboy",
+    "initial_release_date": "2004-01-01",
+    "name": "Gagamboy",
+    "directed_by": [
+      "Erik Matti"
+    ],
     "genre": [
       "Action Film",
       "Science Fiction",
       "Comedy",
       "Fantasy"
-    ],
-    "name": "Gagamboy",
-    "initial_release_date": "2004-01-01",
-    "id": "/en/gagamboy",
-    "directed_by": [
-      "Erik Matti"
     ]
   },
   {
+    "id": "/en/gallipoli_2005",
+    "initial_release_date": "2005-03-18",
+    "name": "Gallipoli",
+    "directed_by": [
+      "Tolga \u00d6rnek"
+    ],
     "genre": [
       "Documentary film",
       "War film"
-    ],
-    "name": "Gallipoli",
-    "initial_release_date": "2005-03-18",
-    "id": "/en/gallipoli_2005",
-    "directed_by": [
-      "Tolga \u00d6rnek"
     ]
   },
   {
+    "id": "/en/game_6_2006",
+    "initial_release_date": "2006-03-10",
+    "name": "Game 6",
+    "directed_by": [
+      "Michael Hoffman"
+    ],
     "genre": [
       "Indie film",
       "Sports",
       "Comedy-drama",
       "Drama"
-    ],
-    "name": "Game 6",
-    "initial_release_date": "2006-03-10",
-    "id": "/en/game_6_2006",
-    "directed_by": [
-      "Michael Hoffman"
     ]
   },
   {
-    "genre": [
-      "Science Fiction"
-    ],
-    "name": "Maximum Surge",
-    "initial_release_date": "2003-06-23",
     "id": "/en/game_over_2003",
+    "initial_release_date": "2003-06-23",
+    "name": "Maximum Surge",
     "directed_by": [
       "Jason Bourque"
+    ],
+    "genre": [
+      "Science Fiction"
     ]
   },
   {
+    "id": "/en/gamma_squad",
+    "initial_release_date": "2004-06-14",
+    "name": "Expendable",
+    "directed_by": [
+      "Nathaniel Barker",
+      "Eliot Lash"
+    ],
     "genre": [
       "Indie film",
       "Short Film",
       "War film"
-    ],
-    "name": "Expendable",
-    "initial_release_date": "2004-06-14",
-    "id": "/en/gamma_squad",
-    "directed_by": [
-      "Nathaniel Barker",
-      "Eliot Lash"
     ]
   },
   {
+    "id": "/en/gangotri_2003",
+    "initial_release_date": "2003-03-28",
+    "name": "Gangotri",
+    "directed_by": [
+      "Kovelamudi Raghavendra Rao"
+    ],
     "genre": [
       "Romance Film",
       "Drama",
       "Tollywood",
       "World cinema"
-    ],
-    "name": "Gangotri",
-    "initial_release_date": "2003-03-28",
-    "id": "/en/gangotri_2003",
-    "directed_by": [
-      "Kovelamudi Raghavendra Rao"
     ]
   },
   {
+    "id": "/en/gangs_of_new_york",
+    "initial_release_date": "2002-12-09",
+    "name": "Gangs of New York",
+    "directed_by": [
+      "Martin Scorsese"
+    ],
     "genre": [
       "Crime Fiction",
       "Historical drama",
       "Drama"
-    ],
-    "name": "Gangs of New York",
-    "initial_release_date": "2002-12-09",
-    "id": "/en/gangs_of_new_york",
-    "directed_by": [
-      "Martin Scorsese"
     ]
   },
   {
+    "id": "/en/gangster_2006",
+    "initial_release_date": "2006-04-28",
+    "name": "Gangster",
+    "directed_by": [
+      "Anurag Basu"
+    ],
     "genre": [
       "Thriller",
       "Romance Film",
@@ -14012,15 +14007,15 @@
       "Crime Fiction",
       "Bollywood",
       "Drama"
-    ],
-    "name": "Gangster",
-    "initial_release_date": "2006-04-28",
-    "id": "/en/gangster_2006",
-    "directed_by": [
-      "Anurag Basu"
     ]
   },
   {
+    "id": "/en/gangster_no_1",
+    "initial_release_date": "2000-06-09",
+    "name": "Gangster No. 1",
+    "directed_by": [
+      "Paul McGuigan"
+    ],
     "genre": [
       "Thriller",
       "Crime Fiction",
@@ -14030,26 +14025,26 @@
       "Action/Adventure",
       "Gangster Film",
       "Drama"
-    ],
-    "name": "Gangster No. 1",
-    "initial_release_date": "2000-06-09",
-    "id": "/en/gangster_no_1",
-    "directed_by": [
-      "Paul McGuigan"
     ]
   },
   {
-    "genre": [
-      "Comedy"
-    ],
-    "name": "Garam Masala",
-    "initial_release_date": "2005-11-02",
     "id": "/en/garam_masala_2005",
+    "initial_release_date": "2005-11-02",
+    "name": "Garam Masala",
     "directed_by": [
       "Priyadarshan"
+    ],
+    "genre": [
+      "Comedy"
     ]
   },
   {
+    "id": "/en/garcon_stupide",
+    "initial_release_date": "2004-03-10",
+    "name": "Gar\u00e7on stupide",
+    "directed_by": [
+      "Lionel Baier"
+    ],
     "genre": [
       "LGBT",
       "World cinema",
@@ -14059,15 +14054,15 @@
       "Coming of age",
       "Comedy",
       "Drama"
-    ],
-    "name": "Gar\u00e7on stupide",
-    "initial_release_date": "2004-03-10",
-    "id": "/en/garcon_stupide",
-    "directed_by": [
-      "Lionel Baier"
     ]
   },
   {
+    "id": "/en/garden_state",
+    "initial_release_date": "2004-01-16",
+    "name": "Garden State",
+    "directed_by": [
+      "Zach Braff"
+    ],
     "genre": [
       "Romantic comedy",
       "Coming of age",
@@ -14075,98 +14070,98 @@
       "Comedy-drama",
       "Comedy",
       "Drama"
-    ],
-    "name": "Garden State",
-    "initial_release_date": "2004-01-16",
-    "id": "/en/garden_state",
-    "directed_by": [
-      "Zach Braff"
     ]
   },
   {
+    "id": "/en/garfield_2004",
+    "initial_release_date": "2004-06-06",
+    "name": "Garfield: The Movie",
+    "directed_by": [
+      "Peter Hewitt"
+    ],
     "genre": [
       "Slapstick",
       "Animation",
       "Family",
       "Comedy"
-    ],
-    "name": "Garfield: The Movie",
-    "initial_release_date": "2004-06-06",
-    "id": "/en/garfield_2004",
-    "directed_by": [
-      "Peter Hewitt"
     ]
   },
   {
+    "id": "/en/garfield_a_tail_of_two_kitties",
+    "initial_release_date": "2006-06-15",
+    "name": "Garfield: A Tail of Two Kitties",
+    "directed_by": [
+      "Tim Hill"
+    ],
     "genre": [
       "Family",
       "Animal Picture",
       "Children's/Family",
       "Family-Oriented Adventure",
       "Comedy"
-    ],
-    "name": "Garfield: A Tail of Two Kitties",
-    "initial_release_date": "2006-06-15",
-    "id": "/en/garfield_a_tail_of_two_kitties",
-    "directed_by": [
-      "Tim Hill"
     ]
   },
   {
+    "id": "/en/gene-x",
+    "name": "Gene-X",
+    "directed_by": [
+      "Martin Simpson"
+    ],
     "genre": [
       "Thriller",
       "Romance Film"
-    ],
-    "name": "Gene-X",
-    "id": "/en/gene-x",
-    "directed_by": [
-      "Martin Simpson"
     ]
   },
   {
+    "id": "/en/george_of_the_jungle_2",
+    "initial_release_date": "2003-08-18",
+    "name": "George of the Jungle 2",
+    "directed_by": [
+      "David Grossman"
+    ],
     "genre": [
       "Parody",
       "Slapstick",
       "Family",
       "Jungle Film",
       "Comedy"
-    ],
-    "name": "George of the Jungle 2",
-    "initial_release_date": "2003-08-18",
-    "id": "/en/george_of_the_jungle_2",
-    "directed_by": [
-      "David Grossman"
     ]
   },
   {
+    "id": "/en/george_washington_2000",
+    "initial_release_date": "2000-09-29",
+    "name": "George Washington",
+    "directed_by": [
+      "David Gordon Green"
+    ],
     "genre": [
       "Coming of age",
       "Indie film",
       "Drama"
-    ],
-    "name": "George Washington",
-    "initial_release_date": "2000-09-29",
-    "id": "/en/george_washington_2000",
-    "directed_by": [
-      "David Gordon Green"
     ]
   },
   {
+    "id": "/en/georgia_rule",
+    "initial_release_date": "2007-05-10",
+    "name": "Georgia Rule",
+    "directed_by": [
+      "Garry Marshall"
+    ],
     "genre": [
       "Comedy-drama",
       "Romance Film",
       "Melodrama",
       "Comedy",
       "Drama"
-    ],
-    "name": "Georgia Rule",
-    "initial_release_date": "2007-05-10",
-    "id": "/en/georgia_rule",
-    "directed_by": [
-      "Garry Marshall"
     ]
   },
   {
+    "id": "/en/gerry",
+    "initial_release_date": "2003-02-14",
+    "name": "Gerry",
+    "directed_by": [
+      "Gus Van Sant"
+    ],
     "genre": [
       "Indie film",
       "Adventure Film",
@@ -14175,27 +14170,27 @@
       "Experimental film",
       "Buddy film",
       "Drama"
-    ],
-    "name": "Gerry",
-    "initial_release_date": "2003-02-14",
-    "id": "/en/gerry",
-    "directed_by": [
-      "Gus Van Sant"
     ]
   },
   {
+    "id": "/en/get_a_clue",
+    "initial_release_date": "2002-06-28",
+    "name": "Get a Clue",
+    "directed_by": [
+      "Maggie Greenwald Mansfield"
+    ],
     "genre": [
       "Mystery",
       "Comedy"
-    ],
-    "name": "Get a Clue",
-    "initial_release_date": "2002-06-28",
-    "id": "/en/get_a_clue",
-    "directed_by": [
-      "Maggie Greenwald Mansfield"
     ]
   },
   {
+    "id": "/en/get_over_it",
+    "initial_release_date": "2001-03-09",
+    "name": "Get Over It",
+    "directed_by": [
+      "Tommy O'Haver"
+    ],
     "genre": [
       "Musical",
       "Romantic comedy",
@@ -14209,15 +14204,15 @@
       "Sex comedy",
       "Musical comedy",
       "Comedy"
-    ],
-    "name": "Get Over It",
-    "initial_release_date": "2001-03-09",
-    "id": "/en/get_over_it",
-    "directed_by": [
-      "Tommy O'Haver"
     ]
   },
   {
+    "id": "/en/get_rich_or_die_tryin",
+    "initial_release_date": "2005-11-09",
+    "name": "Get Rich or Die Tryin'",
+    "directed_by": [
+      "Jim Sheridan"
+    ],
     "genre": [
       "Coming of age",
       "Crime Fiction",
@@ -14226,15 +14221,14 @@
       "Biographical film",
       "Musical Drama",
       "Drama"
-    ],
-    "name": "Get Rich or Die Tryin'",
-    "initial_release_date": "2005-11-09",
-    "id": "/en/get_rich_or_die_tryin",
-    "directed_by": [
-      "Jim Sheridan"
     ]
   },
   {
+    "id": "/en/get_up",
+    "name": "Get Up!",
+    "directed_by": [
+      "Kazuyuki Izutsu"
+    ],
     "genre": [
       "Musical",
       "Action Film",
@@ -14243,55 +14237,56 @@
       "Musical comedy",
       "Comedy",
       "Drama"
-    ],
-    "name": "Get Up!",
-    "id": "/en/get_up",
-    "directed_by": [
-      "Kazuyuki Izutsu"
     ]
   },
   {
+    "id": "/en/getting_my_brother_laid",
+    "name": "Getting My Brother Laid",
+    "directed_by": [
+      "Sven Taddicken"
+    ],
     "genre": [
       "Romantic comedy",
       "Romance Film",
       "Comedy",
       "Drama"
-    ],
-    "name": "Getting My Brother Laid",
-    "id": "/en/getting_my_brother_laid",
-    "directed_by": [
-      "Sven Taddicken"
     ]
   },
   {
+    "id": "/en/getting_there",
+    "initial_release_date": "2002-06-11",
+    "name": "Getting There: Sweet 16 and Licensed to Drive",
+    "directed_by": [
+      "Steve Purcell"
+    ],
     "genre": [
       "Family",
       "Teen film",
       "Comedy"
-    ],
-    "name": "Getting There: Sweet 16 and Licensed to Drive",
-    "initial_release_date": "2002-06-11",
-    "id": "/en/getting_there",
-    "directed_by": [
-      "Steve Purcell"
     ]
   },
   {
+    "id": "/en/ghajini",
+    "initial_release_date": "2005-09-29",
+    "name": "Ghajini",
+    "directed_by": [
+      "A.R. Murugadoss"
+    ],
     "genre": [
       "Thriller",
       "Action Film",
       "Mystery",
       "Romance Film",
       "Drama"
-    ],
-    "name": "Ghajini",
-    "initial_release_date": "2005-09-29",
-    "id": "/en/ghajini",
-    "directed_by": [
-      "A.R. Murugadoss"
     ]
   },
   {
+    "id": "/en/gharshana",
+    "initial_release_date": "2004-07-30",
+    "name": "Gharshana",
+    "directed_by": [
+      "Gautham Menon"
+    ],
     "genre": [
       "Mystery",
       "Crime Fiction",
@@ -14300,55 +14295,55 @@
       "Tollywood",
       "World cinema",
       "Drama"
-    ],
-    "name": "Gharshana",
-    "initial_release_date": "2004-07-30",
-    "id": "/en/gharshana",
-    "directed_by": [
-      "Gautham Menon"
     ]
   },
   {
+    "id": "/en/ghilli",
+    "initial_release_date": "2004-04-17",
+    "name": "Ghilli",
+    "directed_by": [
+      "Dharani"
+    ],
     "genre": [
       "Sports",
       "Action Film",
       "Romance Film",
       "Comedy"
-    ],
-    "name": "Ghilli",
-    "initial_release_date": "2004-04-17",
-    "id": "/en/ghilli",
-    "directed_by": [
-      "Dharani"
     ]
   },
   {
-    "genre": [
-      "Horror comedy"
-    ],
-    "name": "Ghost Game",
-    "initial_release_date": "2005-09-01",
     "id": "/en/ghost_game_2006",
+    "initial_release_date": "2005-09-01",
+    "name": "Ghost Game",
     "directed_by": [
       "Joe Knee"
+    ],
+    "genre": [
+      "Horror comedy"
     ]
   },
   {
+    "id": "/en/ghost_house",
+    "initial_release_date": "2004-09-17",
+    "name": "Ghost House",
+    "directed_by": [
+      "Kim Sang-jin"
+    ],
     "genre": [
       "Horror",
       "Horror comedy",
       "Comedy",
       "East Asian cinema",
       "World cinema"
-    ],
-    "name": "Ghost House",
-    "initial_release_date": "2004-09-17",
-    "id": "/en/ghost_house",
-    "directed_by": [
-      "Kim Sang-jin"
     ]
   },
   {
+    "id": "/en/ghost_in_the_shell_2_innocence",
+    "initial_release_date": "2004-03-06",
+    "name": "Ghost in the Shell 2: Innocence",
+    "directed_by": [
+      "Mamoru Oshii"
+    ],
     "genre": [
       "Science Fiction",
       "Anime",
@@ -14356,15 +14351,15 @@
       "Animation",
       "Thriller",
       "Drama"
-    ],
-    "name": "Ghost in the Shell 2: Innocence",
-    "initial_release_date": "2004-03-06",
-    "id": "/en/ghost_in_the_shell_2_innocence",
-    "directed_by": [
-      "Mamoru Oshii"
     ]
   },
   {
+    "id": "/en/s_a_c_solid_state_society",
+    "initial_release_date": "2006-09-01",
+    "name": "Ghost in the Shell: Solid State Society",
+    "directed_by": [
+      "Kenji Kamiyama"
+    ],
     "genre": [
       "Anime",
       "Science Fiction",
@@ -14373,27 +14368,24 @@
       "Thriller",
       "Adventure Film",
       "Fantasy"
-    ],
-    "name": "Ghost in the Shell: Solid State Society",
-    "initial_release_date": "2006-09-01",
-    "id": "/en/s_a_c_solid_state_society",
-    "directed_by": [
-      "Kenji Kamiyama"
     ]
   },
   {
+    "id": "/en/ghost_lake",
+    "initial_release_date": "2005-05-17",
+    "name": "Ghost Lake",
+    "directed_by": [
+      "Jay Woelfel"
+    ],
     "genre": [
       "Horror",
       "Zombie Film"
-    ],
-    "name": "Ghost Lake",
-    "initial_release_date": "2005-05-17",
-    "id": "/en/ghost_lake",
-    "directed_by": [
-      "Jay Woelfel"
     ]
   },
   {
+    "id": "/en/ghost_rider_2007",
+    "initial_release_date": "2007-01-15",
+    "name": "Ghost Rider",
     "genre": [
       "Adventure Film",
       "Thriller",
@@ -14402,39 +14394,39 @@
       "Horror",
       "Drama"
     ],
-    "name": "Ghost Rider",
-    "initial_release_date": "2007-01-15",
-    "id": "/en/ghost_rider_2007",
     "directed_by": [
       "Mark Steven Johnson"
     ]
   },
   {
+    "id": "/en/ghost_ship_2002",
+    "initial_release_date": "2002-10-22",
+    "name": "Ghost Ship",
     "genre": [
       "Horror",
       "Supernatural",
       "Slasher"
     ],
-    "name": "Ghost Ship",
-    "initial_release_date": "2002-10-22",
-    "id": "/en/ghost_ship_2002",
     "directed_by": [
       "Steve Beck"
     ]
   },
   {
+    "id": "/en/ghost_world_2001",
+    "initial_release_date": "2001-06-16",
+    "name": "Ghost World",
     "genre": [
       "Indie film",
       "Comedy-drama"
     ],
-    "name": "Ghost World",
-    "initial_release_date": "2001-06-16",
-    "id": "/en/ghost_world_2001",
     "directed_by": [
       "Terry Zwigoff"
     ]
   },
   {
+    "id": "/en/ghosts_of_mars",
+    "initial_release_date": "2001-08-24",
+    "name": "Ghosts of Mars",
     "genre": [
       "Adventure Film",
       "Science Fiction",
@@ -14444,51 +14436,51 @@
       "Thriller",
       "Space Western"
     ],
-    "name": "Ghosts of Mars",
-    "initial_release_date": "2001-08-24",
-    "id": "/en/ghosts_of_mars",
     "directed_by": [
       "John Carpenter"
     ]
   },
   {
+    "id": "/m/06ry42",
+    "initial_release_date": "2004-10-28",
+    "name": "The International Playboys' First Movie: Ghouls Gone Wild!",
     "genre": [
       "Short Film",
       "Musical"
     ],
-    "name": "The International Playboys' First Movie: Ghouls Gone Wild!",
-    "initial_release_date": "2004-10-28",
-    "id": "/m/06ry42",
     "directed_by": [
       "Ted Geoghegan"
     ]
   },
   {
+    "id": "/en/gie",
+    "initial_release_date": "2005-07-14",
+    "name": "Gie",
     "genre": [
       "Biographical film",
       "Political drama",
       "Drama"
     ],
-    "name": "Gie",
-    "initial_release_date": "2005-07-14",
-    "id": "/en/gie",
     "directed_by": [
       "Riri Riza"
     ]
   },
   {
+    "id": "/en/gigantic_2003",
+    "initial_release_date": "2003-03-10",
+    "name": "Gigantic (A Tale of Two Johns)",
     "genre": [
       "Indie film",
       "Documentary film"
     ],
-    "name": "Gigantic (A Tale of Two Johns)",
-    "initial_release_date": "2003-03-10",
-    "id": "/en/gigantic_2003",
     "directed_by": [
       "A. J. Schnack"
     ]
   },
   {
+    "id": "/en/gigli",
+    "initial_release_date": "2003-07-27",
+    "name": "Gigli",
     "genre": [
       "Crime Thriller",
       "Romance Film",
@@ -14496,27 +14488,27 @@
       "Crime Fiction",
       "Comedy"
     ],
-    "name": "Gigli",
-    "initial_release_date": "2003-07-27",
-    "id": "/en/gigli",
     "directed_by": [
       "Martin Brest"
     ]
   },
   {
+    "id": "/en/ginger_snaps",
+    "initial_release_date": "2000-09-10",
+    "name": "Ginger Snaps",
     "genre": [
       "Teen film",
       "Horror",
       "Cult film"
     ],
-    "name": "Ginger Snaps",
-    "initial_release_date": "2000-09-10",
-    "id": "/en/ginger_snaps",
     "directed_by": [
       "John Fawcett"
     ]
   },
   {
+    "id": "/en/ginger_snaps_2_unleashed",
+    "initial_release_date": "2004-01-30",
+    "name": "Ginger Snaps 2: Unleashed",
     "genre": [
       "Thriller",
       "Horror",
@@ -14526,28 +14518,28 @@
       "Horror comedy",
       "Comedy"
     ],
-    "name": "Ginger Snaps 2: Unleashed",
-    "initial_release_date": "2004-01-30",
-    "id": "/en/ginger_snaps_2_unleashed",
     "directed_by": [
       "Brett Sullivan"
     ]
   },
   {
+    "id": "/en/girlfight",
+    "initial_release_date": "2000-01-22",
+    "name": "Girlfight",
     "genre": [
       "Teen film",
       "Sports",
       "Coming-of-age story",
       "Drama"
     ],
-    "name": "Girlfight",
-    "initial_release_date": "2000-01-22",
-    "id": "/en/girlfight",
     "directed_by": [
       "Karyn Kusama"
     ]
   },
   {
+    "id": "/en/gladiator_2000",
+    "initial_release_date": "2000-05-01",
+    "name": "Gladiator",
     "genre": [
       "Historical drama",
       "Epic film",
@@ -14555,35 +14547,32 @@
       "Adventure Film",
       "Drama"
     ],
-    "name": "Gladiator",
-    "initial_release_date": "2000-05-01",
-    "id": "/en/gladiator_2000",
     "directed_by": [
       "Ridley Scott"
     ]
   },
   {
+    "id": "/en/glastonbury_2006",
+    "initial_release_date": "2006-04-14",
+    "name": "Glastonbury",
     "genre": [
       "Documentary film",
       "Music",
       "Concert film",
       "Biographical film"
     ],
-    "name": "Glastonbury",
-    "initial_release_date": "2006-04-14",
-    "id": "/en/glastonbury_2006",
     "directed_by": [
       "Julien Temple"
     ]
   },
   {
+    "id": "/en/glastonbury_anthems",
+    "name": "Glastonbury Anthems",
     "genre": [
       "Documentary film",
       "Music",
       "Concert film"
     ],
-    "name": "Glastonbury Anthems",
-    "id": "/en/glastonbury_anthems",
     "directed_by": [
       "Gavin Taylor",
       "Declan Lowney",
@@ -14592,123 +14581,126 @@
     ]
   },
   {
+    "id": "/en/glitter_2001",
+    "initial_release_date": "2001-09-21",
+    "name": "Glitter",
     "genre": [
       "Musical",
       "Romance Film",
       "Musical Drama",
       "Drama"
     ],
-    "name": "Glitter",
-    "initial_release_date": "2001-09-21",
-    "id": "/en/glitter_2001",
     "directed_by": [
       "Vondie Curtis-Hall"
     ]
   },
   {
+    "id": "/en/global_heresy",
+    "initial_release_date": "2002-09-03",
+    "name": "Global Heresy",
     "genre": [
       "Comedy"
     ],
-    "name": "Global Heresy",
-    "initial_release_date": "2002-09-03",
-    "id": "/en/global_heresy",
     "directed_by": [
       "Sidney J. Furie"
     ]
   },
   {
+    "id": "/en/glory_road_2006",
+    "initial_release_date": "2006-01-13",
+    "name": "Glory Road",
     "genre": [
       "Sports",
       "Historical period drama",
       "Docudrama",
       "Drama"
     ],
-    "name": "Glory Road",
-    "initial_release_date": "2006-01-13",
-    "id": "/en/glory_road_2006",
     "directed_by": [
       "James Gartner"
     ]
   },
   {
+    "id": "/en/go_figure_2005",
+    "initial_release_date": "2005-06-10",
+    "name": "Go Figure",
     "genre": [
       "Family",
       "Comedy",
       "Drama"
     ],
-    "name": "Go Figure",
-    "initial_release_date": "2005-06-10",
-    "id": "/en/go_figure_2005",
     "directed_by": [
       "Francine McDougall"
     ]
   },
   {
+    "id": "/en/goal__2005",
+    "initial_release_date": "2005-09-08",
+    "name": "Goal!",
     "genre": [
       "Sports",
       "Romance Film",
       "Drama"
     ],
-    "name": "Goal!",
-    "initial_release_date": "2005-09-08",
-    "id": "/en/goal__2005",
     "directed_by": [
       "Danny Cannon"
     ]
   },
   {
+    "id": "/en/goal_2_living_the_dream",
+    "initial_release_date": "2007-02-09",
+    "name": "Goal II: Living the Dream",
     "genre": [
       "Sports",
       "Drama"
     ],
-    "name": "Goal II: Living the Dream",
-    "initial_release_date": "2007-02-09",
-    "id": "/en/goal_2_living_the_dream",
     "directed_by": [
       "Jaume Collet-Serra"
     ]
   },
   {
+    "id": "/en/god_grew_tired_of_us",
+    "initial_release_date": "2006-09-04",
+    "name": "God Grew Tired of Us",
     "genre": [
       "Documentary film",
       "Indie film",
       "Historical fiction"
     ],
-    "name": "God Grew Tired of Us",
-    "initial_release_date": "2006-09-04",
-    "id": "/en/god_grew_tired_of_us",
     "directed_by": [
       "Christopher Dillon Quinn",
       "Tommy Walker"
     ]
   },
   {
+    "id": "/en/god_on_my_side",
+    "initial_release_date": "2006-11-02",
+    "name": "God on My Side",
     "genre": [
       "Documentary film",
       "Christian film"
     ],
-    "name": "God on My Side",
-    "initial_release_date": "2006-11-02",
-    "id": "/en/god_on_my_side",
     "directed_by": [
       "Andrew Denton"
     ]
   },
   {
+    "id": "/en/godavari",
+    "initial_release_date": "2006-05-19",
+    "name": "Godavari",
     "genre": [
       "Romance Film",
       "Drama",
       "Tollywood",
       "World cinema"
     ],
-    "name": "Godavari",
-    "initial_release_date": "2006-05-19",
-    "id": "/en/godavari",
     "directed_by": [
       "Sekhar Kammula"
     ]
   },
   {
+    "id": "/en/godfather",
+    "initial_release_date": "2006-02-24",
+    "name": "Varalaru",
     "genre": [
       "Action Film",
       "Musical",
@@ -14717,14 +14709,14 @@
       "Drama",
       "Musical Drama"
     ],
-    "name": "Varalaru",
-    "initial_release_date": "2006-02-24",
-    "id": "/en/godfather",
     "directed_by": [
       "K. S. Ravikumar"
     ]
   },
   {
+    "id": "/en/godsend",
+    "initial_release_date": "2004-04-30",
+    "name": "Godsend",
     "genre": [
       "Thriller",
       "Science Fiction",
@@ -14733,46 +14725,46 @@
       "Sci-Fi Horror",
       "Drama"
     ],
-    "name": "Godsend",
-    "initial_release_date": "2004-04-30",
-    "id": "/en/godsend",
     "directed_by": [
       "Nick Hamm"
     ]
   },
   {
+    "id": "/en/godzilla_3d_to_the_max",
+    "initial_release_date": "2007-09-12",
+    "name": "Godzilla 3D to the MAX",
     "genre": [
       "Horror",
       "Action Film",
       "Science Fiction",
       "Short Film"
     ],
-    "name": "Godzilla 3D to the MAX",
-    "initial_release_date": "2007-09-12",
-    "id": "/en/godzilla_3d_to_the_max",
     "directed_by": [
       "Keith Melton",
       "Yoshimitsu Banno"
     ]
   },
   {
-    "genre": [
-      "Monster",
-      "Science Fiction",
-      "Cult film",
-      "World cinema",
-      "Action Film",
-      "Creature Film",
-      "Japanese Movies"
-    ],
-    "name": "Godzilla Against Mechagodzilla",
-    "initial_release_date": "2002-12-15",
     "id": "/en/godzilla_against_mechagodzilla",
+    "initial_release_date": "2002-12-15",
+    "name": "Godzilla Against Mechagodzilla",
+    "genre": [
+      "Monster",
+      "Science Fiction",
+      "Cult film",
+      "World cinema",
+      "Action Film",
+      "Creature Film",
+      "Japanese Movies"
+    ],
     "directed_by": [
       "Masaaki Tezuka"
     ]
   },
   {
+    "id": "/en/godzilla_vs_megaguirus",
+    "initial_release_date": "2000-11-03",
+    "name": "Godzilla vs. Megaguirus",
     "genre": [
       "Monster",
       "World cinema",
@@ -14782,14 +14774,14 @@
       "Creature Film",
       "Japanese Movies"
     ],
-    "name": "Godzilla vs. Megaguirus",
-    "initial_release_date": "2000-11-03",
-    "id": "/en/godzilla_vs_megaguirus",
     "directed_by": [
       "Masaaki Tezuka"
     ]
   },
   {
+    "id": "/en/godzilla_tokyo_sos",
+    "initial_release_date": "2003-11-03",
+    "name": "Godzilla: Tokyo SOS",
     "genre": [
       "Monster",
       "Fantasy",
@@ -14799,91 +14791,91 @@
       "Cult film",
       "Japanese Movies"
     ],
-    "name": "Godzilla: Tokyo SOS",
-    "initial_release_date": "2003-11-03",
-    "id": "/en/godzilla_tokyo_sos",
     "directed_by": [
       "Masaaki Tezuka"
     ]
   },
   {
+    "id": "/wikipedia/fr/Godzilla$002C_Mothra_and_King_Ghidorah$003A_Giant_Monsters_All-Out_Attack",
+    "initial_release_date": "2001-11-03",
+    "name": "Godzilla, Mothra and King Ghidorah: Giant Monsters All-Out Attack",
     "genre": [
       "Science Fiction",
       "Action Film",
       "Adventure Film",
       "Drama"
     ],
-    "name": "Godzilla, Mothra and King Ghidorah: Giant Monsters All-Out Attack",
-    "initial_release_date": "2001-11-03",
-    "id": "/wikipedia/fr/Godzilla$002C_Mothra_and_King_Ghidorah$003A_Giant_Monsters_All-Out_Attack",
     "directed_by": [
       "Shusuke Kaneko"
     ]
   },
   {
+    "id": "/en/godzilla_final_wars",
+    "initial_release_date": "2004-11-29",
+    "name": "Godzilla: Final Wars",
     "genre": [
       "Fantasy",
       "Science Fiction",
       "Monster movie"
     ],
-    "name": "Godzilla: Final Wars",
-    "initial_release_date": "2004-11-29",
-    "id": "/en/godzilla_final_wars",
     "directed_by": [
       "Ryuhei Kitamura"
     ]
   },
   {
+    "id": "/en/going_the_distance",
+    "initial_release_date": "2004-08-20",
+    "name": "Going the Distance",
     "genre": [
       "Comedy"
     ],
-    "name": "Going the Distance",
-    "initial_release_date": "2004-08-20",
-    "id": "/en/going_the_distance",
     "directed_by": [
       "Mark Griffiths"
     ]
   },
   {
+    "id": "/en/going_to_the_mat",
+    "initial_release_date": "2004-03-19",
+    "name": "Going to the Mat",
     "genre": [
       "Family",
       "Sports",
       "Drama"
     ],
-    "name": "Going to the Mat",
-    "initial_release_date": "2004-03-19",
-    "id": "/en/going_to_the_mat",
     "directed_by": [
       "Stuart Gillard"
     ]
   },
   {
+    "id": "/en/going_upriver",
+    "initial_release_date": "2004-09-14",
+    "name": "Going Upriver",
     "genre": [
       "Documentary film",
       "War film",
       "Political cinema"
     ],
-    "name": "Going Upriver",
-    "initial_release_date": "2004-09-14",
-    "id": "/en/going_upriver",
     "directed_by": [
       "George Butler"
     ]
   },
   {
+    "id": "/en/golmaal",
+    "initial_release_date": "2006-07-14",
+    "name": "Golmaal: Fun Unlimited",
     "genre": [
       "Musical",
       "Musical comedy",
       "Comedy"
     ],
-    "name": "Golmaal: Fun Unlimited",
-    "initial_release_date": "2006-07-14",
-    "id": "/en/golmaal",
     "directed_by": [
       "Rohit Shetty"
     ]
   },
   {
+    "id": "/en/gone_in_sixty_seconds",
+    "initial_release_date": "2000-06-05",
+    "name": "Gone in 60 Seconds",
     "genre": [
       "Thriller",
       "Action Film",
@@ -14892,42 +14884,42 @@
       "Heist film",
       "Action/Adventure"
     ],
-    "name": "Gone in 60 Seconds",
-    "initial_release_date": "2000-06-05",
-    "id": "/en/gone_in_sixty_seconds",
     "directed_by": [
       "Dominic Sena"
     ]
   },
   {
+    "id": "/en/good_bye_lenin",
+    "initial_release_date": "2003-02-09",
+    "name": "Good bye, Lenin!",
     "genre": [
       "Romance Film",
       "Comedy",
       "Drama",
       "Tragicomedy"
     ],
-    "name": "Good bye, Lenin!",
-    "initial_release_date": "2003-02-09",
-    "id": "/en/good_bye_lenin",
     "directed_by": [
       "Wolfgang Becker"
     ]
   },
   {
+    "id": "/en/good_luck_chuck",
+    "initial_release_date": "2007-06-13",
+    "name": "Good Luck Chuck",
     "genre": [
       "Romance Film",
       "Fantasy",
       "Comedy",
       "Drama"
     ],
-    "name": "Good Luck Chuck",
-    "initial_release_date": "2007-06-13",
-    "id": "/en/good_luck_chuck",
     "directed_by": [
       "Mark Helfrich"
     ]
   },
   {
+    "id": "/en/good_night_and_good_luck",
+    "initial_release_date": "2005-09-01",
+    "name": "Good Night, and Good Luck",
     "genre": [
       "Political drama",
       "Historical drama",
@@ -14936,40 +14928,40 @@
       "Historical fiction",
       "Drama"
     ],
-    "name": "Good Night, and Good Luck",
-    "initial_release_date": "2005-09-01",
-    "id": "/en/good_night_and_good_luck",
     "directed_by": [
       "George Clooney"
     ]
   },
   {
+    "id": "/en/goodbye_dragon_inn",
+    "initial_release_date": "2003-12-12",
+    "name": "Goodbye, Dragon Inn",
     "genre": [
       "Comedy-drama",
       "Comedy of manners",
       "Comedy",
       "Drama"
     ],
-    "name": "Goodbye, Dragon Inn",
-    "initial_release_date": "2003-12-12",
-    "id": "/en/goodbye_dragon_inn",
     "directed_by": [
       "Tsai Ming-liang"
     ]
   },
   {
+    "id": "/en/gosford_park",
+    "initial_release_date": "2001-11-07",
+    "name": "Gosford Park",
     "genre": [
       "Mystery",
       "Drama"
     ],
-    "name": "Gosford Park",
-    "initial_release_date": "2001-11-07",
-    "id": "/en/gosford_park",
     "directed_by": [
       "Robert Altman"
     ]
   },
   {
+    "id": "/en/gothika",
+    "initial_release_date": "2003-11-13",
+    "name": "Gothika",
     "genre": [
       "Thriller",
       "Horror",
@@ -14978,40 +14970,40 @@
       "Crime Thriller",
       "Mystery"
     ],
-    "name": "Gothika",
-    "initial_release_date": "2003-11-13",
-    "id": "/en/gothika",
     "directed_by": [
       "Mathieu Kassovitz"
     ]
   },
   {
+    "id": "/en/gotta_kick_it_up",
+    "name": "Gotta Kick It Up!",
     "genre": [
       "Teen film",
       "Television film",
       "Children's/Family",
       "Family"
     ],
-    "name": "Gotta Kick It Up!",
-    "id": "/en/gotta_kick_it_up",
     "directed_by": [
       "Ram\u00f3n Men\u00e9ndez"
     ]
   },
   {
+    "id": "/en/goyas_ghosts",
+    "initial_release_date": "2006-11-08",
+    "name": "Goya's Ghosts",
     "genre": [
       "Biographical film",
       "War film",
       "Drama"
     ],
-    "name": "Goya's Ghosts",
-    "initial_release_date": "2006-11-08",
-    "id": "/en/goyas_ghosts",
     "directed_by": [
       "Milo\u0161 Forman"
     ]
   },
   {
+    "id": "/en/gozu",
+    "initial_release_date": "2003-07-12",
+    "name": "Gozu",
     "genre": [
       "Horror",
       "Surrealism",
@@ -15020,14 +15012,14 @@
       "Horror comedy",
       "Comedy"
     ],
-    "name": "Gozu",
-    "initial_release_date": "2003-07-12",
-    "id": "/en/gozu",
     "directed_by": [
       "Takashi Miike"
     ]
   },
   {
+    "id": "/en/grande_ecole",
+    "initial_release_date": "2004-02-04",
+    "name": "Grande \u00c9cole",
     "genre": [
       "World cinema",
       "LGBT",
@@ -15039,132 +15031,132 @@
       "Erotic Drama",
       "Drama"
     ],
-    "name": "Grande \u00c9cole",
-    "initial_release_date": "2004-02-04",
-    "id": "/en/grande_ecole",
     "directed_by": [
       "Robert Salis"
     ]
   },
   {
+    "id": "/en/grandmas_boy",
+    "initial_release_date": "2006-01-06",
+    "name": "Grandma's Boy",
     "genre": [
       "Stoner film",
       "Comedy"
     ],
-    "name": "Grandma's Boy",
-    "initial_release_date": "2006-01-06",
-    "id": "/en/grandmas_boy",
     "directed_by": [
       "Nicholaus Goossen"
     ]
   },
   {
+    "id": "/en/grayson_2004",
+    "initial_release_date": "2004-07-20",
+    "name": "Grayson",
     "genre": [
       "Indie film",
       "Fan film",
       "Short Film"
     ],
-    "name": "Grayson",
-    "initial_release_date": "2004-07-20",
-    "id": "/en/grayson_2004",
     "directed_by": [
       "John Fiorella"
     ]
   },
   {
+    "id": "/en/grbavica_2006",
+    "initial_release_date": "2006-02-12",
+    "name": "Grbavica: The Land of My Dreams",
     "genre": [
       "War film",
       "Art film",
       "Drama"
     ],
-    "name": "Grbavica: The Land of My Dreams",
-    "initial_release_date": "2006-02-12",
-    "id": "/en/grbavica_2006",
     "directed_by": [
       "Jasmila \u017dbani\u0107"
     ]
   },
   {
+    "id": "/en/green_street",
+    "initial_release_date": "2005-03-12",
+    "name": "Green Street",
     "genre": [
       "Sports",
       "Crime Fiction",
       "Drama"
     ],
-    "name": "Green Street",
-    "initial_release_date": "2005-03-12",
-    "id": "/en/green_street",
     "directed_by": [
       "Lexi Alexander"
     ]
   },
   {
+    "id": "/en/green_tea_2003",
+    "initial_release_date": "2003-08-18",
+    "name": "Green Tea",
     "genre": [
       "Romance Film",
       "Drama"
     ],
-    "name": "Green Tea",
-    "initial_release_date": "2003-08-18",
-    "id": "/en/green_tea_2003",
     "directed_by": [
       "Zhang Yuan"
     ]
   },
   {
+    "id": "/en/greenfingers",
+    "initial_release_date": "2001-09-14",
+    "name": "Greenfingers",
     "genre": [
       "Comedy-drama",
       "Prison film",
       "Comedy",
       "Drama"
     ],
-    "name": "Greenfingers",
-    "initial_release_date": "2001-09-14",
-    "id": "/en/greenfingers",
     "directed_by": [
       "Joel Hershman"
     ]
   },
   {
+    "id": "/en/gridiron_gang",
+    "initial_release_date": "2006-09-15",
+    "name": "Gridiron Gang",
     "genre": [
       "Sports",
       "Crime Fiction",
       "Drama"
     ],
-    "name": "Gridiron Gang",
-    "initial_release_date": "2006-09-15",
-    "id": "/en/gridiron_gang",
     "directed_by": [
       "Phil Joanou"
     ]
   },
   {
+    "id": "/en/grill_point",
+    "initial_release_date": "2002-02-12",
+    "name": "Grill Point",
     "genre": [
       "Drama",
       "Comedy",
       "Tragicomedy",
       "Comedy-drama"
     ],
-    "name": "Grill Point",
-    "initial_release_date": "2002-02-12",
-    "id": "/en/grill_point",
     "directed_by": [
       "Andreas Dresen"
     ]
   },
   {
+    "id": "/en/grilled",
+    "initial_release_date": "2006-07-11",
+    "name": "Grilled",
     "genre": [
       "Black comedy",
       "Buddy film",
       "Workplace Comedy",
       "Comedy"
     ],
-    "name": "Grilled",
-    "initial_release_date": "2006-07-11",
-    "id": "/en/grilled",
     "directed_by": [
       "Jason Ensler"
     ]
   },
   {
+    "id": "/en/grind_house",
+    "initial_release_date": "2007-04-06",
+    "name": "Grindhouse",
     "genre": [
       "Slasher",
       "Thriller",
@@ -15172,9 +15164,6 @@
       "Horror",
       "Zombie Film"
     ],
-    "name": "Grindhouse",
-    "initial_release_date": "2007-04-06",
-    "id": "/en/grind_house",
     "directed_by": [
       "Robert Rodriguez",
       "Quentin Tarantino",
@@ -15185,6 +15174,9 @@
     ]
   },
   {
+    "id": "/en/grizzly_falls",
+    "initial_release_date": "2004-06-28",
+    "name": "Grizzly Falls",
     "genre": [
       "Adventure Film",
       "Animal Picture",
@@ -15192,66 +15184,66 @@
       "Family",
       "Drama"
     ],
-    "name": "Grizzly Falls",
-    "initial_release_date": "2004-06-28",
-    "id": "/en/grizzly_falls",
     "directed_by": [
       "Stewart Raffill"
     ]
   },
   {
+    "id": "/en/grizzly_man",
+    "initial_release_date": "2005-01-24",
+    "name": "Grizzly Man",
     "genre": [
       "Documentary film",
       "Biographical film"
     ],
-    "name": "Grizzly Man",
-    "initial_release_date": "2005-01-24",
-    "id": "/en/grizzly_man",
     "directed_by": [
       "Werner Herzog"
     ]
   },
   {
+    "id": "/en/grodmin",
+    "name": "GRODMIN",
     "genre": [
       "Avant-garde",
       "Experimental film",
       "Drama"
     ],
-    "name": "GRODMIN",
-    "id": "/en/grodmin",
     "directed_by": [
       "Jim Horwitz"
     ]
   },
   {
+    "id": "/en/gudumba_shankar",
+    "initial_release_date": "2004-09-09",
+    "name": "Gudumba Shankar",
     "genre": [
       "Action Film",
       "Drama",
       "Tollywood",
       "World cinema"
     ],
-    "name": "Gudumba Shankar",
-    "initial_release_date": "2004-09-09",
-    "id": "/en/gudumba_shankar",
     "directed_by": [
       "Veera Shankar"
     ]
   },
   {
+    "id": "/en/che_part_two",
+    "initial_release_date": "2008-05-21",
+    "name": "Che: Part Two",
     "genre": [
       "Biographical film",
       "War film",
       "Historical drama",
       "Drama"
     ],
-    "name": "Che: Part Two",
-    "initial_release_date": "2008-05-21",
-    "id": "/en/che_part_two",
     "directed_by": [
       "Steven Soderbergh"
     ]
   },
   {
+    "id": "/en/guess_who_2005",
+    "initial_release_date": "2005-03-25",
+    "name": "Guess Who",
     "genre": [
       "Romance Film",
       "Romantic comedy",
@@ -15259,28 +15251,28 @@
       "Domestic Comedy",
       "Comedy"
     ],
-    "name": "Guess Who",
-    "initial_release_date": "2005-03-25",
-    "id": "/en/guess_who_2005",
     "directed_by": [
       "Kevin Rodney Sullivan"
     ]
   },
   {
+    "id": "/en/gunner_palace",
+    "initial_release_date": "2005-03-04",
+    "name": "Gunner Palace",
     "genre": [
       "Documentary film",
       "Indie film",
       "War film"
     ],
-    "name": "Gunner Palace",
-    "initial_release_date": "2005-03-04",
-    "id": "/en/gunner_palace",
     "directed_by": [
       "Michael Tucker",
       "Petra Epperlein"
     ]
   },
   {
+    "id": "/en/guru_2007",
+    "initial_release_date": "2007-01-12",
+    "name": "Guru",
     "genre": [
       "Biographical film",
       "Musical",
@@ -15288,14 +15280,14 @@
       "Drama",
       "Musical Drama"
     ],
-    "name": "Guru",
-    "initial_release_date": "2007-01-12",
-    "id": "/en/guru_2007",
     "directed_by": [
       "Mani Ratnam"
     ]
   },
   {
+    "id": "/en/primeval_2007",
+    "initial_release_date": "2007-01-12",
+    "name": "Primeval",
     "genre": [
       "Thriller",
       "Horror",
@@ -15303,14 +15295,13 @@
       "Action/Adventure",
       "Action Film"
     ],
-    "name": "Primeval",
-    "initial_release_date": "2007-01-12",
-    "id": "/en/primeval_2007",
     "directed_by": [
       "Michael Katleman"
     ]
   },
   {
+    "id": "/en/gypsy_83",
+    "name": "Gypsy 83",
     "genre": [
       "Coming of age",
       "LGBT",
@@ -15321,13 +15312,14 @@
       "Comedy",
       "Drama"
     ],
-    "name": "Gypsy 83",
-    "id": "/en/gypsy_83",
     "directed_by": [
       "Todd Stephens"
     ]
   },
   {
+    "id": "/en/h_2002",
+    "initial_release_date": "2002-12-27",
+    "name": "H",
     "genre": [
       "Thriller",
       "Horror",
@@ -15337,28 +15329,28 @@
       "East Asian cinema",
       "World cinema"
     ],
-    "name": "H",
-    "initial_release_date": "2002-12-27",
-    "id": "/en/h_2002",
     "directed_by": [
       "Jong-hyuk Lee"
     ]
   },
   {
+    "id": "/en/h_g_wells_the_war_of_the_worlds",
+    "initial_release_date": "2005-06-14",
+    "name": "H. G. Wells' The War of the Worlds",
     "genre": [
       "Indie film",
       "Steampunk",
       "Science Fiction",
       "Thriller"
     ],
-    "name": "H. G. Wells' The War of the Worlds",
-    "initial_release_date": "2005-06-14",
-    "id": "/en/h_g_wells_the_war_of_the_worlds",
     "directed_by": [
       "Timothy Hines"
     ]
   },
   {
+    "id": "/en/h_g_wells_war_of_the_worlds",
+    "initial_release_date": "2005-06-28",
+    "name": "H. G. Wells' War of the Worlds",
     "genre": [
       "Indie film",
       "Science Fiction",
@@ -15370,38 +15362,37 @@
       "Mockbuster",
       "Drama"
     ],
-    "name": "H. G. Wells' War of the Worlds",
-    "initial_release_date": "2005-06-28",
-    "id": "/en/h_g_wells_war_of_the_worlds",
     "directed_by": [
       "David Michael Latt"
     ]
   },
   {
+    "id": "/en/hadh_kar_di_aapne",
+    "initial_release_date": "2000-04-14",
+    "name": "Hadh Kar Di Aapne",
     "genre": [
       "Romantic comedy",
       "Bollywood"
     ],
-    "name": "Hadh Kar Di Aapne",
-    "initial_release_date": "2000-04-14",
-    "id": "/en/hadh_kar_di_aapne",
     "directed_by": [
       "Manoj Agrawal"
     ]
   },
   {
+    "id": "/en/haggard_the_movie",
+    "initial_release_date": "2003-06-24",
+    "name": "Haggard: The Movie",
     "genre": [
       "Indie film",
       "Comedy"
     ],
-    "name": "Haggard: The Movie",
-    "initial_release_date": "2003-06-24",
-    "id": "/en/haggard_the_movie",
     "directed_by": [
       "Bam Margera"
     ]
   },
   {
+    "id": "/en/haiku_tunnel",
+    "name": "Haiku Tunnel",
     "genre": [
       "Black comedy",
       "Indie film",
@@ -15409,40 +15400,40 @@
       "Workplace Comedy",
       "Comedy"
     ],
-    "name": "Haiku Tunnel",
-    "id": "/en/haiku_tunnel",
     "directed_by": [
       "Jacob Kornbluth",
       "Josh Kornbluth"
     ]
   },
   {
+    "id": "/en/hairspray",
+    "initial_release_date": "2007-07-13",
+    "name": "Hairspray",
     "genre": [
       "Musical",
       "Romance Film",
       "Comedy",
       "Musical comedy"
     ],
-    "name": "Hairspray",
-    "initial_release_date": "2007-07-13",
-    "id": "/en/hairspray",
     "directed_by": [
       "Adam Shankman"
     ]
   },
   {
+    "id": "/en/half_nelson",
+    "initial_release_date": "2006-01-23",
+    "name": "Half Nelson",
     "genre": [
       "Social problem film",
       "Drama"
     ],
-    "name": "Half Nelson",
-    "initial_release_date": "2006-01-23",
-    "id": "/en/half_nelson",
     "directed_by": [
       "Ryan Fleck"
     ]
   },
   {
+    "id": "/en/half_life_2006",
+    "name": "Half-Life",
     "genre": [
       "Fantasy",
       "Indie film",
@@ -15450,27 +15441,28 @@
       "Fantasy Drama",
       "Drama"
     ],
-    "name": "Half-Life",
-    "id": "/en/half_life_2006",
     "directed_by": [
       "Jennifer Phang"
     ]
   },
   {
+    "id": "/en/halloween_resurrection",
+    "initial_release_date": "2002-07-12",
+    "name": "Halloween Resurrection",
     "genre": [
       "Slasher",
       "Horror",
       "Cult film",
       "Teen film"
     ],
-    "name": "Halloween Resurrection",
-    "initial_release_date": "2002-07-12",
-    "id": "/en/halloween_resurrection",
     "directed_by": [
       "Rick Rosenthal"
     ]
   },
   {
+    "id": "/en/halloweentown_high",
+    "initial_release_date": "2004-10-08",
+    "name": "Halloweentown High",
     "genre": [
       "Fantasy",
       "Teen film",
@@ -15478,69 +15470,69 @@
       "Comedy",
       "Family"
     ],
-    "name": "Halloweentown High",
-    "initial_release_date": "2004-10-08",
-    "id": "/en/halloweentown_high",
     "directed_by": [
       "Mark A.Z. Dipp\u00e9"
     ]
   },
   {
+    "id": "/en/halloweentown_ii_kalabars_revenge",
+    "initial_release_date": "2001-10-12",
+    "name": "Halloweentown II: Kalabar's Revenge",
     "genre": [
       "Fantasy",
       "Children's Fantasy",
       "Children's/Family",
       "Family"
     ],
-    "name": "Halloweentown II: Kalabar's Revenge",
-    "initial_release_date": "2001-10-12",
-    "id": "/en/halloweentown_ii_kalabars_revenge",
     "directed_by": [
       "Mary Lambert"
     ]
   },
   {
+    "id": "/en/halloweentown_witch_u",
+    "initial_release_date": "2006-10-20",
+    "name": "Return to Halloweentown",
     "genre": [
       "Family",
       "Children's/Family",
       "Fantasy Comedy",
       "Comedy"
     ],
-    "name": "Return to Halloweentown",
-    "initial_release_date": "2006-10-20",
-    "id": "/en/halloweentown_witch_u",
     "directed_by": [
       "David Jackson"
     ]
   },
   {
+    "id": "/en/hamlet_2000",
+    "initial_release_date": "2000-05-12",
+    "name": "Hamlet",
     "genre": [
       "Thriller",
       "Romance Film",
       "Drama"
     ],
-    "name": "Hamlet",
-    "initial_release_date": "2000-05-12",
-    "id": "/en/hamlet_2000",
     "directed_by": [
       "Michael Almereyda"
     ]
   },
   {
+    "id": "/en/hana_alice",
+    "initial_release_date": "2004-03-13",
+    "name": "Hana and Alice",
     "genre": [
       "Romance Film",
       "Romantic comedy",
       "Comedy",
       "Drama"
     ],
-    "name": "Hana and Alice",
-    "initial_release_date": "2004-03-13",
-    "id": "/en/hana_alice",
     "directed_by": [
       "Shunji Iwai"
     ]
   },
   {
+    "id": "/en/hannibal",
+    "initial_release_date": "2001-02-09",
+    "name": "Hannibal",
     "genre": [
       "Thriller",
       "Psychological thriller",
@@ -15550,39 +15542,39 @@
       "Crime Thriller",
       "Drama"
     ],
-    "name": "Hannibal",
-    "initial_release_date": "2001-02-09",
-    "id": "/en/hannibal",
     "directed_by": [
       "Ridley Scott"
     ]
   },
   {
+    "id": "/en/hans_och_hennes",
+    "initial_release_date": "2001-01-29",
+    "name": "Making Babies",
     "genre": [
       "Drama"
     ],
-    "name": "Making Babies",
-    "initial_release_date": "2001-01-29",
-    "id": "/en/hans_och_hennes",
     "directed_by": [
       "Daniel Lind Lagerl\u00f6f"
     ]
   },
   {
+    "id": "/en/hanuman_2005",
+    "initial_release_date": "2005-10-21",
+    "name": "Hanuman",
     "genre": [
       "Animation",
       "Bollywood",
       "World cinema"
     ],
-    "name": "Hanuman",
-    "initial_release_date": "2005-10-21",
-    "id": "/en/hanuman_2005",
     "directed_by": [
       "V.G. Samant",
       "Milind Ukey"
     ]
   },
   {
+    "id": "/en/hanuman_junction",
+    "initial_release_date": "2001-12-21",
+    "name": "Hanuman Junction",
     "genre": [
       "Action Film",
       "Comedy",
@@ -15590,14 +15582,14 @@
       "Tollywood",
       "World cinema"
     ],
-    "name": "Hanuman Junction",
-    "initial_release_date": "2001-12-21",
-    "id": "/en/hanuman_junction",
     "directed_by": [
       "M.Raja"
     ]
   },
   {
+    "id": "/en/happily_never_after",
+    "initial_release_date": "2006-12-16",
+    "name": "Happily N'Ever After",
     "genre": [
       "Fantasy",
       "Animation",
@@ -15605,15 +15597,15 @@
       "Comedy",
       "Adventure Film"
     ],
-    "name": "Happily N'Ever After",
-    "initial_release_date": "2006-12-16",
-    "id": "/en/happily_never_after",
     "directed_by": [
       "Paul J. Bolger",
       "Yvette Kaplan"
     ]
   },
   {
+    "id": "/en/happy_2006",
+    "initial_release_date": "2006-01-27",
+    "name": "Happy",
     "genre": [
       "Romance Film",
       "Musical",
@@ -15622,14 +15614,14 @@
       "Musical comedy",
       "Musical Drama"
     ],
-    "name": "Happy",
-    "initial_release_date": "2006-01-27",
-    "id": "/en/happy_2006",
     "directed_by": [
       "A. Karunakaran"
     ]
   },
   {
+    "id": "/en/happy_endings",
+    "initial_release_date": "2005-01-20",
+    "name": "Happy Endings",
     "genre": [
       "LGBT",
       "Music",
@@ -15640,28 +15632,28 @@
       "Comedy",
       "Drama"
     ],
-    "name": "Happy Endings",
-    "initial_release_date": "2005-01-20",
-    "id": "/en/happy_endings",
     "directed_by": [
       "Don Roos"
     ]
   },
   {
+    "id": "/en/happy_ero_christmas",
+    "initial_release_date": "2003-12-17",
+    "name": "Happy Ero Christmas",
     "genre": [
       "Romance Film",
       "Comedy",
       "East Asian cinema",
       "World cinema"
     ],
-    "name": "Happy Ero Christmas",
-    "initial_release_date": "2003-12-17",
-    "id": "/en/happy_ero_christmas",
     "directed_by": [
       "Lee Geon-dong"
     ]
   },
   {
+    "id": "/en/happy_feet",
+    "initial_release_date": "2006-11-16",
+    "name": "Happy Feet",
     "genre": [
       "Family",
       "Animation",
@@ -15670,9 +15662,6 @@
       "Musical",
       "Musical comedy"
     ],
-    "name": "Happy Feet",
-    "initial_release_date": "2006-11-16",
-    "id": "/en/happy_feet",
     "directed_by": [
       "George Miller",
       "Warren Coleman",
@@ -15680,6 +15669,9 @@
     ]
   },
   {
+    "id": "/wikipedia/en_title/I_Love_New_Year",
+    "initial_release_date": "2013-12-30",
+    "name": "I Love New Year",
     "genre": [
       "Caper story",
       "Crime Fiction",
@@ -15688,15 +15680,15 @@
       "Bollywood",
       "World cinema"
     ],
-    "name": "I Love New Year",
-    "initial_release_date": "2013-12-30",
-    "id": "/wikipedia/en_title/I_Love_New_Year",
     "directed_by": [
       "Radhika Rao",
       "Vinay Sapru"
     ]
   },
   {
+    "id": "/en/har_dil_jo_pyar_karega",
+    "initial_release_date": "2000-07-24",
+    "name": "Har Dil Jo Pyar Karega",
     "genre": [
       "Musical",
       "Romance Film",
@@ -15704,14 +15696,13 @@
       "Musical Drama",
       "Drama"
     ],
-    "name": "Har Dil Jo Pyar Karega",
-    "initial_release_date": "2000-07-24",
-    "id": "/en/har_dil_jo_pyar_karega",
     "directed_by": [
       "Raj Kanwar"
     ]
   },
   {
+    "id": "/en/hard_candy",
+    "name": "Hard Candy",
     "genre": [
       "Psychological thriller",
       "Thriller",
@@ -15720,13 +15711,14 @@
       "Erotic thriller",
       "Drama"
     ],
-    "name": "Hard Candy",
-    "id": "/en/hard_candy",
     "directed_by": [
       "David Slade"
     ]
   },
   {
+    "id": "/en/hard_luck",
+    "initial_release_date": "2006-10-17",
+    "name": "Hard Luck",
     "genre": [
       "Thriller",
       "Crime Fiction",
@@ -15734,54 +15726,54 @@
       "Action Film",
       "Drama"
     ],
-    "name": "Hard Luck",
-    "initial_release_date": "2006-10-17",
-    "id": "/en/hard_luck",
     "directed_by": [
       "Mario Van Peebles"
     ]
   },
   {
+    "id": "/en/hardball",
+    "initial_release_date": "2001-09-14",
+    "name": "Hardball",
     "genre": [
       "Sports",
       "Drama"
     ],
-    "name": "Hardball",
-    "initial_release_date": "2001-09-14",
-    "id": "/en/hardball",
     "directed_by": [
       "Brian Robbins"
     ]
   },
   {
+    "id": "/en/harold_kumar_go_to_white_castle",
+    "initial_release_date": "2004-05-20",
+    "name": "Harold &amp; Kumar Go to White Castle",
     "genre": [
       "Stoner film",
       "Buddy film",
       "Adventure Film",
       "Comedy"
     ],
-    "name": "Harold &amp; Kumar Go to White Castle",
-    "initial_release_date": "2004-05-20",
-    "id": "/en/harold_kumar_go_to_white_castle",
     "directed_by": [
       "Danny Leiner"
     ]
   },
   {
+    "id": "/en/harry_potter_and_the_chamber_of_secrets_2002",
+    "initial_release_date": "2002-11-03",
+    "name": "Harry Potter and the Chamber of Secrets",
     "genre": [
       "Adventure Film",
       "Family",
       "Fantasy",
       "Mystery"
     ],
-    "name": "Harry Potter and the Chamber of Secrets",
-    "initial_release_date": "2002-11-03",
-    "id": "/en/harry_potter_and_the_chamber_of_secrets_2002",
     "directed_by": [
       "Chris Columbus"
     ]
   },
   {
+    "id": "/en/harry_potter_and_the_goblet_of_fire_2005",
+    "initial_release_date": "2005-11-06",
+    "name": "Harry Potter and the Goblet of Fire",
     "genre": [
       "Family",
       "Fantasy",
@@ -15795,14 +15787,14 @@
       "Fantasy Adventure",
       "Fiction"
     ],
-    "name": "Harry Potter and the Goblet of Fire",
-    "initial_release_date": "2005-11-06",
-    "id": "/en/harry_potter_and_the_goblet_of_fire_2005",
     "directed_by": [
       "Mike Newell"
     ]
   },
   {
+    "id": "/en/harry_potter_and_the_half_blood_prince_2008",
+    "initial_release_date": "2009-07-06",
+    "name": "Harry Potter and the Half-Blood Prince",
     "genre": [
       "Adventure Film",
       "Fantasy",
@@ -15815,14 +15807,14 @@
       "Fantasy Adventure",
       "Fiction"
     ],
-    "name": "Harry Potter and the Half-Blood Prince",
-    "initial_release_date": "2009-07-06",
-    "id": "/en/harry_potter_and_the_half_blood_prince_2008",
     "directed_by": [
       "David Yates"
     ]
   },
   {
+    "id": "/en/harry_potter_and_the_order_of_the_phoenix_2007",
+    "initial_release_date": "2007-06-28",
+    "name": "Harry Potter and the Order of the Phoenix",
     "genre": [
       "Family",
       "Mystery",
@@ -15831,9 +15823,6 @@
       "Fantasy Adventure",
       "Fiction"
     ],
-    "name": "Harry Potter and the Order of the Phoenix",
-    "initial_release_date": "2007-06-28",
-    "id": "/en/harry_potter_and_the_order_of_the_phoenix_2007",
     "directed_by": [
       "David Yates"
     ]
diff --git a/solr/example/films/films.xml b/solr/example/films/films.xml
index a4dd961..e801ad4 100644
--- a/solr/example/films/films.xml
+++ b/solr/example/films/films.xml
@@ -1,15 +1,8 @@
 <?xml version="1.0" ?>
 <add>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="initial_release_date">2014-03-28</field>
-    <field name="directed_by">Wes Anderson</field>
-    <field name="name">The Grand Budapest Hotel</field>
-    <field name="id">/en/001</field>
-  </doc>
-  <doc>
     <field name="id">/en/45_2006</field>
-    <field name="name">.45</field>
+    <field name="directed_by">Gary Lennon</field>
     <field name="initial_release_date">2006-11-30</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Thriller</field>
@@ -19,11 +12,11 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Gary Lennon</field>
+    <field name="name">.45</field>
   </doc>
   <doc>
     <field name="id">/en/9_2005</field>
-    <field name="name">9</field>
+    <field name="directed_by">Shane Acker</field>
     <field name="initial_release_date">2005-04-21</field>
     <field name="genre">Computer Animation</field>
     <field name="genre">Animation</field>
@@ -32,19 +25,19 @@
     <field name="genre">Short Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Fantasy</field>
-    <field name="directed_by">Shane Acker</field>
+    <field name="name">9</field>
   </doc>
   <doc>
     <field name="id">/en/69_2004</field>
-    <field name="name">69</field>
+    <field name="directed_by">Lee Sang-il</field>
     <field name="initial_release_date">2004-07-10</field>
     <field name="genre">Japanese Movies</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Lee Sang-il</field>
+    <field name="name">69</field>
   </doc>
   <doc>
     <field name="id">/en/300_2007</field>
-    <field name="name">300</field>
+    <field name="directed_by">Zack Snyder</field>
     <field name="initial_release_date">2006-12-09</field>
     <field name="genre">Epic film</field>
     <field name="genre">Adventure Film</field>
@@ -54,122 +47,116 @@
     <field name="genre">War film</field>
     <field name="genre">Superhero movie</field>
     <field name="genre">Historical Epic</field>
-    <field name="directed_by">Zack Snyder</field>
+    <field name="name">300</field>
   </doc>
   <doc>
     <field name="id">/en/2046_2004</field>
-    <field name="name">2046</field>
+    <field name="directed_by">Wong Kar-wai</field>
     <field name="initial_release_date">2004-05-20</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Wong Kar-wai</field>
+    <field name="name">2046</field>
   </doc>
   <doc>
     <field name="id">/en/quien_es_el_senor_lopez</field>
-    <field name="name">¿Quién es el señor López?</field>
-    <field name="genre">Documentary film</field>
     <field name="directed_by">Luis Mandoki</field>
+    <field name="genre">Documentary film</field>
+    <field name="name">¿Quién es el señor López?</field>
   </doc>
   <doc>
     <field name="id">/en/weird_al_yankovic_the_ultimate_video_collection</field>
-    <field name="name">&quot;Weird Al&quot; Yankovic: The Ultimate Video Collection</field>
+    <field name="directed_by">Jay Levey</field>
+    <field name="directed_by">&quot;Weird Al&quot; Yankovic</field>
     <field name="initial_release_date">2003-11-04</field>
     <field name="genre">Music video</field>
     <field name="genre">Parody</field>
-    <field name="directed_by">Jay Levey</field>
-    <field name="directed_by">&quot;Weird Al&quot; Yankovic</field>
+    <field name="name">&quot;Weird Al&quot; Yankovic: The Ultimate Video Collection</field>
   </doc>
   <doc>
     <field name="id">/en/15_park_avenue</field>
-    <field name="name">15 Park Avenue</field>
+    <field name="directed_by">Aparna Sen</field>
     <field name="initial_release_date">2005-10-27</field>
     <field name="genre">Art film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Aparna Sen</field>
+    <field name="name">15 Park Avenue</field>
   </doc>
   <doc>
     <field name="id">/en/2_fast_2_furious</field>
-    <field name="name">2 Fast 2 Furious</field>
+    <field name="directed_by">John Singleton</field>
     <field name="initial_release_date">2003-06-03</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
-    <field name="directed_by">John Singleton</field>
+    <field name="name">2 Fast 2 Furious</field>
   </doc>
   <doc>
     <field name="id">/en/7g_rainbow_colony</field>
-    <field name="name">7G Rainbow Colony</field>
+    <field name="directed_by">Selvaraghavan</field>
     <field name="initial_release_date">2004-10-15</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Selvaraghavan</field>
+    <field name="name">7G Rainbow Colony</field>
   </doc>
   <doc>
     <field name="id">/en/3-iron</field>
-    <field name="name">3-Iron</field>
+    <field name="directed_by">Kim Ki-duk</field>
     <field name="initial_release_date">2004-09-07</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Romance Film</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Kim Ki-duk</field>
+    <field name="name">3-Iron</field>
   </doc>
   <doc>
     <field name="id">/en/10_5_apocalypse</field>
-    <field name="name">10.5: Apocalypse</field>
+    <field name="directed_by">John Lafia</field>
     <field name="initial_release_date">2006-03-18</field>
     <field name="genre">Disaster Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Television film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
-    <field name="directed_by">John Lafia</field>
+    <field name="name">10.5: Apocalypse</field>
   </doc>
   <doc>
     <field name="id">/en/8_mile</field>
-    <field name="name">8 Mile</field>
+    <field name="directed_by">Curtis Hanson</field>
     <field name="initial_release_date">2002-09-08</field>
     <field name="genre">Musical</field>
     <field name="genre">Hip hop film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Curtis Hanson</field>
+    <field name="name">8 Mile</field>
   </doc>
   <doc>
     <field name="id">/en/100_girls</field>
-    <field name="name">100 Girls</field>
+    <field name="directed_by">Michael Davis</field>
     <field name="initial_release_date">2001-09-25</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Teen film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Michael Davis</field>
+    <field name="name">100 Girls</field>
   </doc>
   <doc>
     <field name="id">/en/40_days_and_40_nights</field>
-    <field name="name">40 Days and 40 Nights</field>
+    <field name="directed_by">Michael Lehmann</field>
     <field name="initial_release_date">2002-03-01</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Sex comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Lehmann</field>
+    <field name="name">40 Days and 40 Nights</field>
   </doc>
   <doc>
     <field name="id">/en/50_cent_the_new_breed</field>
-    <field name="name">50 Cent: The New Breed</field>
-    <field name="initial_release_date">2003-04-15</field>
-    <field name="genre">Documentary film</field>
-    <field name="genre">Music</field>
-    <field name="genre">Concert film</field>
-    <field name="genre">Biographical film</field>
     <field name="directed_by">Don Robinson</field>
     <field name="directed_by">Damon Johnson</field>
     <field name="directed_by">Philip Atwell</field>
@@ -178,42 +165,48 @@
     <field name="directed_by">John Quigley</field>
     <field name="directed_by">Jessy Terrero</field>
     <field name="directed_by">Noa Shaw</field>
+    <field name="initial_release_date">2003-04-15</field>
+    <field name="genre">Documentary film</field>
+    <field name="genre">Music</field>
+    <field name="genre">Concert film</field>
+    <field name="genre">Biographical film</field>
+    <field name="name">50 Cent: The New Breed</field>
   </doc>
   <doc>
     <field name="id">/en/3_the_dale_earnhardt_story</field>
-    <field name="name">3: The Dale Earnhardt Story</field>
+    <field name="directed_by">Russell Mulcahy</field>
     <field name="initial_release_date">2004-12-11</field>
     <field name="genre">Sports</field>
     <field name="genre">Auto racing</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Russell Mulcahy</field>
+    <field name="name">3: The Dale Earnhardt Story</field>
   </doc>
   <doc>
     <field name="id">/en/61__2001</field>
-    <field name="name">61*</field>
+    <field name="directed_by">Billy Crystal</field>
     <field name="initial_release_date">2001-04-28</field>
     <field name="genre">Sports</field>
     <field name="genre">History</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Television film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Billy Crystal</field>
+    <field name="name">61*</field>
   </doc>
   <doc>
     <field name="id">/en/24_hour_party_people</field>
-    <field name="name">24 Hour Party People</field>
+    <field name="directed_by">Michael Winterbottom</field>
     <field name="initial_release_date">2002-02-13</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Music</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Winterbottom</field>
+    <field name="name">24 Hour Party People</field>
   </doc>
   <doc>
     <field name="id">/en/10th_wolf</field>
-    <field name="name">10th &amp;amp; Wolf</field>
+    <field name="directed_by">Robert Moresco</field>
     <field name="initial_release_date">2006-08-18</field>
     <field name="genre">Mystery</field>
     <field name="genre">Thriller</field>
@@ -221,126 +214,126 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Gangster Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Robert Moresco</field>
+    <field name="name">10th &amp;amp; Wolf</field>
   </doc>
   <doc>
     <field name="id">/en/25th_hour</field>
-    <field name="name">25th Hour</field>
+    <field name="directed_by">Spike Lee</field>
     <field name="initial_release_date">2002-12-16</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Spike Lee</field>
+    <field name="name">25th Hour</field>
   </doc>
   <doc>
     <field name="id">/en/7_seconds_2005</field>
-    <field name="name">7 Seconds</field>
+    <field name="directed_by">Simon Fellows</field>
     <field name="initial_release_date">2005-06-28</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
-    <field name="directed_by">Simon Fellows</field>
+    <field name="name">7 Seconds</field>
   </doc>
   <doc>
     <field name="id">/en/28_days_later</field>
-    <field name="name">28 Days Later</field>
+    <field name="directed_by">Danny Boyle</field>
     <field name="initial_release_date">2002-11-01</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
-    <field name="directed_by">Danny Boyle</field>
+    <field name="name">28 Days Later</field>
   </doc>
   <doc>
     <field name="id">/en/21_grams</field>
-    <field name="name">21 Grams</field>
+    <field name="directed_by">Alejandro González Iñárritu</field>
     <field name="initial_release_date">2003-09-05</field>
     <field name="genre">Thriller</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Alejandro González Iñárritu</field>
+    <field name="name">21 Grams</field>
   </doc>
   <doc>
     <field name="id">/en/9th_company</field>
-    <field name="name">The 9th Company</field>
+    <field name="directed_by">Fedor Bondarchuk</field>
     <field name="initial_release_date">2005-09-29</field>
     <field name="genre">War film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Historical fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Fedor Bondarchuk</field>
+    <field name="name">The 9th Company</field>
   </doc>
   <doc>
     <field name="id">/en/102_dalmatians</field>
-    <field name="name">102 Dalmatians</field>
+    <field name="directed_by">Kevin Lima</field>
     <field name="initial_release_date">2000-11-22</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Kevin Lima</field>
+    <field name="name">102 Dalmatians</field>
   </doc>
   <doc>
     <field name="id">/en/16_years_of_alcohol</field>
-    <field name="name">16 Years of Alcohol</field>
+    <field name="directed_by">Richard Jobson</field>
     <field name="initial_release_date">2003-08-14</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Richard Jobson</field>
+    <field name="name">16 Years of Alcohol</field>
   </doc>
   <doc>
     <field name="id">/en/12b</field>
-    <field name="name">12B</field>
+    <field name="directed_by">Jeeva</field>
     <field name="initial_release_date">2001-09-28</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jeeva</field>
+    <field name="name">12B</field>
   </doc>
   <doc>
     <field name="id">/en/2009_lost_memories</field>
-    <field name="name">2009 Lost Memories</field>
+    <field name="directed_by">Lee Si-myung</field>
     <field name="initial_release_date">2002-02-01</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Lee Si-myung</field>
+    <field name="name">2009 Lost Memories</field>
   </doc>
   <doc>
     <field name="id">/en/16_blocks</field>
-    <field name="name">16 Blocks</field>
+    <field name="directed_by">Richard Donner</field>
     <field name="initial_release_date">2006-03-01</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Richard Donner</field>
+    <field name="name">16 Blocks</field>
   </doc>
   <doc>
     <field name="id">/en/15_minutes</field>
-    <field name="name">15 Minutes</field>
+    <field name="directed_by">John Herzfeld</field>
     <field name="initial_release_date">2001-03-01</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">John Herzfeld</field>
+    <field name="name">15 Minutes</field>
   </doc>
   <doc>
     <field name="id">/en/50_first_dates</field>
-    <field name="name">50 First Dates</field>
+    <field name="directed_by">Peter Segal</field>
     <field name="initial_release_date">2004-02-13</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Peter Segal</field>
+    <field name="name">50 First Dates</field>
   </doc>
   <doc>
     <field name="id">/en/9_songs</field>
-    <field name="name">9 Songs</field>
+    <field name="directed_by">Michael Winterbottom</field>
     <field name="initial_release_date">2004-05-16</field>
     <field name="genre">Erotica</field>
     <field name="genre">Musical</field>
@@ -348,39 +341,40 @@
     <field name="genre">Erotic Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Winterbottom</field>
+    <field name="name">9 Songs</field>
   </doc>
   <doc>
     <field name="id">/en/20_fingers_2004</field>
-    <field name="name">20 Fingers</field>
+    <field name="directed_by">Mania Akbari</field>
     <field name="initial_release_date">2004-09-01</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Mania Akbari</field>
+    <field name="name">20 Fingers</field>
   </doc>
   <doc>
     <field name="id">/en/3_needles</field>
-    <field name="name">3 Needles</field>
+    <field name="directed_by">Thom Fitzgerald</field>
     <field name="initial_release_date">2006-12-01</field>
     <field name="genre">Indie film</field>
     <field name="genre">Social problem film</field>
     <field name="genre">Chinese Movies</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Thom Fitzgerald</field>
+    <field name="name">3 Needles</field>
   </doc>
   <doc>
     <field name="id">/en/28_days_2000</field>
-    <field name="name">28 Days</field>
+    <field name="directed_by">Betty Thomas</field>
     <field name="initial_release_date">2000-02-08</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Betty Thomas</field>
+    <field name="name">28 Days</field>
   </doc>
   <doc>
     <field name="id">/en/36_china_town</field>
-    <field name="name">36 China Town</field>
+    <field name="directed_by">Abbas Burmawalla</field>
+    <field name="directed_by">Mustan Burmawalla</field>
     <field name="initial_release_date">2006-04-21</field>
     <field name="genre">Thriller</field>
     <field name="genre">Musical</field>
@@ -389,12 +383,11 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">Bollywood</field>
     <field name="genre">Musical comedy</field>
-    <field name="directed_by">Abbas Burmawalla</field>
-    <field name="directed_by">Mustan Burmawalla</field>
+    <field name="name">36 China Town</field>
   </doc>
   <doc>
     <field name="id">/en/7_mujeres_1_homosexual_y_carlos</field>
-    <field name="name">7 mujeres, 1 homosexual y Carlos</field>
+    <field name="directed_by">Rene Bueno</field>
     <field name="initial_release_date">2004-06-01</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">LGBT</field>
@@ -403,30 +396,30 @@
     <field name="genre">Sex comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Rene Bueno</field>
+    <field name="name">7 mujeres, 1 homosexual y Carlos</field>
   </doc>
   <doc>
     <field name="id">/en/88_minutes</field>
-    <field name="name">88 Minutes</field>
+    <field name="directed_by">Jon Avnet</field>
     <field name="initial_release_date">2007-02-14</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jon Avnet</field>
+    <field name="name">88 Minutes</field>
   </doc>
   <doc>
     <field name="id">/en/500_years_later</field>
-    <field name="name">500 Years Later</field>
+    <field name="directed_by">Owen 'Alik Shahadah</field>
     <field name="initial_release_date">2005-10-11</field>
     <field name="genre">Indie film</field>
     <field name="genre">Documentary film</field>
     <field name="genre">History</field>
-    <field name="directed_by">Owen 'Alik Shahadah</field>
+    <field name="name">500 Years Later</field>
   </doc>
   <doc>
     <field name="id">/en/50_ways_of_saying_fabulous</field>
-    <field name="name">50 Ways of Saying Fabulous</field>
+    <field name="directed_by">Stewart Main</field>
     <field name="genre">LGBT</field>
     <field name="genre">Indie film</field>
     <field name="genre">Historical period drama</field>
@@ -434,41 +427,41 @@
     <field name="genre">World cinema</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Stewart Main</field>
+    <field name="name">50 Ways of Saying Fabulous</field>
   </doc>
   <doc>
     <field name="id">/en/5x2</field>
-    <field name="name">5x2</field>
+    <field name="directed_by">François Ozon</field>
     <field name="initial_release_date">2004-09-01</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Marriage Drama</field>
     <field name="genre">Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">François Ozon</field>
+    <field name="name">5x2</field>
   </doc>
   <doc>
     <field name="id">/en/28_weeks_later</field>
-    <field name="name">28 Weeks Later</field>
+    <field name="directed_by">Juan Carlos Fresnadillo</field>
     <field name="initial_release_date">2007-04-26</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
-    <field name="directed_by">Juan Carlos Fresnadillo</field>
+    <field name="name">28 Weeks Later</field>
   </doc>
   <doc>
     <field name="id">/en/10_5</field>
-    <field name="name">10.5</field>
+    <field name="directed_by">John Lafia</field>
     <field name="initial_release_date">2004-05-02</field>
     <field name="genre">Disaster Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">John Lafia</field>
+    <field name="name">10.5</field>
   </doc>
   <doc>
     <field name="id">/en/13_going_on_30</field>
-    <field name="name">13 Going on 30</field>
+    <field name="directed_by">Gary Winick</field>
     <field name="initial_release_date">2004-04-14</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Coming of age</field>
@@ -476,11 +469,11 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Gary Winick</field>
+    <field name="name">13 Going on 30</field>
   </doc>
   <doc>
     <field name="id">/en/2ldk</field>
-    <field name="name">2LDK</field>
+    <field name="directed_by">Yukihiko Tsutsumi</field>
     <field name="initial_release_date">2004-05-13</field>
     <field name="genre">LGBT</field>
     <field name="genre">Thriller</field>
@@ -489,20 +482,20 @@
     <field name="genre">Japanese Movies</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Yukihiko Tsutsumi</field>
+    <field name="name">2LDK</field>
   </doc>
   <doc>
     <field name="id">/en/7_phere</field>
-    <field name="name">7½ Phere</field>
+    <field name="directed_by">Ishaan Trivedi</field>
     <field name="initial_release_date">2005-07-29</field>
     <field name="genre">Bollywood</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ishaan Trivedi</field>
+    <field name="name">7½ Phere</field>
   </doc>
   <doc>
     <field name="id">/en/a_beautiful_mind</field>
-    <field name="name">A Beautiful Mind</field>
+    <field name="directed_by">Ron Howard</field>
     <field name="initial_release_date">2001-12-13</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Psychological thriller</field>
@@ -511,40 +504,40 @@
     <field name="genre">Marriage Drama</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ron Howard</field>
+    <field name="name">A Beautiful Mind</field>
   </doc>
   <doc>
     <field name="id">/en/a_cinderella_story</field>
-    <field name="name">A Cinderella Story</field>
+    <field name="directed_by">Mark Rosman</field>
     <field name="initial_release_date">2004-07-10</field>
     <field name="genre">Teen film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Mark Rosman</field>
+    <field name="name">A Cinderella Story</field>
   </doc>
   <doc>
     <field name="id">/en/a_cock_and_bull_story</field>
-    <field name="name">A Cock and Bull Story</field>
+    <field name="directed_by">Michael Winterbottom</field>
     <field name="initial_release_date">2005-07-17</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Winterbottom</field>
+    <field name="name">A Cock and Bull Story</field>
   </doc>
   <doc>
     <field name="id">/en/a_common_thread</field>
-    <field name="name">A Common Thread</field>
+    <field name="directed_by">Éléonore Faucher</field>
     <field name="initial_release_date">2004-05-14</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Éléonore Faucher</field>
+    <field name="name">A Common Thread</field>
   </doc>
   <doc>
     <field name="id">/en/a_dirty_shame</field>
-    <field name="name">A Dirty Shame</field>
+    <field name="directed_by">John Waters</field>
     <field name="initial_release_date">2004-09-12</field>
     <field name="genre">Sex comedy</field>
     <field name="genre">Cult film</field>
@@ -553,18 +546,18 @@
     <field name="genre">Gross out</field>
     <field name="genre">Gross-out film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">John Waters</field>
+    <field name="name">A Dirty Shame</field>
   </doc>
   <doc>
     <field name="id">/en/a_duo_occasion</field>
-    <field name="name">A Duo Occasion</field>
+    <field name="directed_by">Pierre Lamoureux</field>
     <field name="initial_release_date">2005-11-22</field>
     <field name="genre">Music video</field>
-    <field name="directed_by">Pierre Lamoureux</field>
+    <field name="name">A Duo Occasion</field>
   </doc>
   <doc>
     <field name="id">/en/a_good_year</field>
-    <field name="name">A Good Year</field>
+    <field name="directed_by">Ridley Scott</field>
     <field name="initial_release_date">2006-09-09</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Film adaptation</field>
@@ -574,31 +567,31 @@
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ridley Scott</field>
+    <field name="name">A Good Year</field>
   </doc>
   <doc>
     <field name="id">/en/a_history_of_violence_2005</field>
-    <field name="name">A History of Violence</field>
+    <field name="directed_by">David Cronenberg</field>
     <field name="initial_release_date">2005-05-16</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">David Cronenberg</field>
+    <field name="name">A History of Violence</field>
   </doc>
   <doc>
     <field name="id">/en/ett_hal_i_mitt_hjarta</field>
-    <field name="name">A Hole in My Heart</field>
+    <field name="directed_by">Lukas Moodysson</field>
     <field name="initial_release_date">2004-09-10</field>
     <field name="genre">Horror</field>
     <field name="genre">Experimental film</field>
     <field name="genre">Social problem film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Lukas Moodysson</field>
+    <field name="name">A Hole in My Heart</field>
   </doc>
   <doc>
     <field name="id">/en/a_knights_tale</field>
-    <field name="name">A Knight's Tale</field>
+    <field name="directed_by">Brian Helgeland</field>
     <field name="initial_release_date">2001-03-08</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Adventure Film</field>
@@ -608,22 +601,22 @@
     <field name="genre">Costume Adventure</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Brian Helgeland</field>
+    <field name="name">A Knight's Tale</field>
   </doc>
   <doc>
     <field name="id">/en/a_league_of_ordinary_gentlemen</field>
-    <field name="name">A League of Ordinary Gentlemen</field>
+    <field name="directed_by">Christopher Browne</field>
+    <field name="directed_by">Alexander H. Browne</field>
     <field name="initial_release_date">2006-03-21</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Sports</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">Biographical film</field>
-    <field name="directed_by">Christopher Browne</field>
-    <field name="directed_by">Alexander H. Browne</field>
+    <field name="name">A League of Ordinary Gentlemen</field>
   </doc>
   <doc>
     <field name="id">/en/a_little_trip_to_heaven</field>
-    <field name="name">A Little Trip to Heaven</field>
+    <field name="directed_by">Baltasar Kormákur</field>
     <field name="initial_release_date">2005-12-26</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
@@ -633,84 +626,84 @@
     <field name="genre">Detective fiction</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Baltasar Kormákur</field>
+    <field name="name">A Little Trip to Heaven</field>
   </doc>
   <doc>
     <field name="id">/en/a_lot_like_love</field>
-    <field name="name">A Lot like Love</field>
+    <field name="directed_by">Nigel Cole</field>
     <field name="initial_release_date">2005-04-21</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Nigel Cole</field>
+    <field name="name">A Lot like Love</field>
   </doc>
   <doc>
     <field name="id">/en/a_love_song_for_bobby_long</field>
-    <field name="name">A Love Song for Bobby Long</field>
+    <field name="directed_by">Shainee Gabel</field>
     <field name="initial_release_date">2004-09-02</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Melodrama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Shainee Gabel</field>
+    <field name="name">A Love Song for Bobby Long</field>
   </doc>
   <doc>
     <field name="id">/en/a_man_a_real_one</field>
-    <field name="name">A Man, a Real One</field>
+    <field name="directed_by">Arnaud Larrieu</field>
+    <field name="directed_by">Jean-Marie Larrieu</field>
     <field name="initial_release_date">2003-05-28</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Arnaud Larrieu</field>
-    <field name="directed_by">Jean-Marie Larrieu</field>
+    <field name="name">A Man, a Real One</field>
   </doc>
   <doc>
     <field name="id">/en/a_midsummer_nights_rave</field>
-    <field name="name">A Midsummer Night's Rave</field>
+    <field name="directed_by">Gil Cates Jr.</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Teen film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Gil Cates Jr.</field>
+    <field name="name">A Midsummer Night's Rave</field>
   </doc>
   <doc>
     <field name="id">/en/a_mighty_wind</field>
-    <field name="name">A Mighty Wind</field>
+    <field name="directed_by">Christopher Guest</field>
     <field name="initial_release_date">2003-03-12</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Parody</field>
     <field name="genre">Musical</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Christopher Guest</field>
+    <field name="name">A Mighty Wind</field>
   </doc>
   <doc>
     <field name="id">/en/a_perfect_day</field>
-    <field name="name">A Perfect Day</field>
-    <field name="genre">World cinema</field>
-    <field name="genre">Drama</field>
     <field name="directed_by">Khalil Joreige</field>
     <field name="directed_by">Joana Hadjithomas</field>
+    <field name="genre">World cinema</field>
+    <field name="genre">Drama</field>
+    <field name="name">A Perfect Day</field>
   </doc>
   <doc>
     <field name="id">/en/a_prairie_home_companion_2006</field>
-    <field name="name">A Prairie Home Companion</field>
+    <field name="directed_by">Robert Altman</field>
     <field name="initial_release_date">2006-02-12</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Robert Altman</field>
+    <field name="name">A Prairie Home Companion</field>
   </doc>
   <doc>
     <field name="id">/en/a_ring_of_endless_light_2002</field>
-    <field name="name">A Ring of Endless Light</field>
+    <field name="directed_by">Greg Beeman</field>
     <field name="initial_release_date">2002-08-23</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Greg Beeman</field>
+    <field name="name">A Ring of Endless Light</field>
   </doc>
   <doc>
     <field name="id">/en/a_scanner_darkly_2006</field>
-    <field name="name">A Scanner Darkly</field>
+    <field name="directed_by">Richard Linklater</field>
     <field name="initial_release_date">2006-07-07</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Dystopia</field>
@@ -719,11 +712,11 @@
     <field name="genre">Film adaptation</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Richard Linklater</field>
+    <field name="name">A Scanner Darkly</field>
   </doc>
   <doc>
     <field name="id">/en/a_short_film_about_john_bolton</field>
-    <field name="name">A Short Film About John Bolton</field>
+    <field name="directed_by">Neil Gaiman</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Short Film</field>
     <field name="genre">Black comedy</field>
@@ -732,19 +725,19 @@
     <field name="genre">Graphic &amp;amp; Applied Arts</field>
     <field name="genre">Comedy</field>
     <field name="genre">Biographical film</field>
-    <field name="directed_by">Neil Gaiman</field>
+    <field name="name">A Short Film About John Bolton</field>
   </doc>
   <doc>
     <field name="id">/en/a_shot_in_the_west</field>
-    <field name="name">A Shot in the West</field>
+    <field name="directed_by">Bob Kelly</field>
     <field name="initial_release_date">2006-07-16</field>
     <field name="genre">Western</field>
     <field name="genre">Short Film</field>
-    <field name="directed_by">Bob Kelly</field>
+    <field name="name">A Shot in the West</field>
   </doc>
   <doc>
     <field name="id">/en/a_sound_of_thunder_2005</field>
-    <field name="name">A Sound of Thunder</field>
+    <field name="directed_by">Peter Hyams</field>
     <field name="initial_release_date">2005-05-15</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Adventure Film</field>
@@ -752,120 +745,120 @@
     <field name="genre">Action Film</field>
     <field name="genre">Apocalyptic and post-apocalyptic fiction</field>
     <field name="genre">Time travel</field>
-    <field name="directed_by">Peter Hyams</field>
+    <field name="name">A Sound of Thunder</field>
   </doc>
   <doc>
     <field name="id">/en/a_state_of_mind</field>
-    <field name="name">A State of Mind</field>
+    <field name="directed_by">Daniel Gordon</field>
     <field name="initial_release_date">2005-08-10</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Sports</field>
-    <field name="directed_by">Daniel Gordon</field>
+    <field name="name">A State of Mind</field>
   </doc>
   <doc>
     <field name="id">/en/a_time_for_drunken_horses</field>
-    <field name="name">A Time for Drunken Horses</field>
+    <field name="directed_by">Bahman Ghobadi</field>
     <field name="genre">World cinema</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Bahman Ghobadi</field>
+    <field name="name">A Time for Drunken Horses</field>
   </doc>
   <doc>
     <field name="id">/en/a_ton_image</field>
-    <field name="name">À ton image</field>
+    <field name="directed_by">Aruna Villiers</field>
     <field name="initial_release_date">2004-05-26</field>
     <field name="genre">Thriller</field>
     <field name="genre">Science Fiction</field>
-    <field name="directed_by">Aruna Villiers</field>
+    <field name="name">À ton image</field>
   </doc>
   <doc>
     <field name="id">/en/a_very_long_engagement</field>
-    <field name="name">A Very Long Engagement</field>
+    <field name="directed_by">Jean-Pierre Jeunet</field>
     <field name="initial_release_date">2004-10-27</field>
     <field name="genre">War film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jean-Pierre Jeunet</field>
+    <field name="name">A Very Long Engagement</field>
   </doc>
   <doc>
     <field name="id">/en/a_view_from_the_eiffel_tower</field>
-    <field name="name">A View from Eiffel Tower</field>
-    <field name="genre">Drama</field>
     <field name="directed_by">Nikola Vukčević</field>
+    <field name="genre">Drama</field>
+    <field name="name">A View from Eiffel Tower</field>
   </doc>
   <doc>
     <field name="id">/en/a_walk_to_remember</field>
-    <field name="name">A Walk to Remember</field>
+    <field name="directed_by">Adam Shankman</field>
     <field name="initial_release_date">2002-01-23</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Adam Shankman</field>
+    <field name="name">A Walk to Remember</field>
   </doc>
   <doc>
     <field name="id">/en/a_i</field>
-    <field name="name">A.I. Artificial Intelligence</field>
+    <field name="directed_by">Steven Spielberg</field>
     <field name="initial_release_date">2001-06-26</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Future noir</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Steven Spielberg</field>
+    <field name="name">A.I. Artificial Intelligence</field>
   </doc>
   <doc>
     <field name="id">/en/a_k_a_tommy_chong</field>
-    <field name="name">a/k/a Tommy Chong</field>
+    <field name="directed_by">Josh Gilbert</field>
     <field name="initial_release_date">2006-06-14</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">Law &amp;amp; Crime</field>
     <field name="genre">Biographical film</field>
-    <field name="directed_by">Josh Gilbert</field>
+    <field name="name">a/k/a Tommy Chong</field>
   </doc>
   <doc>
     <field name="id">/en/aalvar</field>
-    <field name="name">Aalvar</field>
+    <field name="directed_by">Chella</field>
     <field name="initial_release_date">2007-01-12</field>
     <field name="genre">Action Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Chella</field>
+    <field name="name">Aalvar</field>
   </doc>
   <doc>
     <field name="id">/en/aap_ki_khatir</field>
-    <field name="name">Aap Ki Khatir</field>
+    <field name="directed_by">Dharmesh Darshan</field>
     <field name="initial_release_date">2006-08-25</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Bollywood</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Dharmesh Darshan</field>
+    <field name="name">Aap Ki Khatir</field>
   </doc>
   <doc>
     <field name="id">/en/aaru_2005</field>
-    <field name="name">Aaru</field>
+    <field name="directed_by">Hari</field>
     <field name="initial_release_date">2005-12-09</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Hari</field>
+    <field name="name">Aaru</field>
   </doc>
   <doc>
     <field name="id">/en/aata</field>
-    <field name="name">Aata</field>
+    <field name="directed_by">V.N. Aditya</field>
     <field name="initial_release_date">2007-05-09</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">V.N. Aditya</field>
+    <field name="name">Aata</field>
   </doc>
   <doc>
     <field name="id">/en/aathi</field>
-    <field name="name">Aadhi</field>
+    <field name="directed_by">Ramana</field>
     <field name="initial_release_date">2006-01-14</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
@@ -875,53 +868,53 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Ramana</field>
+    <field name="name">Aadhi</field>
   </doc>
   <doc>
     <field name="id">/en/aayitha_ezhuthu</field>
-    <field name="name">Aaytha Ezhuthu</field>
+    <field name="directed_by">Mani Ratnam</field>
     <field name="initial_release_date">2004-05-21</field>
     <field name="genre">Thriller</field>
     <field name="genre">Political thriller</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Mani Ratnam</field>
+    <field name="name">Aaytha Ezhuthu</field>
   </doc>
   <doc>
     <field name="id">/en/abandon_2002</field>
-    <field name="name">Abandon</field>
+    <field name="directed_by">Stephen Gaghan</field>
     <field name="initial_release_date">2002-10-18</field>
     <field name="genre">Mystery</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Suspense</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Stephen Gaghan</field>
+    <field name="name">Abandon</field>
   </doc>
   <doc>
     <field name="id">/en/abduction_the_megumi_yokota_story</field>
-    <field name="name">Abduction: The Megumi Yokota Story</field>
+    <field name="directed_by">Patty Kim</field>
+    <field name="directed_by">Chris Sheridan</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">Law &amp;amp; Crime</field>
-    <field name="directed_by">Patty Kim</field>
-    <field name="directed_by">Chris Sheridan</field>
+    <field name="name">Abduction: The Megumi Yokota Story</field>
   </doc>
   <doc>
     <field name="id">/en/about_a_boy_2002</field>
-    <field name="name">About a Boy</field>
+    <field name="directed_by">Chris Weitz</field>
+    <field name="directed_by">Paul Weitz</field>
     <field name="initial_release_date">2002-04-26</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Chris Weitz</field>
-    <field name="directed_by">Paul Weitz</field>
+    <field name="name">About a Boy</field>
   </doc>
   <doc>
     <field name="id">/en/about_schmidt</field>
-    <field name="name">About Schmidt</field>
+    <field name="directed_by">Alexander Payne</field>
     <field name="initial_release_date">2002-05-22</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Indie film</field>
@@ -930,28 +923,28 @@
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Alexander Payne</field>
+    <field name="name">About Schmidt</field>
   </doc>
   <doc>
     <field name="id">/en/accepted</field>
-    <field name="name">Accepted</field>
+    <field name="directed_by">Steve Pink</field>
     <field name="initial_release_date">2006-08-18</field>
     <field name="genre">Teen film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Steve Pink</field>
+    <field name="name">Accepted</field>
   </doc>
   <doc>
     <field name="id">/en/across_the_hall</field>
-    <field name="name">Across the Hall</field>
+    <field name="directed_by">Alex Merkin</field>
+    <field name="directed_by">Alex Merkin</field>
     <field name="genre">Short Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Alex Merkin</field>
-    <field name="directed_by">Alex Merkin</field>
+    <field name="name">Across the Hall</field>
   </doc>
   <doc>
     <field name="id">/en/adam_steve</field>
-    <field name="name">Adam &amp;amp; Steve</field>
+    <field name="directed_by">Craig Chester</field>
     <field name="initial_release_date">2005-04-24</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
@@ -961,66 +954,66 @@
     <field name="genre">Gay</field>
     <field name="genre">Gay Interest</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Craig Chester</field>
+    <field name="name">Adam &amp;amp; Steve</field>
   </doc>
   <doc>
     <field name="id">/en/adam_resurrected</field>
-    <field name="name">Adam Resurrected</field>
+    <field name="directed_by">Paul Schrader</field>
     <field name="initial_release_date">2008-08-30</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Paul Schrader</field>
+    <field name="name">Adam Resurrected</field>
   </doc>
   <doc>
     <field name="id">/en/adaptation_2002</field>
-    <field name="name">Adaptation</field>
+    <field name="directed_by">Spike Jonze</field>
     <field name="initial_release_date">2002-12-06</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Spike Jonze</field>
+    <field name="name">Adaptation</field>
   </doc>
   <doc>
     <field name="id">/en/address_unknown</field>
-    <field name="name">Address Unknown</field>
+    <field name="directed_by">Kim Ki-duk</field>
     <field name="initial_release_date">2001-06-02</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Kim Ki-duk</field>
+    <field name="name">Address Unknown</field>
   </doc>
   <doc>
     <field name="id">/en/adrenaline_rush_2002</field>
-    <field name="name">Adrenaline Rush</field>
+    <field name="directed_by">Marc Fafard</field>
     <field name="initial_release_date">2002-10-18</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Short Film</field>
-    <field name="directed_by">Marc Fafard</field>
+    <field name="name">Adrenaline Rush</field>
   </doc>
   <doc>
     <field name="id">/en/essential_keys_to_better_bowling_2006</field>
-    <field name="name">Essential Keys To Better Bowling</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Sports</field>
+    <field name="name">Essential Keys To Better Bowling</field>
   </doc>
   <doc>
     <field name="id">/en/adventures_into_digital_comics</field>
-    <field name="name">Adventures Into Digital Comics</field>
-    <field name="genre">Documentary film</field>
     <field name="directed_by">Sébastien Dumesnil</field>
+    <field name="genre">Documentary film</field>
+    <field name="name">Adventures Into Digital Comics</field>
   </doc>
   <doc>
     <field name="id">/en/ae_fond_kiss</field>
-    <field name="name">Ae Fond Kiss...</field>
+    <field name="directed_by">Ken Loach</field>
     <field name="initial_release_date">2004-02-13</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ken Loach</field>
+    <field name="name">Ae Fond Kiss...</field>
   </doc>
   <doc>
     <field name="id">/en/aetbaar</field>
-    <field name="name">Aetbaar</field>
+    <field name="directed_by">Vikram Bhatt</field>
     <field name="initial_release_date">2004-01-23</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
@@ -1031,20 +1024,19 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Vikram Bhatt</field>
+    <field name="name">Aetbaar</field>
   </doc>
   <doc>
     <field name="id">/en/aethiree</field>
-    <field name="name">Aethiree</field>
     <field name="initial_release_date">2004-04-23</field>
     <field name="genre">Comedy</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">K. S. Ravikumar</field>
+    <field name="name">Aethirree</field>
   </doc>
   <doc>
     <field name="id">/en/after_innocence</field>
-    <field name="name">After Innocence</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Political cinema</field>
@@ -1052,10 +1044,10 @@
     <field name="genre">Law &amp;amp; Crime</field>
     <field name="genre">Biographical film</field>
     <field name="directed_by">Jessica Sanders</field>
+    <field name="name">After Innocence</field>
   </doc>
   <doc>
     <field name="id">/en/after_the_sunset</field>
-    <field name="name">After the Sunset</field>
     <field name="initial_release_date">2004-11-10</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Action/Adventure</field>
@@ -1066,27 +1058,27 @@
     <field name="genre">Crime Comedy</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Brett Ratner</field>
+    <field name="name">After the Sunset</field>
   </doc>
   <doc>
     <field name="id">/en/aftermath_2007</field>
-    <field name="name">Aftermath</field>
     <field name="initial_release_date">2013-03-01</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="directed_by">Thomas Farone</field>
+    <field name="name">Aftermath</field>
   </doc>
   <doc>
     <field name="id">/en/against_the_ropes</field>
-    <field name="name">Against the Ropes</field>
     <field name="initial_release_date">2004-02-20</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Charles S. Dutton</field>
+    <field name="name">Against the Ropes</field>
   </doc>
   <doc>
     <field name="id">/en/agent_cody_banks_2_destination_london</field>
-    <field name="name">Agent Cody Banks 2: Destination London</field>
     <field name="initial_release_date">2004-03-12</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Film</field>
@@ -1097,42 +1089,42 @@
     <field name="genre">Family-Oriented Adventure</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Kevin Allen</field>
+    <field name="name">Agent Cody Banks 2: Destination London</field>
   </doc>
   <doc>
     <field name="id">/en/agent_one-half</field>
-    <field name="name">Agent One-Half</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Brian Bero</field>
+    <field name="name">Agent One-Half</field>
   </doc>
   <doc>
     <field name="id">/en/agnes_and_his_brothers</field>
-    <field name="name">Agnes and His Brothers</field>
     <field name="initial_release_date">2004-09-05</field>
     <field name="genre">Drama</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Oskar Roehler</field>
+    <field name="name">Agnes and His Brothers</field>
   </doc>
   <doc>
     <field name="id">/en/aideista_parhain</field>
-    <field name="name">Mother of Mine</field>
     <field name="initial_release_date">2005-08-25</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Klaus Härö</field>
+    <field name="name">Mother of Mine</field>
   </doc>
   <doc>
     <field name="id">/en/aileen_life_and_death_of_a_serial_killer</field>
-    <field name="name">Aileen: Life and Death of a Serial Killer</field>
     <field name="initial_release_date">2003-05-10</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Political drama</field>
     <field name="directed_by">Nick Broomfield</field>
     <field name="directed_by">Joan Churchill</field>
+    <field name="name">Aileen: Life and Death of a Serial Killer</field>
   </doc>
   <doc>
     <field name="id">/en/air_2005</field>
-    <field name="name">Air</field>
     <field name="initial_release_date">2005-02-05</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Anime</field>
@@ -1140,29 +1132,29 @@
     <field name="genre">Japanese Movies</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Osamu Dezaki</field>
+    <field name="name">Air</field>
   </doc>
   <doc>
     <field name="id">/en/air_bud_seventh_inning_fetch</field>
-    <field name="name">Air Bud: Seventh Inning Fetch</field>
     <field name="initial_release_date">2002-02-21</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Robert Vince</field>
+    <field name="name">Air Bud: Seventh Inning Fetch</field>
   </doc>
   <doc>
     <field name="id">/en/air_bud_spikes_back</field>
-    <field name="name">Air Bud: Spikes Back</field>
     <field name="initial_release_date">2003-06-24</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Mike Southon</field>
+    <field name="name">Air Bud: Spikes Back</field>
   </doc>
   <doc>
     <field name="id">/en/air_buddies</field>
-    <field name="name">Air Buddies</field>
     <field name="initial_release_date">2006-12-10</field>
     <field name="genre">Family</field>
     <field name="genre">Animal Picture</field>
@@ -1170,10 +1162,10 @@
     <field name="genre">Family-Oriented Adventure</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Robert Vince</field>
+    <field name="name">Air Buddies</field>
   </doc>
   <doc>
     <field name="id">/en/aitraaz</field>
-    <field name="name">Aitraaz</field>
     <field name="initial_release_date">2004-11-12</field>
     <field name="genre">Trial drama</field>
     <field name="genre">Thriller</field>
@@ -1182,30 +1174,30 @@
     <field name="genre">Drama</field>
     <field name="directed_by">Abbas Burmawalla</field>
     <field name="directed_by">Mustan Burmawalla</field>
+    <field name="name">Aitraaz</field>
   </doc>
   <doc>
     <field name="id">/en/aka_2002</field>
-    <field name="name">AKA</field>
     <field name="initial_release_date">2002-01-19</field>
     <field name="genre">LGBT</field>
     <field name="genre">Indie film</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Duncan Roy</field>
+    <field name="name">AKA</field>
   </doc>
   <doc>
     <field name="id">/en/aakasha_gopuram</field>
-    <field name="name">Aakasha Gopuram</field>
     <field name="initial_release_date">2008-08-22</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">K.P.Kumaran</field>
+    <field name="name">Aakasha Gopuram</field>
   </doc>
   <doc>
     <field name="id">/en/akbar-jodha</field>
-    <field name="name">Jodhaa Akbar</field>
     <field name="initial_release_date">2008-02-13</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Romance Film</field>
@@ -1217,17 +1209,17 @@
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Ashutosh Gowariker</field>
+    <field name="name">Jodhaa Akbar</field>
   </doc>
   <doc>
     <field name="id">/en/akeelah_and_the_bee</field>
-    <field name="name">Akeelah and the Bee</field>
     <field name="initial_release_date">2006-03-16</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Doug Atchison</field>
+    <field name="name">Akeelah and the Bee</field>
   </doc>
   <doc>
     <field name="id">/en/aks</field>
-    <field name="name">The Reflection</field>
     <field name="initial_release_date">2001-07-13</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
@@ -1235,20 +1227,20 @@
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Rakeysh Omprakash Mehra</field>
+    <field name="name">The Reflection</field>
   </doc>
   <doc>
     <field name="id">/en/aksar</field>
-    <field name="name">Aksar</field>
     <field name="initial_release_date">2006-02-03</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Anant Mahadevan</field>
+    <field name="name">Aksar</field>
   </doc>
   <doc>
     <field name="id">/en/al_franken_god_spoke</field>
-    <field name="name">Al Franken: God Spoke</field>
     <field name="initial_release_date">2006-09-13</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Documentary film</field>
@@ -1257,20 +1249,20 @@
     <field name="genre">Biographical film</field>
     <field name="directed_by">Nick Doob</field>
     <field name="directed_by">Chris Hegedus</field>
+    <field name="name">Al Franken: God Spoke</field>
   </doc>
   <doc>
     <field name="id">/en/alag</field>
-    <field name="name">Different</field>
     <field name="initial_release_date">2006-06-16</field>
     <field name="genre">Thriller</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Ashu Trikha</field>
+    <field name="name">Different</field>
   </doc>
   <doc>
     <field name="id">/en/alai</field>
-    <field name="name">Wave</field>
     <field name="initial_release_date">2003-09-10</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
@@ -1278,20 +1270,20 @@
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Vikram Kumar</field>
+    <field name="name">Wave</field>
   </doc>
   <doc>
     <field name="id">/en/alaipayuthey</field>
-    <field name="name">Waves</field>
     <field name="initial_release_date">2000-04-14</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Mani Ratnam</field>
+    <field name="name">Waves</field>
   </doc>
   <doc>
     <field name="id">/en/alatriste</field>
-    <field name="name">Alatriste</field>
     <field name="initial_release_date">2006-09-01</field>
     <field name="genre">Thriller</field>
     <field name="genre">War film</field>
@@ -1300,19 +1292,19 @@
     <field name="genre">Drama</field>
     <field name="genre">Historical fiction</field>
     <field name="directed_by">Agustín Díaz Yanes</field>
+    <field name="name">Alatriste</field>
   </doc>
   <doc>
     <field name="id">/en/alex_emma</field>
-    <field name="name">Alex &amp;amp; Emma</field>
     <field name="initial_release_date">2003-06-20</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Rob Reiner</field>
+    <field name="name">Alex &amp;amp; Emma</field>
   </doc>
   <doc>
     <field name="id">/en/alexander_2004</field>
-    <field name="name">Alexander</field>
     <field name="initial_release_date">2004-11-16</field>
     <field name="genre">War film</field>
     <field name="genre">Action Film</field>
@@ -1324,10 +1316,10 @@
     <field name="directed_by">Oliver Stone</field>
     <field name="directed_by">Wilhelm Sasnal</field>
     <field name="directed_by">Anka Sasnal</field>
+    <field name="name">Alexander</field>
   </doc>
   <doc>
     <field name="id">/en/alexandras_project</field>
-    <field name="name">Alexandra's Project</field>
     <field name="genre">Thriller</field>
     <field name="genre">Suspense</field>
     <field name="genre">Psychological thriller</field>
@@ -1335,10 +1327,10 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Rolf de Heer</field>
+    <field name="name">Alexandra's Project</field>
   </doc>
   <doc>
     <field name="id">/en/alfie_2004</field>
-    <field name="name">Alfie</field>
     <field name="initial_release_date">2004-10-22</field>
     <field name="genre">Sex comedy</field>
     <field name="genre">Remake</field>
@@ -1348,10 +1340,10 @@
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Charles Shyer</field>
+    <field name="name">Alfie</field>
   </doc>
   <doc>
     <field name="id">/en/ali_2001</field>
-    <field name="name">Ali</field>
     <field name="initial_release_date">2001-12-11</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Sports</field>
@@ -1359,10 +1351,10 @@
     <field name="genre">Sports films</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Michael Mann</field>
+    <field name="name">Ali</field>
   </doc>
   <doc>
     <field name="id">/en/ali_g_indahouse</field>
-    <field name="name">Ali G Indahouse</field>
     <field name="initial_release_date">2002-03-22</field>
     <field name="genre">Stoner film</field>
     <field name="genre">Parody</field>
@@ -1370,19 +1362,19 @@
     <field name="genre">Gross-out film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Mark Mylod</field>
+    <field name="name">Ali G Indahouse</field>
   </doc>
   <doc>
     <field name="id">/en/alien_autopsy_2006</field>
-    <field name="name">Alien Autopsy</field>
     <field name="initial_release_date">2006-04-07</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Jonny Campbell</field>
+    <field name="name">Alien Autopsy</field>
   </doc>
   <doc>
     <field name="id">/en/avp_alien_vs_predator</field>
-    <field name="name">Alien vs. Predator</field>
     <field name="initial_release_date">2004-08-12</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
@@ -1391,10 +1383,10 @@
     <field name="genre">Thriller</field>
     <field name="genre">Adventure Film</field>
     <field name="directed_by">Paul W. S. Anderson</field>
+    <field name="name">Alien vs. Predator</field>
   </doc>
   <doc>
     <field name="id">/en/avpr_aliens_vs_predator_requiem</field>
-    <field name="name">AVPR: Aliens vs Predator - Requiem</field>
     <field name="initial_release_date">2007-12-25</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action Film</field>
@@ -1404,10 +1396,10 @@
     <field name="genre">Thriller</field>
     <field name="directed_by">Colin Strause</field>
     <field name="directed_by">Greg Strause</field>
+    <field name="name">AVPR: Aliens vs Predator - Requiem</field>
   </doc>
   <doc>
     <field name="id">/en/aliens_of_the_deep</field>
-    <field name="name">Aliens of the Deep</field>
     <field name="initial_release_date">2005-01-28</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Travel</field>
@@ -1416,10 +1408,10 @@
     <field name="directed_by">James Cameron</field>
     <field name="directed_by">Steven Quale</field>
     <field name="directed_by">Steven Quale</field>
+    <field name="name">Aliens of the Deep</field>
   </doc>
   <doc>
     <field name="id">/en/alive_2002</field>
-    <field name="name">Alive</field>
     <field name="initial_release_date">2002-09-12</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action Film</field>
@@ -1429,10 +1421,10 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Japanese Movies</field>
     <field name="directed_by">Ryuhei Kitamura</field>
+    <field name="name">Alive</field>
   </doc>
   <doc>
     <field name="id">/en/all_about_lily_chou-chou</field>
-    <field name="name">All About Lily Chou-Chou</field>
     <field name="initial_release_date">2001-09-07</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Musical</field>
@@ -1442,30 +1434,30 @@
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="directed_by">Shunji Iwai</field>
+    <field name="name">All About Lily Chou-Chou</field>
   </doc>
   <doc>
     <field name="id">/en/all_about_the_benjamins</field>
-    <field name="name">All About the Benjamins</field>
     <field name="initial_release_date">2002-03-08</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Thriller</field>
     <field name="directed_by">Kevin Bray</field>
+    <field name="name">All About the Benjamins</field>
   </doc>
   <doc>
     <field name="id">/en/all_i_want_2002</field>
-    <field name="name">All I Want</field>
     <field name="initial_release_date">2002-09-10</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Jeffrey Porter</field>
+    <field name="name">All I Want</field>
   </doc>
   <doc>
     <field name="id">/en/all_over_the_guy</field>
-    <field name="name">All Over the Guy</field>
     <field name="genre">Indie film</field>
     <field name="genre">LGBT</field>
     <field name="genre">Romantic comedy</field>
@@ -1475,71 +1467,71 @@
     <field name="genre">Gay Themed</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Julie Davis</field>
+    <field name="name">All Over the Guy</field>
   </doc>
   <doc>
     <field name="id">/en/all_souls_day_2005</field>
-    <field name="name">All Souls Day</field>
     <field name="initial_release_date">2005-01-25</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Zombie Film</field>
     <field name="directed_by">Jeremy Kasten</field>
     <field name="directed_by">Mark A. Altman</field>
+    <field name="name">All Souls Day</field>
   </doc>
   <doc>
     <field name="id">/en/all_the_kings_men_2006</field>
-    <field name="name">All the King's Men</field>
     <field name="initial_release_date">2006-09-10</field>
     <field name="genre">Political drama</field>
     <field name="genre">Thriller</field>
     <field name="directed_by">Steven Zaillian</field>
+    <field name="name">All the King's Men</field>
   </doc>
   <doc>
     <field name="id">/en/all_the_real_girls</field>
-    <field name="name">All the Real Girls</field>
     <field name="initial_release_date">2003-01-19</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Drama</field>
     <field name="directed_by">David Gordon Green</field>
+    <field name="name">All the Real Girls</field>
   </doc>
   <doc>
     <field name="id">/en/allari_bullodu</field>
-    <field name="name">Allari Bullodu</field>
     <field name="genre">Comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Kovelamudi Raghavendra Rao</field>
+    <field name="name">Allari Bullodu</field>
   </doc>
   <doc>
     <field name="id">/en/allari_pidugu</field>
-    <field name="name">Allari Pidugu</field>
     <field name="initial_release_date">2005-10-05</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Jayant Paranji</field>
+    <field name="name">Allari Pidugu</field>
   </doc>
   <doc>
     <field name="id">/en/alles_auf_zucker</field>
-    <field name="name">Alles auf Zucker!</field>
     <field name="initial_release_date">2004-12-31</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Dani Levy</field>
+    <field name="name">Alles auf Zucker!</field>
   </doc>
   <doc>
     <field name="id">/en/alley_cats_strike</field>
-    <field name="name">Alley Cats Strike!</field>
     <field name="initial_release_date">2000-03-18</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="directed_by">Rod Daniel</field>
+    <field name="name">Alley Cats Strike!</field>
   </doc>
   <doc>
     <field name="id">/en/almost_famous</field>
-    <field name="name">Almost Famous</field>
     <field name="initial_release_date">2000-09-08</field>
     <field name="genre">Musical</field>
     <field name="genre">Comedy-drama</field>
@@ -1550,24 +1542,24 @@
     <field name="genre">Music</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Cameron Crowe</field>
+    <field name="name">Almost Famous</field>
   </doc>
   <doc>
     <field name="id">/en/almost_round_three</field>
-    <field name="name">Almost: Round Three</field>
     <field name="initial_release_date">2004-11-10</field>
     <field name="genre">Sports</field>
     <field name="directed_by">Matt Hill</field>
     <field name="directed_by">Matt Hill</field>
+    <field name="name">Almost: Round Three</field>
   </doc>
   <doc>
     <field name="id">/en/alone_and_restless</field>
-    <field name="name">Alone and Restless</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Michael Thomas Dunn</field>
+    <field name="name">Alone and Restless</field>
   </doc>
   <doc>
     <field name="id">/en/alone_in_the_dark</field>
-    <field name="name">Alone in the Dark</field>
     <field name="initial_release_date">2005-01-28</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
@@ -1576,10 +1568,10 @@
     <field name="genre">B movie</field>
     <field name="genre">Action/Adventure</field>
     <field name="directed_by">Uwe Boll</field>
+    <field name="name">Alone in the Dark</field>
   </doc>
   <doc>
     <field name="id">/en/along_came_polly</field>
-    <field name="name">Along Came Polly</field>
     <field name="initial_release_date">2004-01-12</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
@@ -1587,54 +1579,54 @@
     <field name="genre">Gross-out film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">John Hamburg</field>
+    <field name="name">Along Came Polly</field>
   </doc>
   <doc>
     <field name="id">/en/alpha_dog</field>
-    <field name="name">Alpha Dog</field>
     <field name="initial_release_date">2006-01-27</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Nick Cassavetes</field>
+    <field name="name">Alpha Dog</field>
   </doc>
   <doc>
     <field name="id">/en/amelie</field>
-    <field name="name">Amélie</field>
     <field name="initial_release_date">2001-04-25</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Jean-Pierre Jeunet</field>
+    <field name="name">Amélie</field>
   </doc>
   <doc>
     <field name="id">/en/america_freedom_to_fascism</field>
-    <field name="name">America: Freedom to Fascism</field>
     <field name="initial_release_date">2006-07-28</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="directed_by">Aaron Russo</field>
+    <field name="name">America: Freedom to Fascism</field>
   </doc>
   <doc>
     <field name="id">/en/americas_sweethearts</field>
-    <field name="name">America's Sweethearts</field>
     <field name="initial_release_date">2001-07-17</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Joe Roth</field>
+    <field name="name">America's Sweethearts</field>
   </doc>
   <doc>
     <field name="id">/en/american_cowslip</field>
-    <field name="name">American Cowslip</field>
     <field name="initial_release_date">2009-07-24</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Mark David</field>
+    <field name="name">American Cowslip</field>
   </doc>
   <doc>
     <field name="id">/en/american_desi</field>
-    <field name="name">American Desi</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
@@ -1642,10 +1634,10 @@
     <field name="genre">Teen film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Piyush Dinker Pandya</field>
+    <field name="name">American Desi</field>
   </doc>
   <doc>
     <field name="id">/en/american_dog</field>
-    <field name="name">Bolt</field>
     <field name="initial_release_date">2008-11-17</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
@@ -1653,10 +1645,10 @@
     <field name="genre">Comedy</field>
     <field name="directed_by">Chris Williams</field>
     <field name="directed_by">Byron Howard</field>
+    <field name="name">Bolt</field>
   </doc>
   <doc>
     <field name="id">/en/american_dreamz</field>
-    <field name="name">American Dreamz</field>
     <field name="initial_release_date">2006-04-21</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Parody</field>
@@ -1664,10 +1656,10 @@
     <field name="genre">Media Satire</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Paul Weitz</field>
+    <field name="name">American Dreamz</field>
   </doc>
   <doc>
     <field name="id">/en/american_gangster</field>
-    <field name="name">American Gangster</field>
     <field name="initial_release_date">2007-10-19</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">War film</field>
@@ -1679,18 +1671,18 @@
     <field name="genre">True crime</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Ridley Scott</field>
+    <field name="name">American Gangster</field>
   </doc>
   <doc>
     <field name="id">/en/american_gun</field>
-    <field name="name">American Gun</field>
     <field name="initial_release_date">2005-09-15</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Aric Avelino</field>
+    <field name="name">American Gun</field>
   </doc>
   <doc>
     <field name="id">/en/american_hardcore_2006</field>
-    <field name="name">American Hardcore</field>
     <field name="initial_release_date">2006-03-11</field>
     <field name="genre">Music</field>
     <field name="genre">Documentary film</field>
@@ -1698,10 +1690,10 @@
     <field name="genre">Punk rock</field>
     <field name="genre">Biographical film</field>
     <field name="directed_by">Paul Rachman</field>
+    <field name="name">American Hardcore</field>
   </doc>
   <doc>
     <field name="id">/en/american_outlaws</field>
-    <field name="name">American Outlaws</field>
     <field name="initial_release_date">2001-08-17</field>
     <field name="genre">Western</field>
     <field name="genre">Costume drama</field>
@@ -1711,32 +1703,32 @@
     <field name="genre">Comedy Western</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Les Mayfield</field>
+    <field name="name">American Outlaws</field>
   </doc>
   <doc>
     <field name="id">/en/american_pie_the_naked_mile</field>
-    <field name="name">American Pie Presents: The Naked Mile</field>
     <field name="initial_release_date">2006-12-07</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Joe Nussbaum</field>
+    <field name="name">American Pie Presents: The Naked Mile</field>
   </doc>
   <doc>
     <field name="id">/en/american_pie_2</field>
-    <field name="name">American Pie 2</field>
     <field name="initial_release_date">2001-08-06</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">James B. Rogers</field>
+    <field name="name">American Pie 2</field>
   </doc>
   <doc>
     <field name="id">/en/american_pie_presents_band_camp</field>
-    <field name="name">American Pie Presents: Band Camp</field>
     <field name="initial_release_date">2005-10-31</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Steve Rash</field>
+    <field name="name">American Pie Presents: Band Camp</field>
   </doc>
   <doc>
     <field name="id">/en/american_psycho_2000</field>
-    <field name="name">American Psycho</field>
     <field name="initial_release_date">2000-01-21</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Slasher</field>
@@ -1748,10 +1740,10 @@
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Mary Harron</field>
+    <field name="name">American Psycho</field>
   </doc>
   <doc>
     <field name="id">/en/american_splendor_2003</field>
-    <field name="name">American Splendor</field>
     <field name="initial_release_date">2003-01-20</field>
     <field name="genre">Indie film</field>
     <field name="genre">Biographical film</field>
@@ -1761,73 +1753,73 @@
     <field name="genre">Drama</field>
     <field name="directed_by">Shari Springer Berman</field>
     <field name="directed_by">Robert Pulcini</field>
+    <field name="name">American Splendor</field>
   </doc>
   <doc>
     <field name="id">/en/american_wedding</field>
-    <field name="name">American Wedding</field>
     <field name="initial_release_date">2003-07-24</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Jesse Dylan</field>
+    <field name="name">American Wedding</field>
   </doc>
   <doc>
     <field name="id">/en/americano_2005</field>
-    <field name="name">Americano</field>
     <field name="initial_release_date">2005-01-07</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Kevin Noland</field>
+    <field name="name">Americano</field>
   </doc>
   <doc>
     <field name="id">/en/amma_nanna_o_tamila_ammayi</field>
-    <field name="name">Amma Nanna O Tamila Ammayi</field>
     <field name="initial_release_date">2003-04-19</field>
     <field name="genre">Sports</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Puri Jagannadh</field>
+    <field name="name">Amma Nanna O Tamila Ammayi</field>
   </doc>
   <doc>
     <field name="id">/en/amores_perros</field>
-    <field name="name">Amores perros</field>
     <field name="initial_release_date">2000-05-14</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Alejandro González Iñárritu</field>
+    <field name="name">Amores perros</field>
   </doc>
   <doc>
     <field name="id">/en/amrutham</field>
-    <field name="name">Amrutham</field>
     <field name="initial_release_date">2004-12-24</field>
     <field name="genre">Drama</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Sibi Malayil</field>
+    <field name="name">Amrutham</field>
   </doc>
   <doc>
     <field name="id">/en/an_american_crime</field>
-    <field name="name">An American Crime</field>
     <field name="initial_release_date">2007-01-19</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Tommy O'Haver</field>
+    <field name="name">An American Crime</field>
   </doc>
   <doc>
     <field name="id">/en/an_american_haunting</field>
-    <field name="name">An American Haunting</field>
     <field name="initial_release_date">2005-11-05</field>
     <field name="genre">Horror</field>
     <field name="genre">Mystery</field>
     <field name="genre">Thriller</field>
     <field name="directed_by">Courtney Solomon</field>
+    <field name="name">An American Haunting</field>
   </doc>
   <doc>
     <field name="id">/en/an_american_tail_the_mystery_of_the_night_monster</field>
-    <field name="name">An American Tail: The Mystery of the Night Monster</field>
     <field name="initial_release_date">2000-07-25</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Animated cartoon</field>
@@ -1839,10 +1831,10 @@
     <field name="genre">Children's/Family</field>
     <field name="genre">Family-Oriented Adventure</field>
     <field name="directed_by">Larry Latham</field>
+    <field name="name">An American Tail: The Mystery of the Night Monster</field>
   </doc>
   <doc>
     <field name="id">/en/an_evening_with_kevin_smith</field>
-    <field name="name">An Evening with Kevin Smith</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Indie film</field>
@@ -1851,23 +1843,23 @@
     <field name="genre">Biographical film</field>
     <field name="genre">Media studies</field>
     <field name="directed_by">J.M. Kenny</field>
+    <field name="name">An Evening with Kevin Smith</field>
   </doc>
   <doc>
     <field name="id">/en/an_evening_with_kevin_smith_2006</field>
-    <field name="name">An Evening with Kevin Smith 2: Evening Harder</field>
     <field name="genre">Documentary film</field>
     <field name="directed_by">J.M. Kenny</field>
+    <field name="name">An Evening with Kevin Smith 2: Evening Harder</field>
   </doc>
   <doc>
     <field name="id">/en/an_everlasting_piece</field>
-    <field name="name">An Everlasting Piece</field>
     <field name="initial_release_date">2000-12-25</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Barry Levinson</field>
+    <field name="name">An Everlasting Piece</field>
   </doc>
   <doc>
     <field name="id">/en/an_extremely_goofy_movie</field>
-    <field name="name">An Extremely Goofy Movie</field>
     <field name="initial_release_date">2000-02-29</field>
     <field name="genre">Animation</field>
     <field name="genre">Coming of age</field>
@@ -1876,25 +1868,25 @@
     <field name="genre">Comedy</field>
     <field name="directed_by">Ian Harrowell</field>
     <field name="directed_by">Douglas McCarthy</field>
+    <field name="name">An Extremely Goofy Movie</field>
   </doc>
   <doc>
     <field name="id">/en/an_inconvenient_truth</field>
-    <field name="name">An Inconvenient Truth</field>
     <field name="initial_release_date">2006-01-24</field>
     <field name="genre">Documentary film</field>
     <field name="directed_by">Davis Guggenheim</field>
+    <field name="name">An Inconvenient Truth</field>
   </doc>
   <doc>
     <field name="id">/en/an_unfinished_life</field>
-    <field name="name">An Unfinished Life</field>
     <field name="initial_release_date">2005-08-19</field>
     <field name="genre">Melodrama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Lasse Hallström</field>
+    <field name="name">An Unfinished Life</field>
   </doc>
   <doc>
     <field name="id">/en/anacondas_the_hunt_for_the_blood_orchid</field>
-    <field name="name">Anacondas: The Hunt for the Blood Orchid</field>
     <field name="initial_release_date">2004-08-25</field>
     <field name="genre">Thriller</field>
     <field name="genre">Adventure Film</field>
@@ -1904,27 +1896,27 @@
     <field name="genre">Natural horror film</field>
     <field name="genre">Jungle Film</field>
     <field name="directed_by">Dwight H. Little</field>
+    <field name="name">Anacondas: The Hunt for the Blood Orchid</field>
   </doc>
   <doc>
     <field name="id">/en/anal_pick-up</field>
-    <field name="name">Anal Pick-Up</field>
     <field name="genre">Pornographic film</field>
     <field name="genre">Gay pornography</field>
     <field name="directed_by">Decklin</field>
+    <field name="name">Anal Pick-Up</field>
   </doc>
   <doc>
     <field name="id">/en/analyze_that</field>
-    <field name="name">Analyze That</field>
     <field name="initial_release_date">2002-12-06</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Crime Comedy</field>
     <field name="genre">Gangster Film</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Harold Ramis</field>
+    <field name="name">Analyze That</field>
   </doc>
   <doc>
     <field name="id">/en/anamorph</field>
-    <field name="name">Anamorph</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
@@ -1932,10 +1924,10 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Suspense</field>
     <field name="directed_by">H.S. Miller</field>
+    <field name="name">Anamorph</field>
   </doc>
   <doc>
     <field name="id">/en/anand_2004</field>
-    <field name="name">Anand</field>
     <field name="initial_release_date">2004-10-15</field>
     <field name="genre">Musical</field>
     <field name="genre">Comedy</field>
@@ -1945,20 +1937,20 @@
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Sekhar Kammula</field>
+    <field name="name">Anand</field>
   </doc>
   <doc>
     <field name="id">/en/anbe_aaruyire</field>
-    <field name="name">Anbe Aaruyire</field>
     <field name="initial_release_date">2005-08-15</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="directed_by">S. J. Surya</field>
+    <field name="name">Anbe Aaruyire</field>
   </doc>
   <doc>
     <field name="id">/en/anbe_sivam</field>
-    <field name="name">Love is God</field>
     <field name="initial_release_date">2003-01-14</field>
     <field name="genre">Musical</field>
     <field name="genre">Musical comedy</field>
@@ -1969,43 +1961,43 @@
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="directed_by">Sundar C.</field>
+    <field name="name">Love is God</field>
   </doc>
   <doc>
     <field name="id">/en/ancanar</field>
-    <field name="name">Ancanar</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="directed_by">Sam R. Balcomb</field>
     <field name="directed_by">Raiya Corsiglia</field>
+    <field name="name">Ancanar</field>
   </doc>
   <doc>
     <field name="id">/en/anchorman_the_legend_of_ron_burgundy</field>
-    <field name="name">Anchorman: The Legend of Ron Burgundy</field>
     <field name="initial_release_date">2004-06-28</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Adam McKay</field>
+    <field name="name">Anchorman: The Legend of Ron Burgundy</field>
   </doc>
   <doc>
     <field name="id">/en/andaaz</field>
-    <field name="name">Andaaz</field>
     <field name="initial_release_date">2003-05-23</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="directed_by">Raj Kanwar</field>
+    <field name="name">Andaaz</field>
   </doc>
   <doc>
     <field name="id">/en/andarivaadu</field>
-    <field name="name">Andarivaadu</field>
     <field name="initial_release_date">2005-06-03</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Srinu Vaitla</field>
+    <field name="name">Andarivaadu</field>
   </doc>
   <doc>
     <field name="id">/en/andhrawala</field>
-    <field name="name">Andhrawala</field>
     <field name="initial_release_date">2004-01-01</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Film</field>
@@ -2013,27 +2005,27 @@
     <field name="genre">Drama</field>
     <field name="directed_by">Puri Jagannadh</field>
     <field name="directed_by">V.V.S. Ram</field>
+    <field name="name">Andhrawala</field>
   </doc>
   <doc>
     <field name="id">/en/ang_tanging_ina</field>
-    <field name="name">Ang Tanging Ina</field>
     <field name="initial_release_date">2003-05-28</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Wenn V. Deramas</field>
+    <field name="name">Ang Tanging Ina</field>
   </doc>
   <doc>
     <field name="id">/en/angel_eyes</field>
-    <field name="name">Angel Eyes</field>
     <field name="initial_release_date">2001-05-18</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Luis Mandoki</field>
+    <field name="name">Angel Eyes</field>
   </doc>
   <doc>
     <field name="id">/en/angel-a</field>
-    <field name="name">Angel-A</field>
     <field name="initial_release_date">2005-12-21</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Fantasy</field>
@@ -2041,112 +2033,112 @@
     <field name="genre">Romantic comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Luc Besson</field>
+    <field name="name">Angel-A</field>
   </doc>
   <doc>
     <field name="id">/en/angels_and_demons_2008</field>
-    <field name="name">Angels &amp;amp; Demons</field>
     <field name="initial_release_date">2009-05-04</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Crime Fiction</field>
     <field name="directed_by">Ron Howard</field>
+    <field name="name">Angels &amp;amp; Demons</field>
   </doc>
   <doc>
     <field name="id">/en/angels_and_virgins</field>
-    <field name="name">Virgin Territory</field>
-    <field name="directed_by">David Leland</field>
+    <field name="initial_release_date">2007-12-17</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2007-12-17</field>
+    <field name="directed_by">David Leland</field>
+    <field name="name">Virgin Territory</field>
   </doc>
   <doc>
     <field name="id">/en/angels_in_the_infield</field>
-    <field name="name">Angels in the Infield</field>
-    <field name="directed_by">Robert King</field>
+    <field name="initial_release_date">2000-04-09</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Sports</field>
     <field name="genre">Family</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Heavenly Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2000-04-09</field>
+    <field name="directed_by">Robert King</field>
+    <field name="name">Angels in the Infield</field>
   </doc>
   <doc>
     <field name="id">/en/anger_management_2003</field>
-    <field name="name">Anger Management</field>
-    <field name="directed_by">Peter Segal</field>
+    <field name="initial_release_date">2003-03-05</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Slapstick</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-03-05</field>
+    <field name="directed_by">Peter Segal</field>
+    <field name="name">Anger Management</field>
   </doc>
   <doc>
     <field name="id">/en/angli_the_movie</field>
-    <field name="name">Angli: The Movie</field>
-    <field name="directed_by">Mario Busietta</field>
+    <field name="initial_release_date">2005-05-28</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
-    <field name="initial_release_date">2005-05-28</field>
+    <field name="directed_by">Mario Busietta</field>
+    <field name="name">Angli: The Movie</field>
   </doc>
   <doc>
     <field name="id">/en/animal_factory</field>
-    <field name="name">Animal Factory</field>
-    <field name="directed_by">Steve Buscemi</field>
+    <field name="initial_release_date">2000-10-22</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Prison film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2000-10-22</field>
+    <field name="directed_by">Steve Buscemi</field>
+    <field name="name">Animal Factory</field>
   </doc>
   <doc>
     <field name="id">/en/anjaneya</field>
-    <field name="name">Anjaneya</field>
-    <field name="directed_by">Maharajan</field>
-    <field name="directed_by">N.Maharajan</field>
+    <field name="initial_release_date">2003-10-24</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
     <field name="genre">World cinema</field>
     <field name="genre">Tamil cinema</field>
-    <field name="initial_release_date">2003-10-24</field>
+    <field name="directed_by">Maharajan</field>
+    <field name="directed_by">N.Maharajan</field>
+    <field name="name">Anjaneya</field>
   </doc>
   <doc>
     <field name="id">/en/ankahee</field>
-    <field name="name">Ankahee</field>
-    <field name="directed_by">Vikram Bhatt</field>
+    <field name="initial_release_date">2006-05-19</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-05-19</field>
+    <field name="directed_by">Vikram Bhatt</field>
+    <field name="name">Ankahee</field>
   </doc>
   <doc>
     <field name="id">/en/annapolis_2006</field>
-    <field name="name">Annapolis</field>
-    <field name="directed_by">Justin Lin</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
+    <field name="directed_by">Justin Lin</field>
+    <field name="name">Annapolis</field>
   </doc>
   <doc>
     <field name="id">/en/annavaram_2007</field>
-    <field name="name">Annavaram</field>
-    <field name="directed_by">Gridhar</field>
-    <field name="directed_by">Bhimaneni Srinivasa Rao</field>
-    <field name="directed_by">Sippy</field>
+    <field name="initial_release_date">2006-12-29</field>
     <field name="genre">Thriller</field>
     <field name="genre">Musical</field>
     <field name="genre">Action Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2006-12-29</field>
+    <field name="directed_by">Gridhar</field>
+    <field name="directed_by">Bhimaneni Srinivasa Rao</field>
+    <field name="directed_by">Sippy</field>
+    <field name="name">Annavaram</field>
   </doc>
   <doc>
     <field name="id">/en/anniyan</field>
-    <field name="name">Anniyan</field>
-    <field name="directed_by">S. Shankar</field>
+    <field name="initial_release_date">2005-06-10</field>
     <field name="genre">Horror</field>
     <field name="genre">Short Film</field>
     <field name="genre">Psychological thriller</field>
@@ -2154,12 +2146,12 @@
     <field name="genre">Musical Drama</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-06-10</field>
+    <field name="directed_by">S. Shankar</field>
+    <field name="name">Anniyan</field>
   </doc>
   <doc>
     <field name="id">/en/another_gay_movie</field>
-    <field name="name">Another Gay Movie</field>
-    <field name="directed_by">Todd Stephens</field>
+    <field name="initial_release_date">2006-04-28</field>
     <field name="genre">Parody</field>
     <field name="genre">Coming of age</field>
     <field name="genre">LGBT</field>
@@ -2171,99 +2163,99 @@
     <field name="genre">Sex comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Pornographic film</field>
-    <field name="initial_release_date">2006-04-28</field>
+    <field name="directed_by">Todd Stephens</field>
+    <field name="name">Another Gay Movie</field>
   </doc>
   <doc>
     <field name="id">/en/ant_man</field>
-    <field name="name">Ant-Man</field>
-    <field name="directed_by">Peyton Reed</field>
+    <field name="initial_release_date">2015-07-17</field>
     <field name="genre">Thriller</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Superhero movie</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2015-07-17</field>
+    <field name="directed_by">Peyton Reed</field>
+    <field name="name">Ant-Man</field>
   </doc>
   <doc>
     <field name="id">/en/anthony_zimmer</field>
-    <field name="name">Anthony Zimmer</field>
-    <field name="directed_by">Jérôme Salle</field>
+    <field name="initial_release_date">2005-04-27</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Crime Thriller</field>
-    <field name="initial_release_date">2005-04-27</field>
+    <field name="directed_by">Jérôme Salle</field>
+    <field name="name">Anthony Zimmer</field>
   </doc>
   <doc>
     <field name="id">/en/antwone_fisher_2003</field>
-    <field name="name">Antwone Fisher</field>
-    <field name="directed_by">Denzel Washington</field>
+    <field name="initial_release_date">2002-09-12</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-09-12</field>
+    <field name="directed_by">Denzel Washington</field>
+    <field name="name">Antwone Fisher</field>
   </doc>
   <doc>
     <field name="id">/en/anukokunda_oka_roju</field>
-    <field name="name">Anukokunda Oka Roju</field>
-    <field name="directed_by">Chandra Sekhar Yeleti</field>
+    <field name="initial_release_date">2005-06-30</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2005-06-30</field>
+    <field name="directed_by">Chandra Sekhar Yeleti</field>
+    <field name="name">Anukokunda Oka Roju</field>
   </doc>
   <doc>
     <field name="id">/en/anus_magillicutty</field>
-    <field name="name">Anus Magillicutty</field>
-    <field name="directed_by">Morey Fineburgh</field>
+    <field name="initial_release_date">2003-04-15</field>
     <field name="genre">B movie</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-04-15</field>
+    <field name="directed_by">Morey Fineburgh</field>
+    <field name="name">Anus Magillicutty</field>
   </doc>
   <doc>
     <field name="id">/en/any_way_the_wind_blows</field>
-    <field name="name">Any Way the Wind Blows</field>
-    <field name="directed_by">Tom Barman</field>
-    <field name="genre">Comedy-drama</field>
     <field name="initial_release_date">2003-05-17</field>
+    <field name="genre">Comedy-drama</field>
+    <field name="directed_by">Tom Barman</field>
+    <field name="name">Any Way the Wind Blows</field>
   </doc>
   <doc>
     <field name="id">/en/anything_else</field>
-    <field name="name">Anything Else</field>
-    <field name="directed_by">Woody Allen</field>
+    <field name="initial_release_date">2003-08-27</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-08-27</field>
+    <field name="directed_by">Woody Allen</field>
+    <field name="name">Anything Else</field>
   </doc>
   <doc>
     <field name="id">/en/apasionados</field>
-    <field name="name">Apasionados</field>
-    <field name="directed_by">Juan José Jusid</field>
+    <field name="initial_release_date">2002-06-06</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-06-06</field>
+    <field name="directed_by">Juan José Jusid</field>
+    <field name="name">Apasionados</field>
   </doc>
   <doc>
     <field name="id">/en/apocalypto</field>
-    <field name="name">Apocalypto</field>
-    <field name="directed_by">Mel Gibson</field>
+    <field name="initial_release_date">2006-12-08</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Epic film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-12-08</field>
+    <field name="directed_by">Mel Gibson</field>
+    <field name="name">Apocalypto</field>
   </doc>
   <doc>
     <field name="id">/en/aprils_shower</field>
-    <field name="name">April's Shower</field>
-    <field name="directed_by">Trish Doolan</field>
+    <field name="initial_release_date">2006-01-13</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
@@ -2274,12 +2266,12 @@
     <field name="genre">Sex comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-01-13</field>
+    <field name="directed_by">Trish Doolan</field>
+    <field name="name">April's Shower</field>
   </doc>
   <doc>
     <field name="id">/en/aquamarine_2006</field>
-    <field name="name">Aquamarine</field>
-    <field name="directed_by">Elizabeth Allen Rosenbaum</field>
+    <field name="initial_release_date">2006-02-26</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Teen film</field>
     <field name="genre">Romance Film</field>
@@ -2287,65 +2279,65 @@
     <field name="genre">Fantasy</field>
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2006-02-26</field>
+    <field name="directed_by">Elizabeth Allen Rosenbaum</field>
+    <field name="name">Aquamarine</field>
   </doc>
   <doc>
     <field name="id">/en/arabian_nights</field>
-    <field name="name">Arabian Nights</field>
-    <field name="directed_by">Steve Barron</field>
+    <field name="initial_release_date">2000-04-30</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
-    <field name="initial_release_date">2000-04-30</field>
+    <field name="directed_by">Steve Barron</field>
+    <field name="name">Arabian Nights</field>
   </doc>
   <doc>
     <field name="id">/en/aragami</field>
-    <field name="name">Aragami</field>
-    <field name="directed_by">Ryuhei Kitamura</field>
+    <field name="initial_release_date">2003-03-27</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">World cinema</field>
     <field name="genre">Japanese Movies</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-03-27</field>
+    <field name="directed_by">Ryuhei Kitamura</field>
+    <field name="name">Aragami</field>
   </doc>
   <doc>
     <field name="id">/en/arahan</field>
-    <field name="name">Arahan</field>
-    <field name="directed_by">Ryoo Seung-wan</field>
+    <field name="initial_release_date">2004-04-30</field>
     <field name="genre">Action Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Korean drama</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2004-04-30</field>
+    <field name="directed_by">Ryoo Seung-wan</field>
+    <field name="name">Arahan</field>
   </doc>
   <doc>
     <field name="id">/en/ararat</field>
-    <field name="name">Ararat</field>
-    <field name="directed_by">Atom Egoyan</field>
+    <field name="initial_release_date">2002-05-20</field>
     <field name="genre">LGBT</field>
     <field name="genre">Political drama</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-05-20</field>
+    <field name="directed_by">Atom Egoyan</field>
+    <field name="name">Ararat</field>
   </doc>
   <doc>
     <field name="id">/en/are_we_there_yet</field>
-    <field name="name">Are We There Yet</field>
-    <field name="directed_by">Brian Levant</field>
+    <field name="initial_release_date">2005-01-21</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-01-21</field>
+    <field name="directed_by">Brian Levant</field>
+    <field name="name">Are We There Yet</field>
   </doc>
   <doc>
     <field name="id">/en/arinthum_ariyamalum</field>
-    <field name="name">Arinthum Ariyamalum</field>
-    <field name="directed_by">Vishnuvardhan</field>
+    <field name="initial_release_date">2005-05-20</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Family</field>
     <field name="genre">Romance Film</field>
@@ -2353,60 +2345,65 @@
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-05-20</field>
+    <field name="directed_by">Vishnuvardhan</field>
+    <field name="name">Arinthum Ariyamalum</field>
   </doc>
   <doc>
     <field name="id">/en/arisan</field>
-    <field name="name">Arisan!</field>
-    <field name="directed_by">Nia Dinata</field>
+    <field name="initial_release_date">2003-12-10</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-12-10</field>
+    <field name="directed_by">Nia Dinata</field>
+    <field name="name">Arisan!</field>
   </doc>
   <doc>
     <field name="id">/en/arjun_2004</field>
-    <field name="name">Arjun</field>
-    <field name="directed_by">Gunasekhar</field>
-    <field name="directed_by">J. Hemambar</field>
+    <field name="initial_release_date">2004-08-18</field>
     <field name="genre">Action Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2004-08-18</field>
+    <field name="directed_by">Gunasekhar</field>
+    <field name="directed_by">J. Hemambar</field>
+    <field name="name">Arjun</field>
   </doc>
   <doc>
     <field name="id">/en/armaan</field>
-    <field name="name">Armaan</field>
-    <field name="directed_by">Honey Irani</field>
+    <field name="initial_release_date">2003-05-16</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Family</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-05-16</field>
+    <field name="directed_by">Honey Irani</field>
+    <field name="name">Armaan</field>
   </doc>
   <doc>
     <field name="id">/en/around_the_bend</field>
-    <field name="name">Around the Bend</field>
-    <field name="directed_by">Jordan Roberts</field>
+    <field name="initial_release_date">2004-10-08</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Road movie</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-10-08</field>
+    <field name="directed_by">Jordan Roberts</field>
+    <field name="name">Around the Bend</field>
   </doc>
   <doc>
     <field name="id">/en/around_the_world_in_80_days_2004</field>
-    <field name="name">Around the World in 80 Days</field>
-    <field name="directed_by">Frank Coraci</field>
+    <field name="initial_release_date">2004-06-13</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Family</field>
     <field name="genre">Western</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2004-06-13</field>
+    <field name="directed_by">Frank Coraci</field>
+    <field name="name">Around the World in 80 Days</field>
   </doc>
   <doc>
     <field name="id">/en/art_of_the_devil_2</field>
-    <field name="name">Art of the Devil 2</field>
+    <field name="initial_release_date">2005-12-01</field>
+    <field name="genre">Horror</field>
+    <field name="genre">Slasher</field>
+    <field name="genre">Fantasy</field>
+    <field name="genre">Mystery</field>
     <field name="directed_by">Pasith Buranajan</field>
     <field name="directed_by">Seree Phongnithi</field>
     <field name="directed_by">Yosapong Polsap</field>
@@ -2414,44 +2411,39 @@
     <field name="directed_by">Art Thamthrakul</field>
     <field name="directed_by">Kongkiat Khomsiri</field>
     <field name="directed_by">Isara Nadee</field>
-    <field name="genre">Horror</field>
-    <field name="genre">Slasher</field>
-    <field name="genre">Fantasy</field>
-    <field name="genre">Mystery</field>
-    <field name="initial_release_date">2005-12-01</field>
+    <field name="name">Art of the Devil 2</field>
   </doc>
   <doc>
     <field name="id">/en/art_school_confidential</field>
-    <field name="name">Art School Confidential</field>
-    <field name="directed_by">Terry Zwigoff</field>
     <field name="genre">Comedy-drama</field>
+    <field name="directed_by">Terry Zwigoff</field>
+    <field name="name">Art School Confidential</field>
   </doc>
   <doc>
     <field name="id">/en/arul</field>
-    <field name="name">Arul</field>
-    <field name="directed_by">Hari</field>
+    <field name="initial_release_date">2004-05-01</field>
     <field name="genre">Musical</field>
     <field name="genre">Action Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2004-05-01</field>
+    <field name="directed_by">Hari</field>
+    <field name="name">Arul</field>
   </doc>
   <doc>
     <field name="id">/en/arya_2007</field>
-    <field name="name">Aarya</field>
-    <field name="directed_by">Balasekaran</field>
+    <field name="initial_release_date">2007-08-10</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2007-08-10</field>
+    <field name="directed_by">Balasekaran</field>
+    <field name="name">Aarya</field>
   </doc>
   <doc>
     <field name="id">/en/arya_2004</field>
-    <field name="name">Arya</field>
-    <field name="directed_by">Sukumar</field>
+    <field name="initial_release_date">2004-05-07</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
@@ -2461,20 +2453,20 @@
     <field name="genre">Musical Drama</field>
     <field name="genre">World cinema</field>
     <field name="genre">Tollywood</field>
-    <field name="initial_release_date">2004-05-07</field>
+    <field name="directed_by">Sukumar</field>
+    <field name="name">Arya</field>
   </doc>
   <doc>
     <field name="id">/en/aryan_2006</field>
-    <field name="name">Aryan: Unbreakable</field>
-    <field name="directed_by">Abhishek Kapoor</field>
+    <field name="initial_release_date">2006-12-05</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-12-05</field>
+    <field name="directed_by">Abhishek Kapoor</field>
+    <field name="name">Aryan: Unbreakable</field>
   </doc>
   <doc>
     <field name="id">/en/as_it_is_in_heaven</field>
-    <field name="name">As It Is in Heaven</field>
-    <field name="directed_by">Kay Pollak</field>
+    <field name="initial_release_date">2004-08-20</field>
     <field name="genre">Musical</field>
     <field name="genre">Comedy</field>
     <field name="genre">Romance Film</field>
@@ -2482,33 +2474,33 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2004-08-20</field>
+    <field name="directed_by">Kay Pollak</field>
+    <field name="name">As It Is in Heaven</field>
   </doc>
   <doc>
     <field name="id">/en/ashok</field>
-    <field name="name">Ashok</field>
-    <field name="directed_by">Surender Reddy</field>
+    <field name="initial_release_date">2006-07-13</field>
     <field name="genre">Action Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2006-07-13</field>
+    <field name="directed_by">Surender Reddy</field>
+    <field name="name">Ashok</field>
   </doc>
   <doc>
     <field name="id">/en/ask_the_dust_2006</field>
-    <field name="name">Ask the Dust</field>
-    <field name="directed_by">Robert Towne</field>
+    <field name="initial_release_date">2006-02-02</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-02-02</field>
+    <field name="directed_by">Robert Towne</field>
+    <field name="name">Ask the Dust</field>
   </doc>
   <doc>
     <field name="id">/en/asoka</field>
-    <field name="name">Ashoka the Great</field>
-    <field name="directed_by">Santosh Sivan</field>
+    <field name="initial_release_date">2001-09-13</field>
     <field name="genre">Action Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">War film</field>
@@ -2518,188 +2510,188 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2001-09-13</field>
+    <field name="directed_by">Santosh Sivan</field>
+    <field name="name">Ashoka the Great</field>
   </doc>
   <doc>
     <field name="id">/en/assault_on_precinct_13_2005</field>
-    <field name="name">Assault on Precinct 13</field>
-    <field name="directed_by">Jean-François Richet</field>
+    <field name="initial_release_date">2005-01-19</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Remake</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-01-19</field>
+    <field name="directed_by">Jean-François Richet</field>
+    <field name="name">Assault on Precinct 13</field>
   </doc>
   <doc>
     <field name="id">/en/astitva</field>
-    <field name="name">Astitva</field>
-    <field name="directed_by">Mahesh Manjrekar</field>
+    <field name="initial_release_date">2000-10-06</field>
     <field name="genre">Art film</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2000-10-06</field>
+    <field name="directed_by">Mahesh Manjrekar</field>
+    <field name="name">Astitva</field>
   </doc>
   <doc>
     <field name="id">/en/asylum_2005</field>
-    <field name="name">Asylum</field>
-    <field name="directed_by">David Mackenzie</field>
+    <field name="initial_release_date">2005-08-12</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-08-12</field>
+    <field name="directed_by">David Mackenzie</field>
+    <field name="name">Asylum</field>
   </doc>
   <doc>
     <field name="id">/en/atanarjuat</field>
-    <field name="name">Atanarjuat: The Fast Runner</field>
-    <field name="directed_by">Zacharias Kunuk</field>
+    <field name="initial_release_date">2001-05-13</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-05-13</field>
+    <field name="directed_by">Zacharias Kunuk</field>
+    <field name="name">Atanarjuat: The Fast Runner</field>
   </doc>
   <doc>
     <field name="id">/en/athadu</field>
-    <field name="name">Athadu</field>
-    <field name="directed_by">Trivikram Srinivas</field>
+    <field name="initial_release_date">2005-08-10</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2005-08-10</field>
+    <field name="directed_by">Trivikram Srinivas</field>
+    <field name="name">Athadu</field>
   </doc>
   <doc>
     <field name="id">/en/atl_2006</field>
-    <field name="name">ATL</field>
-    <field name="directed_by">Chris Robinson</field>
+    <field name="initial_release_date">2006-03-28</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-03-28</field>
+    <field name="directed_by">Chris Robinson</field>
+    <field name="name">ATL</field>
   </doc>
   <doc>
     <field name="id">/en/atlantis_the_lost_empire</field>
-    <field name="name">Atlantis: The Lost Empire</field>
-    <field name="directed_by">Gary Trousdale</field>
-    <field name="directed_by">Kirk Wise</field>
+    <field name="initial_release_date">2001-06-03</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Family</field>
     <field name="genre">Animation</field>
-    <field name="initial_release_date">2001-06-03</field>
+    <field name="directed_by">Gary Trousdale</field>
+    <field name="directed_by">Kirk Wise</field>
+    <field name="name">Atlantis: The Lost Empire</field>
   </doc>
   <doc>
     <field name="id">/en/atonement_2007</field>
-    <field name="name">Atonement</field>
-    <field name="directed_by">Joe Wright</field>
+    <field name="initial_release_date">2007-08-28</field>
     <field name="genre">Romance Film</field>
     <field name="genre">War film</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
     <field name="genre">Music</field>
-    <field name="initial_release_date">2007-08-28</field>
+    <field name="directed_by">Joe Wright</field>
+    <field name="name">Atonement</field>
   </doc>
   <doc>
     <field name="id">/en/attagasam</field>
-    <field name="name">Attahasam</field>
-    <field name="directed_by">Saran</field>
+    <field name="initial_release_date">2004-11-12</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-11-12</field>
+    <field name="directed_by">Saran</field>
+    <field name="name">Attahasam</field>
   </doc>
   <doc>
     <field name="id">/en/attila_2001</field>
-    <field name="name">Attila</field>
-    <field name="directed_by">Dick Lowry</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">History</field>
     <field name="genre">Action Film</field>
     <field name="genre">War film</field>
     <field name="genre">Historical fiction</field>
     <field name="genre">Biographical film</field>
+    <field name="directed_by">Dick Lowry</field>
+    <field name="name">Attila</field>
   </doc>
   <doc>
     <field name="id">/en/austin_powers_goldmember</field>
-    <field name="name">Austin Powers: Goldmember</field>
-    <field name="directed_by">Jay Roach</field>
+    <field name="initial_release_date">2002-07-22</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2002-07-22</field>
+    <field name="directed_by">Jay Roach</field>
+    <field name="name">Austin Powers: Goldmember</field>
   </doc>
   <doc>
     <field name="id">/en/australian_rules</field>
-    <field name="name">Australian Rules</field>
-    <field name="directed_by">Paul Goldman</field>
     <field name="genre">Drama</field>
+    <field name="directed_by">Paul Goldman</field>
+    <field name="name">Australian Rules</field>
   </doc>
   <doc>
     <field name="id">/en/auto</field>
-    <field name="name">Oram Po</field>
-    <field name="directed_by">Pushkar</field>
-    <field name="directed_by">Gayatri</field>
+    <field name="initial_release_date">2007-02-16</field>
     <field name="genre">Action Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2007-02-16</field>
+    <field name="directed_by">Pushkar</field>
+    <field name="directed_by">Gayatri</field>
+    <field name="name">Oram Po</field>
   </doc>
   <doc>
     <field name="id">/en/auto_focus</field>
-    <field name="name">Auto Focus</field>
-    <field name="directed_by">Paul Schrader</field>
-    <field name="directed_by">Larry Karaszewski</field>
+    <field name="initial_release_date">2002-09-08</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-09-08</field>
+    <field name="directed_by">Paul Schrader</field>
+    <field name="directed_by">Larry Karaszewski</field>
+    <field name="name">Auto Focus</field>
   </doc>
   <doc>
     <field name="id">/en/autograph_2004</field>
-    <field name="name">Autograph</field>
-    <field name="directed_by">Cheran</field>
+    <field name="initial_release_date">2004-02-14</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2004-02-14</field>
+    <field name="directed_by">Cheran</field>
+    <field name="name">Autograph</field>
   </doc>
   <doc>
     <field name="id">/en/avalon_2001</field>
-    <field name="name">Avalon</field>
-    <field name="directed_by">Mamoru Oshii</field>
+    <field name="initial_release_date">2001-01-20</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-01-20</field>
+    <field name="directed_by">Mamoru Oshii</field>
+    <field name="name">Avalon</field>
   </doc>
   <doc>
     <field name="id">/en/avatar_2009</field>
-    <field name="name">Avatar</field>
-    <field name="directed_by">James Cameron</field>
+    <field name="initial_release_date">2009-12-10</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action Film</field>
-    <field name="initial_release_date">2009-12-10</field>
+    <field name="directed_by">James Cameron</field>
+    <field name="name">Avatar</field>
   </doc>
   <doc>
     <field name="id">/en/avenging_angelo</field>
-    <field name="name">Avenging Angelo</field>
-    <field name="directed_by">Martyn Burke</field>
+    <field name="initial_release_date">2002-08-30</field>
     <field name="genre">Action Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Crime Fiction</field>
@@ -2709,21 +2701,21 @@
     <field name="genre">Crime Comedy</field>
     <field name="genre">Gangster Film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2002-08-30</field>
+    <field name="directed_by">Martyn Burke</field>
+    <field name="name">Avenging Angelo</field>
   </doc>
   <doc>
     <field name="id">/en/awake_2007</field>
-    <field name="name">Awake</field>
-    <field name="directed_by">Joby Harold</field>
+    <field name="initial_release_date">2007-11-30</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Mystery</field>
-    <field name="initial_release_date">2007-11-30</field>
+    <field name="directed_by">Joby Harold</field>
+    <field name="name">Awake</field>
   </doc>
   <doc>
     <field name="id">/en/awara_paagal_deewana</field>
-    <field name="name">Awara Paagal Deewana</field>
-    <field name="directed_by">Vikram Bhatt</field>
+    <field name="initial_release_date">2002-06-20</field>
     <field name="genre">Action Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Musical</field>
@@ -2733,45 +2725,45 @@
     <field name="genre">Bollywood</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2002-06-20</field>
+    <field name="directed_by">Vikram Bhatt</field>
+    <field name="name">Awara Paagal Deewana</field>
   </doc>
   <doc>
     <field name="id">/en/awesome_i_fuckin_shot_that</field>
-    <field name="name">Awesome; I Fuckin' Shot That!</field>
-    <field name="directed_by">Adam Yauch</field>
+    <field name="initial_release_date">2006-01-06</field>
     <field name="genre">Concert film</field>
     <field name="genre">Rockumentary</field>
     <field name="genre">Hip hop film</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Indie film</field>
-    <field name="initial_release_date">2006-01-06</field>
+    <field name="directed_by">Adam Yauch</field>
+    <field name="name">Awesome; I Fuckin' Shot That!</field>
   </doc>
   <doc>
     <field name="id">/en/azumi</field>
-    <field name="name">Azumi</field>
-    <field name="directed_by">Ryuhei Kitamura</field>
+    <field name="initial_release_date">2003-05-10</field>
     <field name="genre">Action Film</field>
     <field name="genre">Epic film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Thriller</field>
-    <field name="initial_release_date">2003-05-10</field>
+    <field name="directed_by">Ryuhei Kitamura</field>
+    <field name="name">Azumi</field>
   </doc>
   <doc>
     <field name="id">/wikipedia/en_title/$00C6on_Flux_$0028film$0029</field>
-    <field name="name">Æon Flux</field>
-    <field name="directed_by">Karyn Kusama</field>
+    <field name="initial_release_date">2005-12-01</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Dystopia</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Adventure Film</field>
-    <field name="initial_release_date">2005-12-01</field>
+    <field name="directed_by">Karyn Kusama</field>
+    <field name="name">Æon Flux</field>
   </doc>
   <doc>
     <field name="id">/en/baabul</field>
-    <field name="name">Baabul</field>
-    <field name="directed_by">Ravi Chopra</field>
+    <field name="initial_release_date">2006-12-08</field>
     <field name="genre">Musical</field>
     <field name="genre">Family</field>
     <field name="genre">Romance Film</field>
@@ -2779,146 +2771,145 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2006-12-08</field>
+    <field name="directed_by">Ravi Chopra</field>
+    <field name="name">Baabul</field>
   </doc>
   <doc>
     <field name="id">/en/baadasssss_cinema</field>
-    <field name="name">BaadAsssss Cinema</field>
-    <field name="directed_by">Isaac Julien</field>
+    <field name="initial_release_date">2002-08-14</field>
     <field name="genre">Indie film</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Blaxploitation film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Film &amp;amp; Television History</field>
     <field name="genre">Biographical film</field>
-    <field name="initial_release_date">2002-08-14</field>
+    <field name="directed_by">Isaac Julien</field>
+    <field name="name">BaadAsssss Cinema</field>
   </doc>
   <doc>
     <field name="id">/en/baadasssss</field>
-    <field name="name">Baadasssss!</field>
-    <field name="directed_by">Mario Van Peebles</field>
+    <field name="initial_release_date">2003-09-07</field>
     <field name="genre">Indie film</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Docudrama</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-09-07</field>
+    <field name="directed_by">Mario Van Peebles</field>
+    <field name="name">Baadasssss!</field>
   </doc>
   <doc>
     <field name="id">/en/babel_2006</field>
-    <field name="name">Babel</field>
-    <field name="directed_by">Alejandro González Iñárritu</field>
+    <field name="initial_release_date">2006-05-23</field>
     <field name="genre">Indie film</field>
     <field name="genre">Political drama</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-05-23</field>
+    <field name="directed_by">Alejandro González Iñárritu</field>
+    <field name="name">Babel</field>
   </doc>
   <doc>
     <field name="id">/en/baby_boy</field>
-    <field name="name">Baby Boy</field>
-    <field name="directed_by">John Singleton</field>
+    <field name="initial_release_date">2001-06-21</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-06-21</field>
+    <field name="directed_by">John Singleton</field>
+    <field name="name">Baby Boy</field>
   </doc>
   <doc>
     <field name="id">/en/back_by_midnight</field>
-    <field name="name">Back by Midnight</field>
-    <field name="directed_by">Harry Basil</field>
+    <field name="initial_release_date">2005-01-25</field>
     <field name="genre">Prison film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2005-01-25</field>
+    <field name="directed_by">Harry Basil</field>
+    <field name="name">Back by Midnight</field>
   </doc>
   <doc>
     <field name="id">/en/back_to_school_with_franklin</field>
-    <field name="name">Back to School with Franklin</field>
-    <field name="directed_by">Arna Selznick</field>
+    <field name="initial_release_date">2003-08-19</field>
     <field name="genre">Family</field>
     <field name="genre">Animation</field>
     <field name="genre">Educational film</field>
-    <field name="initial_release_date">2003-08-19</field>
+    <field name="directed_by">Arna Selznick</field>
+    <field name="name">Back to School with Franklin</field>
   </doc>
   <doc>
     <field name="id">/en/bad_boys_ii</field>
-    <field name="name">Bad Boys II</field>
-    <field name="directed_by">Michael Bay</field>
+    <field name="initial_release_date">2003-07-09</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-07-09</field>
+    <field name="directed_by">Michael Bay</field>
+    <field name="name">Bad Boys II</field>
   </doc>
   <doc>
     <field name="id">/wikipedia/ru_id/1598664</field>
-    <field name="name">Bad Company</field>
-    <field name="directed_by">Joel Schumacher</field>
+    <field name="initial_release_date">2002-04-26</field>
     <field name="genre">Spy film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2002-04-26</field>
+    <field name="directed_by">Joel Schumacher</field>
+    <field name="name">Bad Company</field>
   </doc>
   <doc>
     <field name="id">/en/bad_education</field>
-    <field name="name">Bad Education</field>
-    <field name="directed_by">Pedro Almodóvar</field>
+    <field name="initial_release_date">2004-03-19</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-03-19</field>
+    <field name="directed_by">Pedro Almodóvar</field>
+    <field name="name">Bad Education</field>
   </doc>
   <doc>
     <field name="id">/en/bad_eggs</field>
-    <field name="name">Bad Eggs</field>
-    <field name="directed_by">Tony Martin</field>
     <field name="genre">Comedy</field>
+    <field name="directed_by">Tony Martin</field>
+    <field name="name">Bad Eggs</field>
   </doc>
   <doc>
     <field name="id">/en/bad_news_bears</field>
-    <field name="name">Bad News Bears</field>
-    <field name="directed_by">Richard Linklater</field>
+    <field name="initial_release_date">2005-07-22</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2005-07-22</field>
+    <field name="directed_by">Richard Linklater</field>
+    <field name="name">Bad News Bears</field>
   </doc>
   <doc>
     <field name="id">/en/bad_santa</field>
-    <field name="name">Bad Santa</field>
-    <field name="directed_by">Terry Zwigoff</field>
+    <field name="initial_release_date">2003-11-26</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-11-26</field>
+    <field name="directed_by">Terry Zwigoff</field>
+    <field name="name">Bad Santa</field>
   </doc>
   <doc>
     <field name="id">/en/badal</field>
-    <field name="name">Badal</field>
-    <field name="directed_by">Raj Kanwar</field>
+    <field name="initial_release_date">2000-02-11</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2000-02-11</field>
+    <field name="directed_by">Raj Kanwar</field>
+    <field name="name">Badal</field>
   </doc>
   <doc>
     <field name="id">/en/baghdad_er</field>
-    <field name="name">Baghdad ER</field>
-    <field name="directed_by">Jon Alpert</field>
-    <field name="directed_by">Matthew O'Neill</field>
+    <field name="initial_release_date">2006-08-29</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">War film</field>
     <field name="genre">Biographical film</field>
-    <field name="initial_release_date">2006-08-29</field>
+    <field name="directed_by">Jon Alpert</field>
+    <field name="directed_by">Matthew O'Neill</field>
+    <field name="name">Baghdad ER</field>
   </doc>
   <doc>
     <field name="id">/en/baise_moi</field>
-    <field name="name">Baise Moi</field>
-    <field name="directed_by">Virginie Despentes</field>
-    <field name="directed_by">Coralie Trinh Thi</field>
+    <field name="initial_release_date">2000-06-28</field>
     <field name="genre">Erotica</field>
     <field name="genre">Thriller</field>
     <field name="genre">Erotic thriller</field>
@@ -2926,12 +2917,13 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Road movie</field>
-    <field name="initial_release_date">2000-06-28</field>
+    <field name="directed_by">Virginie Despentes</field>
+    <field name="directed_by">Coralie Trinh Thi</field>
+    <field name="name">Baise Moi</field>
   </doc>
   <doc>
     <field name="id">/en/bait_2000</field>
-    <field name="name">Bait</field>
-    <field name="directed_by">Antoine Fuqua</field>
+    <field name="initial_release_date">2000-09-15</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Adventure Film</field>
@@ -2940,21 +2932,21 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2000-09-15</field>
+    <field name="directed_by">Antoine Fuqua</field>
+    <field name="name">Bait</field>
   </doc>
   <doc>
     <field name="id">/en/bala_2002</field>
-    <field name="name">Bala</field>
-    <field name="directed_by">Deepak</field>
+    <field name="initial_release_date">2002-12-13</field>
     <field name="genre">Drama</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2002-12-13</field>
+    <field name="directed_by">Deepak</field>
+    <field name="name">Bala</field>
   </doc>
   <doc>
     <field name="id">/en/ballistic_ecks_vs_sever</field>
-    <field name="name">Ballistic: Ecks vs. Sever</field>
-    <field name="directed_by">Wych Kaosayananda</field>
+    <field name="initial_release_date">2002-09-20</field>
     <field name="genre">Spy film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
@@ -2962,44 +2954,44 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Thriller</field>
     <field name="genre">Glamorized Spy Film</field>
-    <field name="initial_release_date">2002-09-20</field>
+    <field name="directed_by">Wych Kaosayananda</field>
+    <field name="name">Ballistic: Ecks vs. Sever</field>
   </doc>
   <doc>
     <field name="id">/en/balu_abcdefg</field>
-    <field name="name">Balu ABCDEFG</field>
-    <field name="directed_by">A. Karunakaran</field>
+    <field name="initial_release_date">2005-01-06</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-01-06</field>
+    <field name="directed_by">A. Karunakaran</field>
+    <field name="name">Balu ABCDEFG</field>
   </doc>
   <doc>
     <field name="id">/en/balzac_and_the_little_chinese_seamstress_2002</field>
-    <field name="name">The Little Chinese Seamstress</field>
-    <field name="directed_by">Dai Sijie</field>
+    <field name="initial_release_date">2002-05-16</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-05-16</field>
+    <field name="directed_by">Dai Sijie</field>
+    <field name="name">The Little Chinese Seamstress</field>
   </doc>
   <doc>
     <field name="id">/en/bambi_ii</field>
-    <field name="name">Bambi II</field>
-    <field name="directed_by">Brian Pimental</field>
+    <field name="initial_release_date">2006-01-26</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Family-Oriented Adventure</field>
-    <field name="initial_release_date">2006-01-26</field>
+    <field name="directed_by">Brian Pimental</field>
+    <field name="name">Bambi II</field>
   </doc>
   <doc>
     <field name="id">/en/bamboozled</field>
-    <field name="name">Bamboozled</field>
-    <field name="directed_by">Spike Lee</field>
+    <field name="initial_release_date">2000-10-06</field>
     <field name="genre">Satire</field>
     <field name="genre">Indie film</field>
     <field name="genre">Music</field>
@@ -3008,25 +3000,25 @@
     <field name="genre">Media Satire</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2000-10-06</field>
+    <field name="directed_by">Spike Lee</field>
+    <field name="name">Bamboozled</field>
   </doc>
   <doc>
     <field name="id">/en/bandidas</field>
-    <field name="name">Bandidas</field>
-    <field name="directed_by">Espen Sandberg</field>
-    <field name="directed_by">Joachim Rønning</field>
+    <field name="initial_release_date">2006-01-18</field>
     <field name="genre">Western</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Adventure Film</field>
-    <field name="initial_release_date">2006-01-18</field>
+    <field name="directed_by">Espen Sandberg</field>
+    <field name="directed_by">Joachim Rønning</field>
+    <field name="name">Bandidas</field>
   </doc>
   <doc>
     <field name="id">/en/bandits</field>
-    <field name="name">Bandits</field>
-    <field name="directed_by">Barry Levinson</field>
+    <field name="initial_release_date">2001-10-12</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Buddy film</field>
@@ -3034,145 +3026,151 @@
     <field name="genre">Heist film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-10-12</field>
+    <field name="directed_by">Barry Levinson</field>
+    <field name="name">Bandits</field>
   </doc>
   <doc>
     <field name="id">/en/bangaram</field>
-    <field name="name">Bangaram</field>
-    <field name="directed_by">Dharani</field>
+    <field name="initial_release_date">2006-05-03</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-05-03</field>
+    <field name="directed_by">Dharani</field>
+    <field name="name">Bangaram</field>
   </doc>
   <doc>
     <field name="id">/en/bangkok_loco</field>
-    <field name="name">Bangkok Loco</field>
-    <field name="directed_by">Pornchai Hongrattanaporn</field>
+    <field name="initial_release_date">2004-10-07</field>
     <field name="genre">Musical</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2004-10-07</field>
+    <field name="directed_by">Pornchai Hongrattanaporn</field>
+    <field name="name">Bangkok Loco</field>
   </doc>
   <doc>
     <field name="id">/en/baran</field>
-    <field name="name">Baran</field>
-    <field name="directed_by">Majid Majidi</field>
+    <field name="initial_release_date">2001-01-31</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-01-31</field>
+    <field name="directed_by">Majid Majidi</field>
+    <field name="name">Baran</field>
   </doc>
   <doc>
     <field name="id">/en/barbershop</field>
-    <field name="name">Barbershop</field>
-    <field name="directed_by">Tim Story</field>
+    <field name="initial_release_date">2002-08-07</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Workplace Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2002-08-07</field>
+    <field name="directed_by">Tim Story</field>
+    <field name="name">Barbershop</field>
   </doc>
   <doc>
     <field name="id">/en/bareback_mountain</field>
-    <field name="name">Bareback Mountain</field>
-    <field name="directed_by">Afton Nills</field>
     <field name="genre">Pornographic film</field>
     <field name="genre">Gay pornography</field>
+    <field name="directed_by">Afton Nills</field>
+    <field name="name">Bareback Mountain</field>
   </doc>
   <doc>
     <field name="id">/wikipedia/pt/Barnyard</field>
-    <field name="name">Barnyard</field>
-    <field name="directed_by">Steve Oedekerk</field>
+    <field name="initial_release_date">2006-08-04</field>
     <field name="genre">Family</field>
     <field name="genre">Animation</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2006-08-04</field>
+    <field name="directed_by">Steve Oedekerk</field>
+    <field name="name">Barnyard</field>
   </doc>
   <doc>
     <field name="id">/en/barricade_2007</field>
-    <field name="name">Barricade</field>
-    <field name="directed_by">Timo Rose</field>
     <field name="genre">Slasher</field>
     <field name="genre">Horror</field>
+    <field name="directed_by">Timo Rose</field>
+    <field name="name">Barricade</field>
   </doc>
   <doc>
     <field name="id">/en/bas_itna_sa_khwaab_hai</field>
-    <field name="name">Bas Itna Sa Khwaab Hai</field>
-    <field name="directed_by">Goldie Behl</field>
+    <field name="initial_release_date">2001-07-06</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2001-07-06</field>
+    <field name="directed_by">Goldie Behl</field>
+    <field name="name">Bas Itna Sa Khwaab Hai</field>
   </doc>
   <doc>
     <field name="id">/en/basic_2003</field>
-    <field name="name">Basic</field>
-    <field name="directed_by">John McTiernan</field>
+    <field name="initial_release_date">2003-03-28</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Mystery</field>
-    <field name="initial_release_date">2003-03-28</field>
+    <field name="directed_by">John McTiernan</field>
+    <field name="name">Basic</field>
   </doc>
   <doc>
-    <field name="genre">Drama</field>
-    <field name="name">Basic emotions</field>
+    <field name="id">/en/basic_emotions</field>
     <field name="directed_by">Thomas Moon</field>
     <field name="directed_by">Julie Pham</field>
     <field name="directed_by">Georgia Lee</field>
-    <field name="id">/en/basic_emotions</field>
     <field name="initial_release_date">2004-09-09</field>
+    <field name="name">Basic emotions</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/basic_instinct_2</field>
+    <field name="directed_by">Michael Caton-Jones</field>
+    <field name="initial_release_date">2006-03-31</field>
+    <field name="name">Basic Instinct 2</field>
     <field name="genre">Thriller</field>
     <field name="genre">Erotic thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Horror</field>
-    <field name="name">Basic Instinct 2</field>
-    <field name="directed_by">Michael Caton-Jones</field>
-    <field name="id">/en/basic_instinct_2</field>
-    <field name="initial_release_date">2006-03-31</field>
   </doc>
   <doc>
-    <field name="genre">Drama</field>
-    <field name="name">Battle In Heaven</field>
-    <field name="directed_by">Carlos Reygadas</field>
     <field name="id">/en/batalla_en_el_cielo</field>
+    <field name="directed_by">Carlos Reygadas</field>
     <field name="initial_release_date">2005-05-15</field>
+    <field name="name">Battle In Heaven</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/batman_begins</field>
+    <field name="directed_by">Christopher Nolan</field>
+    <field name="initial_release_date">2005-06-10</field>
+    <field name="name">Batman Begins</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Film noir</field>
     <field name="genre">Drama</field>
-    <field name="name">Batman Begins</field>
-    <field name="directed_by">Christopher Nolan</field>
-    <field name="id">/en/batman_begins</field>
-    <field name="initial_release_date">2005-06-10</field>
   </doc>
   <doc>
+    <field name="id">/en/batman_beyond_return_of_the_joker</field>
+    <field name="directed_by">Curt Geda</field>
+    <field name="initial_release_date">2000-12-12</field>
+    <field name="name">Batman Beyond: Return of the Joker</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Animation</field>
     <field name="genre">Superhero movie</field>
     <field name="genre">Action Film</field>
-    <field name="name">Batman Beyond: Return of the Joker</field>
-    <field name="directed_by">Curt Geda</field>
-    <field name="id">/en/batman_beyond_return_of_the_joker</field>
-    <field name="initial_release_date">2000-12-12</field>
   </doc>
   <doc>
+    <field name="id">/en/batman_dead_end</field>
+    <field name="directed_by">Sandy Collora</field>
+    <field name="initial_release_date">2003-07-19</field>
+    <field name="name">Batman: Dead End</field>
     <field name="genre">Indie film</field>
     <field name="genre">Short Film</field>
     <field name="genre">Fan film</field>
-    <field name="name">Batman: Dead End</field>
-    <field name="directed_by">Sandy Collora</field>
-    <field name="id">/en/batman_dead_end</field>
-    <field name="initial_release_date">2003-07-19</field>
   </doc>
   <doc>
+    <field name="id">/en/batman_mystery_of_the_batwoman</field>
+    <field name="directed_by">Curt Geda</field>
+    <field name="directed_by">Tim Maltby</field>
+    <field name="initial_release_date">2003-10-21</field>
+    <field name="name">Batman: Mystery of the Batwoman</field>
     <field name="genre">Animated cartoon</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
@@ -3181,24 +3179,23 @@
     <field name="genre">Fantasy</field>
     <field name="genre">Short Film</field>
     <field name="genre">Fantasy Adventure</field>
-    <field name="name">Batman: Mystery of the Batwoman</field>
-    <field name="directed_by">Curt Geda</field>
-    <field name="directed_by">Tim Maltby</field>
-    <field name="id">/en/batman_mystery_of_the_batwoman</field>
-    <field name="initial_release_date">2003-10-21</field>
   </doc>
   <doc>
+    <field name="id">/en/batoru_rowaiaru_ii_chinkonka</field>
+    <field name="directed_by">Kenta Fukasaku</field>
+    <field name="directed_by">Kinji Fukasaku</field>
+    <field name="initial_release_date">2003-07-05</field>
+    <field name="name">Battle Royale II: Requiem</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Battle Royale II: Requiem</field>
-    <field name="directed_by">Kenta Fukasaku</field>
-    <field name="directed_by">Kinji Fukasaku</field>
-    <field name="id">/en/batoru_rowaiaru_ii_chinkonka</field>
-    <field name="initial_release_date">2003-07-05</field>
   </doc>
   <doc>
+    <field name="id">/en/battlefield_baseball</field>
+    <field name="directed_by">Yūdai Yamaguchi</field>
+    <field name="initial_release_date">2003-07-19</field>
+    <field name="name">Battlefield Baseball</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Horror</field>
     <field name="genre">World cinema</field>
@@ -3207,135 +3204,135 @@
     <field name="genre">Japanese Movies</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Battlefield Baseball</field>
-    <field name="directed_by">Yūdai Yamaguchi</field>
-    <field name="id">/en/battlefield_baseball</field>
-    <field name="initial_release_date">2003-07-19</field>
   </doc>
   <doc>
-    <field name="genre">Documentary film</field>
-    <field name="name">BBS: The Documentary</field>
-    <field name="directed_by">Jason Scott Sadofsky</field>
     <field name="id">/en/bbs_the_documentary</field>
+    <field name="directed_by">Jason Scott Sadofsky</field>
+    <field name="name">BBS: The Documentary</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
+    <field name="id">/en/be_cool</field>
+    <field name="directed_by">F. Gary Gray</field>
+    <field name="initial_release_date">2005-03-04</field>
+    <field name="name">Be Cool</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Crime Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Be Cool</field>
-    <field name="directed_by">F. Gary Gray</field>
-    <field name="id">/en/be_cool</field>
-    <field name="initial_release_date">2005-03-04</field>
   </doc>
   <doc>
+    <field name="id">/en/be_kind_rewind</field>
+    <field name="directed_by">Michel Gondry</field>
+    <field name="initial_release_date">2008-01-20</field>
+    <field name="name">Be Kind Rewind</field>
     <field name="genre">Farce</field>
     <field name="genre">Comedy of Errors</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Be Kind Rewind</field>
-    <field name="directed_by">Michel Gondry</field>
-    <field name="id">/en/be_kind_rewind</field>
-    <field name="initial_release_date">2008-01-20</field>
   </doc>
   <doc>
+    <field name="id">/en/be_with_me</field>
+    <field name="directed_by">Eric Khoo</field>
+    <field name="initial_release_date">2005-05-12</field>
+    <field name="name">Be with Me</field>
     <field name="genre">Indie film</field>
     <field name="genre">LGBT</field>
     <field name="genre">World cinema</field>
     <field name="genre">Art film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Be with Me</field>
-    <field name="directed_by">Eric Khoo</field>
-    <field name="id">/en/be_with_me</field>
-    <field name="initial_release_date">2005-05-12</field>
   </doc>
   <doc>
+    <field name="id">/en/beah_a_black_woman_speaks</field>
+    <field name="directed_by">Lisa Gay Hamilton</field>
+    <field name="initial_release_date">2003-08-22</field>
+    <field name="name">Beah: A Black Woman Speaks</field>
     <field name="genre">Documentary film</field>
     <field name="genre">History</field>
     <field name="genre">Biographical film</field>
-    <field name="name">Beah: A Black Woman Speaks</field>
-    <field name="directed_by">Lisa Gay Hamilton</field>
-    <field name="id">/en/beah_a_black_woman_speaks</field>
-    <field name="initial_release_date">2003-08-22</field>
   </doc>
   <doc>
+    <field name="id">/en/beastly_boyz</field>
+    <field name="directed_by">David DeCoteau</field>
+    <field name="name">Beastly Boyz</field>
     <field name="genre">LGBT</field>
     <field name="genre">Horror</field>
     <field name="genre">B movie</field>
     <field name="genre">Teen film</field>
-    <field name="name">Beastly Boyz</field>
-    <field name="directed_by">David DeCoteau</field>
-    <field name="id">/en/beastly_boyz</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Beauty Shop</field>
-    <field name="directed_by">Bille Woodruff</field>
     <field name="id">/en/beauty_shop</field>
+    <field name="directed_by">Bille Woodruff</field>
     <field name="initial_release_date">2005-03-24</field>
+    <field name="name">Beauty Shop</field>
+    <field name="genre">Comedy</field>
   </doc>
   <doc>
+    <field name="id">/en/bedazzled_2000</field>
+    <field name="directed_by">Harold Ramis</field>
+    <field name="initial_release_date">2000-10-19</field>
+    <field name="name">Bedazzled</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bedazzled</field>
-    <field name="directed_by">Harold Ramis</field>
-    <field name="id">/en/bedazzled_2000</field>
-    <field name="initial_release_date">2000-10-19</field>
   </doc>
   <doc>
+    <field name="id">/en/bee_movie</field>
+    <field name="directed_by">Steve Hickner</field>
+    <field name="directed_by">Simon J. Smith</field>
+    <field name="initial_release_date">2007-10-28</field>
+    <field name="name">Bee Movie</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Animation</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bee Movie</field>
-    <field name="directed_by">Steve Hickner</field>
-    <field name="directed_by">Simon J. Smith</field>
-    <field name="id">/en/bee_movie</field>
-    <field name="initial_release_date">2007-10-28</field>
   </doc>
   <doc>
+    <field name="id">/en/bee_season_2005</field>
+    <field name="directed_by">David Siegel</field>
+    <field name="directed_by">Scott McGehee</field>
+    <field name="initial_release_date">2005-11-11</field>
+    <field name="name">Bee Season</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Bee Season</field>
-    <field name="directed_by">David Siegel</field>
-    <field name="directed_by">Scott McGehee</field>
-    <field name="id">/en/bee_season_2005</field>
-    <field name="initial_release_date">2005-11-11</field>
   </doc>
   <doc>
+    <field name="id">/en/beer_league</field>
+    <field name="directed_by">Frank Sebastiano</field>
+    <field name="initial_release_date">2006-09-15</field>
+    <field name="name">Artie Lange's Beer League</field>
     <field name="genre">Sports</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Artie Lange's Beer League</field>
-    <field name="directed_by">Frank Sebastiano</field>
-    <field name="id">/en/beer_league</field>
-    <field name="initial_release_date">2006-09-15</field>
   </doc>
   <doc>
+    <field name="id">/en/beer_the_movie</field>
+    <field name="directed_by">Peter Hoare</field>
+    <field name="initial_release_date">2006-05-16</field>
+    <field name="name">Beer: The Movie</field>
     <field name="genre">Indie film</field>
     <field name="genre">Cult film</field>
     <field name="genre">Parody</field>
     <field name="genre">Bloopers &amp;amp; Candid Camera</field>
     <field name="genre">Comedy</field>
-    <field name="name">Beer: The Movie</field>
-    <field name="directed_by">Peter Hoare</field>
-    <field name="id">/en/beer_the_movie</field>
-    <field name="initial_release_date">2006-05-16</field>
   </doc>
   <doc>
+    <field name="id">/en/beerfest</field>
+    <field name="directed_by">Jay Chandrasekhar</field>
+    <field name="initial_release_date">2006-08-25</field>
+    <field name="name">Beerfest</field>
     <field name="genre">Absurdism</field>
     <field name="genre">Comedy</field>
-    <field name="name">Beerfest</field>
-    <field name="directed_by">Jay Chandrasekhar</field>
-    <field name="id">/en/beerfest</field>
-    <field name="initial_release_date">2006-08-25</field>
   </doc>
   <doc>
+    <field name="id">/en/before_night_falls_2001</field>
+    <field name="directed_by">Julian Schnabel</field>
+    <field name="initial_release_date">2000-09-03</field>
+    <field name="name">Before Night Falls</field>
     <field name="genre">LGBT</field>
     <field name="genre">Gay Themed</field>
     <field name="genre">Political drama</field>
@@ -3343,88 +3340,88 @@
     <field name="genre">Gay Interest</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="name">Before Night Falls</field>
-    <field name="directed_by">Julian Schnabel</field>
-    <field name="id">/en/before_night_falls_2001</field>
-    <field name="initial_release_date">2000-09-03</field>
   </doc>
   <doc>
+    <field name="id">/en/before_sunset</field>
+    <field name="directed_by">Richard Linklater</field>
+    <field name="initial_release_date">2004-02-10</field>
+    <field name="name">Before Sunset</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Before Sunset</field>
-    <field name="directed_by">Richard Linklater</field>
-    <field name="id">/en/before_sunset</field>
-    <field name="initial_release_date">2004-02-10</field>
   </doc>
   <doc>
+    <field name="id">/en/behind_enemy_lines</field>
+    <field name="directed_by">John Moore</field>
+    <field name="initial_release_date">2001-11-17</field>
+    <field name="name">Behind Enemy Lines</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">War film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Drama</field>
-    <field name="name">Behind Enemy Lines</field>
-    <field name="directed_by">John Moore</field>
-    <field name="id">/en/behind_enemy_lines</field>
-    <field name="initial_release_date">2001-11-17</field>
   </doc>
   <doc>
+    <field name="id">/en/behind_the_mask_2006</field>
+    <field name="directed_by">Shannon Keith</field>
+    <field name="initial_release_date">2006-03-21</field>
+    <field name="name">Behind the Mask</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Crime Fiction</field>
-    <field name="name">Behind the Mask</field>
-    <field name="directed_by">Shannon Keith</field>
-    <field name="id">/en/behind_the_mask_2006</field>
-    <field name="initial_release_date">2006-03-21</field>
   </doc>
   <doc>
-    <field name="genre">Drama</field>
-    <field name="name">Behind the Sun</field>
-    <field name="directed_by">Walter Salles</field>
     <field name="id">/en/behind_the_sun_2001</field>
+    <field name="directed_by">Walter Salles</field>
     <field name="initial_release_date">2001-09-06</field>
+    <field name="name">Behind the Sun</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/being_cyrus</field>
+    <field name="directed_by">Homi Adajania</field>
+    <field name="initial_release_date">2005-11-08</field>
+    <field name="name">Being Cyrus</field>
     <field name="genre">Thriller</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Mystery</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Being Cyrus</field>
-    <field name="directed_by">Homi Adajania</field>
-    <field name="id">/en/being_cyrus</field>
-    <field name="initial_release_date">2005-11-08</field>
   </doc>
   <doc>
+    <field name="id">/en/being_julia</field>
+    <field name="directed_by">István Szabó</field>
+    <field name="initial_release_date">2004-09-03</field>
+    <field name="name">Being Julia</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Being Julia</field>
-    <field name="directed_by">István Szabó</field>
-    <field name="id">/en/being_julia</field>
-    <field name="initial_release_date">2004-09-03</field>
   </doc>
   <doc>
-    <field name="genre">Drama</field>
-    <field name="name">Bekhal's Tears</field>
-    <field name="directed_by">Lauand Omar</field>
     <field name="id">/en/bekhals_tears</field>
+    <field name="directed_by">Lauand Omar</field>
+    <field name="name">Bekhal's Tears</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/believe_in_me</field>
+    <field name="directed_by">Robert Collector</field>
+    <field name="name">Believe in Me</field>
     <field name="genre">Sports</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Family</field>
     <field name="genre">Drama</field>
-    <field name="name">Believe in Me</field>
-    <field name="directed_by">Robert Collector</field>
-    <field name="id">/en/believe_in_me</field>
   </doc>
   <doc>
+    <field name="id">/en/belly_of_the_beast</field>
+    <field name="directed_by">Ching Siu-tung</field>
+    <field name="initial_release_date">2003-12-30</field>
+    <field name="name">Belly of the Beast</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Political thriller</field>
@@ -3433,21 +3430,21 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Action Thriller</field>
     <field name="genre">Chinese Movies</field>
-    <field name="name">Belly of the Beast</field>
-    <field name="directed_by">Ching Siu-tung</field>
-    <field name="id">/en/belly_of_the_beast</field>
-    <field name="initial_release_date">2003-12-30</field>
   </doc>
   <doc>
+    <field name="id">/en/bellyful</field>
+    <field name="directed_by">Melvin Van Peebles</field>
+    <field name="initial_release_date">2000-06-28</field>
+    <field name="name">Bellyful</field>
     <field name="genre">Indie film</field>
     <field name="genre">Satire</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bellyful</field>
-    <field name="directed_by">Melvin Van Peebles</field>
-    <field name="id">/en/bellyful</field>
-    <field name="initial_release_date">2000-06-28</field>
   </doc>
   <doc>
+    <field name="id">/en/bend_it_like_beckham</field>
+    <field name="directed_by">Gurinder Chadha</field>
+    <field name="initial_release_date">2002-04-11</field>
+    <field name="name">Bend It Like Beckham</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Indie film</field>
     <field name="genre">Teen film</field>
@@ -3456,54 +3453,54 @@
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bend It Like Beckham</field>
-    <field name="directed_by">Gurinder Chadha</field>
-    <field name="id">/en/bend_it_like_beckham</field>
-    <field name="initial_release_date">2002-04-11</field>
   </doc>
   <doc>
+    <field name="id">/en/bendito_infierno</field>
+    <field name="directed_by">Agustín Díaz Yanes</field>
+    <field name="initial_release_date">2001-11-28</field>
+    <field name="name">Don't Tempt Me</field>
     <field name="genre">Religious Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Don't Tempt Me</field>
-    <field name="directed_by">Agustín Díaz Yanes</field>
-    <field name="id">/en/bendito_infierno</field>
-    <field name="initial_release_date">2001-11-28</field>
   </doc>
   <doc>
+    <field name="id">/en/beneath</field>
+    <field name="directed_by">Dagen Merrill</field>
+    <field name="initial_release_date">2007-08-07</field>
+    <field name="name">Beneath</field>
     <field name="genre">Horror</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Thriller</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Crime Thriller</field>
-    <field name="name">Beneath</field>
-    <field name="directed_by">Dagen Merrill</field>
-    <field name="id">/en/beneath</field>
-    <field name="initial_release_date">2007-08-07</field>
   </doc>
   <doc>
+    <field name="id">/en/beneath_clouds</field>
+    <field name="directed_by">Ivan Sen</field>
+    <field name="initial_release_date">2002-02-08</field>
+    <field name="name">Beneath Clouds</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Road movie</field>
     <field name="genre">Social problem film</field>
     <field name="genre">Drama</field>
-    <field name="name">Beneath Clouds</field>
-    <field name="directed_by">Ivan Sen</field>
-    <field name="id">/en/beneath_clouds</field>
-    <field name="initial_release_date">2002-02-08</field>
   </doc>
   <doc>
+    <field name="id">/en/beowulf_2007</field>
+    <field name="directed_by">Robert Zemeckis</field>
+    <field name="initial_release_date">2007-11-05</field>
+    <field name="name">Beowulf</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Computer Animation</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action Film</field>
     <field name="genre">Animation</field>
-    <field name="name">Beowulf</field>
-    <field name="directed_by">Robert Zemeckis</field>
-    <field name="id">/en/beowulf_2007</field>
-    <field name="initial_release_date">2007-11-05</field>
   </doc>
   <doc>
+    <field name="id">/en/beowulf_grendel</field>
+    <field name="directed_by">Sturla Gunnarsson</field>
+    <field name="initial_release_date">2005-09-14</field>
+    <field name="name">Beowulf &amp;amp; Grendel</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Fantasy</field>
@@ -3513,135 +3510,135 @@
     <field name="genre">Historical period drama</field>
     <field name="genre">Mythological Fantasy</field>
     <field name="genre">Drama</field>
-    <field name="name">Beowulf &amp;amp; Grendel</field>
-    <field name="directed_by">Sturla Gunnarsson</field>
-    <field name="id">/en/beowulf_grendel</field>
-    <field name="initial_release_date">2005-09-14</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Best in Show</field>
-    <field name="directed_by">Christopher Guest</field>
     <field name="id">/en/best_in_show</field>
+    <field name="directed_by">Christopher Guest</field>
     <field name="initial_release_date">2000-09-08</field>
+    <field name="name">Best in Show</field>
+    <field name="genre">Comedy</field>
   </doc>
   <doc>
-    <field name="genre">Sports</field>
-    <field name="name">The Best of The Bloodiest Brawls, Vol. 1</field>
     <field name="id">/en/the_best_of_the_bloodiest_brawls_vol_1</field>
     <field name="initial_release_date">2006-03-14</field>
+    <field name="name">The Best of The Bloodiest Brawls, Vol. 1</field>
+    <field name="genre">Sports</field>
   </doc>
   <doc>
+    <field name="id">/en/better_luck_tomorrow</field>
+    <field name="directed_by">Justin Lin</field>
+    <field name="initial_release_date">2003-04-11</field>
+    <field name="name">Better Luck Tomorrow</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Teen film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Crime Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Better Luck Tomorrow</field>
-    <field name="directed_by">Justin Lin</field>
-    <field name="id">/en/better_luck_tomorrow</field>
-    <field name="initial_release_date">2003-04-11</field>
   </doc>
   <doc>
+    <field name="id">/en/bettie_page_dark_angel</field>
+    <field name="directed_by">Nico B.</field>
+    <field name="initial_release_date">2004-02-11</field>
+    <field name="name">Bettie Page: Dark Angel</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="name">Bettie Page: Dark Angel</field>
-    <field name="directed_by">Nico B.</field>
-    <field name="id">/en/bettie_page_dark_angel</field>
-    <field name="initial_release_date">2004-02-11</field>
   </doc>
   <doc>
+    <field name="id">/en/bewitched_2005</field>
+    <field name="directed_by">Nora Ephron</field>
+    <field name="initial_release_date">2005-06-24</field>
+    <field name="name">Bewitched</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bewitched</field>
-    <field name="directed_by">Nora Ephron</field>
-    <field name="id">/en/bewitched_2005</field>
-    <field name="initial_release_date">2005-06-24</field>
   </doc>
   <doc>
+    <field name="id">/en/beyond_borders</field>
+    <field name="directed_by">Martin Campbell</field>
+    <field name="initial_release_date">2003-10-24</field>
+    <field name="name">Beyond Borders</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Romance Film</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="name">Beyond Borders</field>
-    <field name="directed_by">Martin Campbell</field>
-    <field name="id">/en/beyond_borders</field>
-    <field name="initial_release_date">2003-10-24</field>
   </doc>
   <doc>
+    <field name="id">/en/beyond_re-animator</field>
+    <field name="directed_by">Brian Yuzna</field>
+    <field name="initial_release_date">2003-04-04</field>
+    <field name="name">Beyond Re-Animator</field>
     <field name="genre">Horror</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="name">Beyond Re-Animator</field>
-    <field name="directed_by">Brian Yuzna</field>
-    <field name="id">/en/beyond_re-animator</field>
-    <field name="initial_release_date">2003-04-04</field>
   </doc>
   <doc>
+    <field name="id">/en/beyond_the_sea</field>
+    <field name="directed_by">Kevin Spacey</field>
+    <field name="initial_release_date">2004-09-11</field>
+    <field name="name">Beyond the Sea</field>
     <field name="genre">Musical</field>
     <field name="genre">Music</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="name">Beyond the Sea</field>
-    <field name="directed_by">Kevin Spacey</field>
-    <field name="id">/en/beyond_the_sea</field>
-    <field name="initial_release_date">2004-09-11</field>
   </doc>
   <doc>
+    <field name="id">/en/bhadra_2005</field>
+    <field name="directed_by">Boyapati Srinu</field>
+    <field name="initial_release_date">2005-05-12</field>
+    <field name="name">Bhadra</field>
     <field name="genre">Action Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="name">Bhadra</field>
-    <field name="directed_by">Boyapati Srinu</field>
-    <field name="id">/en/bhadra_2005</field>
-    <field name="initial_release_date">2005-05-12</field>
   </doc>
   <doc>
+    <field name="id">/en/bhageeradha</field>
+    <field name="directed_by">Rasool Ellore</field>
+    <field name="initial_release_date">2005-10-13</field>
+    <field name="name">Bhageeratha</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Bhageeratha</field>
-    <field name="directed_by">Rasool Ellore</field>
-    <field name="id">/en/bhageeradha</field>
-    <field name="initial_release_date">2005-10-13</field>
   </doc>
   <doc>
+    <field name="id">/en/bheema</field>
+    <field name="directed_by">N. Lingusamy</field>
+    <field name="initial_release_date">2008-01-14</field>
+    <field name="name">Bheemaa</field>
     <field name="genre">Action Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="name">Bheemaa</field>
-    <field name="directed_by">N. Lingusamy</field>
-    <field name="id">/en/bheema</field>
-    <field name="initial_release_date">2008-01-14</field>
   </doc>
   <doc>
+    <field name="id">/en/bhoot</field>
+    <field name="directed_by">Ram Gopal Varma</field>
+    <field name="initial_release_date">2003-05-17</field>
+    <field name="name">Bhoot</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Bhoot</field>
-    <field name="directed_by">Ram Gopal Varma</field>
-    <field name="id">/en/bhoot</field>
-    <field name="initial_release_date">2003-05-17</field>
   </doc>
   <doc>
+    <field name="id">/en/bichhoo</field>
+    <field name="directed_by">Guddu Dhanoa</field>
+    <field name="initial_release_date">2000-07-07</field>
+    <field name="name">Bichhoo</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="name">Bichhoo</field>
-    <field name="directed_by">Guddu Dhanoa</field>
-    <field name="id">/en/bichhoo</field>
-    <field name="initial_release_date">2000-07-07</field>
   </doc>
   <doc>
+    <field name="id">/en/big_eden</field>
+    <field name="directed_by">Thomas Bezucha</field>
+    <field name="initial_release_date">2000-04-18</field>
+    <field name="name">Big Eden</field>
     <field name="genre">LGBT</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
@@ -3651,21 +3648,21 @@
     <field name="genre">Gay Themed</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Big Eden</field>
-    <field name="directed_by">Thomas Bezucha</field>
-    <field name="id">/en/big_eden</field>
-    <field name="initial_release_date">2000-04-18</field>
   </doc>
   <doc>
+    <field name="id">/en/big_fat_liar</field>
+    <field name="directed_by">Shawn Levy</field>
+    <field name="initial_release_date">2002-02-02</field>
+    <field name="name">Big Fat Liar</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Big Fat Liar</field>
-    <field name="directed_by">Shawn Levy</field>
-    <field name="id">/en/big_fat_liar</field>
-    <field name="initial_release_date">2002-02-02</field>
   </doc>
   <doc>
+    <field name="id">/en/big_fish</field>
+    <field name="directed_by">Tim Burton</field>
+    <field name="initial_release_date">2003-12-10</field>
+    <field name="name">Big Fish</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">War film</field>
@@ -3675,38 +3672,38 @@
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Big Fish</field>
-    <field name="directed_by">Tim Burton</field>
-    <field name="id">/en/big_fish</field>
-    <field name="initial_release_date">2003-12-10</field>
   </doc>
   <doc>
+    <field name="id">/en/big_girls_dont_cry_2002</field>
+    <field name="directed_by">Maria von Heland</field>
+    <field name="initial_release_date">2002-10-24</field>
+    <field name="name">Big Girls Don't Cry</field>
     <field name="genre">World cinema</field>
     <field name="genre">Melodrama</field>
     <field name="genre">Teen film</field>
     <field name="genre">Drama</field>
-    <field name="name">Big Girls Don't Cry</field>
-    <field name="directed_by">Maria von Heland</field>
-    <field name="id">/en/big_girls_dont_cry_2002</field>
-    <field name="initial_release_date">2002-10-24</field>
   </doc>
   <doc>
-    <field name="genre">Drama</field>
-    <field name="name">Big Man, Little Love</field>
-    <field name="directed_by">Handan İpekçi</field>
     <field name="id">/en/big_man_little_love</field>
+    <field name="directed_by">Handan İpekçi</field>
     <field name="initial_release_date">2001-10-19</field>
+    <field name="name">Big Man, Little Love</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/big_mommas_house</field>
+    <field name="directed_by">Raja Gosnell</field>
+    <field name="initial_release_date">2000-05-31</field>
+    <field name="name">Big Momma's House</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="name">Big Momma's House</field>
-    <field name="directed_by">Raja Gosnell</field>
-    <field name="id">/en/big_mommas_house</field>
-    <field name="initial_release_date">2000-05-31</field>
   </doc>
   <doc>
+    <field name="id">/en/big_mommas_house_2</field>
+    <field name="directed_by">John Whitesell</field>
+    <field name="initial_release_date">2006-01-26</field>
+    <field name="name">Big Momma's House 2</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Slapstick</field>
     <field name="genre">Action Film</field>
@@ -3714,41 +3711,41 @@
     <field name="genre">Thriller</field>
     <field name="genre">Farce</field>
     <field name="genre">Comedy</field>
-    <field name="name">Big Momma's House 2</field>
-    <field name="directed_by">John Whitesell</field>
-    <field name="id">/en/big_mommas_house_2</field>
-    <field name="initial_release_date">2006-01-26</field>
   </doc>
   <doc>
-    <field name="genre">Pornographic film</field>
-    <field name="name">Big Toys, No Boys 2</field>
-    <field name="directed_by">Tristán</field>
     <field name="id">/en/big_toys_no_boys_2</field>
+    <field name="directed_by">Tristán</field>
+    <field name="name">Big Toys, No Boys 2</field>
+    <field name="genre">Pornographic film</field>
   </doc>
   <doc>
+    <field name="id">/en/big_trouble_2002</field>
+    <field name="directed_by">Barry Sonnenfeld</field>
+    <field name="initial_release_date">2002-04-05</field>
+    <field name="name">Big Trouble</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Action Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Gangster Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Big Trouble</field>
-    <field name="directed_by">Barry Sonnenfeld</field>
-    <field name="id">/en/big_trouble_2002</field>
-    <field name="initial_release_date">2002-04-05</field>
   </doc>
   <doc>
+    <field name="id">/en/bigger_than_the_sky</field>
+    <field name="directed_by">Al Corley</field>
+    <field name="initial_release_date">2005-02-18</field>
+    <field name="name">Bigger Than the Sky</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bigger Than the Sky</field>
-    <field name="directed_by">Al Corley</field>
-    <field name="id">/en/bigger_than_the_sky</field>
-    <field name="initial_release_date">2005-02-18</field>
   </doc>
   <doc>
+    <field name="id">/en/biggie_tupac</field>
+    <field name="directed_by">Nick Broomfield</field>
+    <field name="initial_release_date">2002-01-11</field>
+    <field name="name">Biggie &amp;amp; Tupac</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Hip hop film</field>
     <field name="genre">Rockumentary</field>
@@ -3756,61 +3753,62 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">True crime</field>
     <field name="genre">Biographical film</field>
-    <field name="name">Biggie &amp;amp; Tupac</field>
-    <field name="directed_by">Nick Broomfield</field>
-    <field name="id">/en/biggie_tupac</field>
-    <field name="initial_release_date">2002-01-11</field>
   </doc>
   <doc>
+    <field name="id">/en/bill_2007</field>
+    <field name="directed_by">Bernie Goldmann</field>
+    <field name="directed_by">Melisa Wallick</field>
+    <field name="initial_release_date">2007-09-08</field>
+    <field name="name">Meet Bill</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Meet Bill</field>
-    <field name="directed_by">Bernie Goldmann</field>
-    <field name="directed_by">Melisa Wallick</field>
-    <field name="id">/en/bill_2007</field>
-    <field name="initial_release_date">2007-09-08</field>
   </doc>
   <doc>
+    <field name="id">/en/billy_elliot</field>
+    <field name="directed_by">Stephen Daldry</field>
+    <field name="initial_release_date">2000-05-19</field>
+    <field name="name">Billy Elliot</field>
     <field name="genre">Comedy</field>
     <field name="genre">Music</field>
     <field name="genre">Drama</field>
-    <field name="name">Billy Elliot</field>
-    <field name="directed_by">Stephen Daldry</field>
-    <field name="id">/en/billy_elliot</field>
-    <field name="initial_release_date">2000-05-19</field>
   </doc>
   <doc>
-    <field name="genre">Fantasy</field>
-    <field name="genre">Adventure Film</field>
-    <field name="genre">Animation</field>
-    <field name="genre">Family</field>
-    <field name="genre">Computer Animation</field>
-    <field name="genre">Science Fiction</field>
-    <field name="name">Bionicle 3: Web of Shadows</field>
-    <field name="directed_by">David Molina</field>
-    <field name="directed_by">Terry Shakespeare</field>
     <field name="id">/en/bionicle_3_web_of_shadows</field>
+    <field name="directed_by">David Molina</field>
+    <field name="directed_by">Terry Shakespeare</field>
     <field name="initial_release_date">2005-10-11</field>
-  </doc>
-  <doc>
+    <field name="name">Bionicle 3: Web of Shadows</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
     <field name="genre">Computer Animation</field>
     <field name="genre">Science Fiction</field>
-    <field name="genre">Children's Fantasy</field>
-    <field name="genre">Children's/Family</field>
-    <field name="genre">Fantasy Adventure</field>
-    <field name="name">Bionicle 2: Legends of Metru Nui</field>
-    <field name="directed_by">David Molina</field>
-    <field name="directed_by">Terry Shakespeare</field>
+  </doc>
+  <doc>
     <field name="id">/en/bionicle_2_legends_of_metru_nui</field>
+    <field name="directed_by">David Molina</field>
+    <field name="directed_by">Terry Shakespeare</field>
     <field name="initial_release_date">2004-10-19</field>
+    <field name="name">Bionicle 2: Legends of Metru Nui</field>
+    <field name="genre">Fantasy</field>
+    <field name="genre">Adventure Film</field>
+    <field name="genre">Animation</field>
+    <field name="genre">Family</field>
+    <field name="genre">Computer Animation</field>
+    <field name="genre">Science Fiction</field>
+    <field name="genre">Children's Fantasy</field>
+    <field name="genre">Children's/Family</field>
+    <field name="genre">Fantasy Adventure</field>
   </doc>
   <doc>
+    <field name="id">/en/bionicle_mask_of_light</field>
+    <field name="directed_by">David Molina</field>
+    <field name="directed_by">Terry Shakespeare</field>
+    <field name="initial_release_date">2003-09-16</field>
+    <field name="name">Bionicle: Mask of Light: The Movie</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Animation</field>
@@ -3820,24 +3818,23 @@
     <field name="genre">Children's Fantasy</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Fantasy Adventure</field>
-    <field name="name">Bionicle: Mask of Light: The Movie</field>
-    <field name="directed_by">David Molina</field>
-    <field name="directed_by">Terry Shakespeare</field>
-    <field name="id">/en/bionicle_mask_of_light</field>
-    <field name="initial_release_date">2003-09-16</field>
   </doc>
   <doc>
+    <field name="id">/en/birth_2004</field>
+    <field name="directed_by">Jonathan Glazer</field>
+    <field name="initial_release_date">2004-09-08</field>
+    <field name="name">Birth</field>
     <field name="genre">Mystery</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Birth</field>
-    <field name="directed_by">Jonathan Glazer</field>
-    <field name="id">/en/birth_2004</field>
-    <field name="initial_release_date">2004-09-08</field>
   </doc>
   <doc>
+    <field name="id">/en/birthday_girl</field>
+    <field name="directed_by">Jez Butterworth</field>
+    <field name="initial_release_date">2002-02-01</field>
+    <field name="name">Birthday Girl</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Thriller</field>
     <field name="genre">Indie film</field>
@@ -3846,107 +3843,107 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Birthday Girl</field>
-    <field name="directed_by">Jez Butterworth</field>
-    <field name="id">/en/birthday_girl</field>
-    <field name="initial_release_date">2002-02-01</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Bite Me, Fanboy</field>
-    <field name="directed_by">Mat Nastos</field>
     <field name="id">/en/bite_me_fanboy</field>
+    <field name="directed_by">Mat Nastos</field>
     <field name="initial_release_date">2005-06-01</field>
+    <field name="name">Bite Me, Fanboy</field>
+    <field name="genre">Comedy</field>
   </doc>
   <doc>
+    <field name="id">/en/bitter_jester</field>
+    <field name="directed_by">Maija DiGiorgio</field>
+    <field name="initial_release_date">2003-02-26</field>
+    <field name="name">Bitter Jester</field>
     <field name="genre">Indie film</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">Comedy</field>
     <field name="genre">Biographical film</field>
-    <field name="name">Bitter Jester</field>
-    <field name="directed_by">Maija DiGiorgio</field>
-    <field name="id">/en/bitter_jester</field>
-    <field name="initial_release_date">2003-02-26</field>
   </doc>
   <doc>
+    <field name="id">/en/black_2005</field>
+    <field name="directed_by">Sanjay Leela Bhansali</field>
+    <field name="initial_release_date">2005-02-04</field>
+    <field name="name">Black</field>
     <field name="genre">Family</field>
     <field name="genre">Drama</field>
-    <field name="name">Black</field>
-    <field name="directed_by">Sanjay Leela Bhansali</field>
-    <field name="id">/en/black_2005</field>
-    <field name="initial_release_date">2005-02-04</field>
   </doc>
   <doc>
+    <field name="id">/en/black_and_white_2002</field>
+    <field name="directed_by">Craig Lahiff</field>
+    <field name="initial_release_date">2002-10-31</field>
+    <field name="name">Black and White</field>
     <field name="genre">Trial drama</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="name">Black and White</field>
-    <field name="directed_by">Craig Lahiff</field>
-    <field name="id">/en/black_and_white_2002</field>
-    <field name="initial_release_date">2002-10-31</field>
   </doc>
   <doc>
+    <field name="id">/en/black_book_2006</field>
+    <field name="directed_by">Paul Verhoeven</field>
+    <field name="initial_release_date">2006-09-01</field>
+    <field name="name">Black Book</field>
     <field name="genre">Thriller</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="name">Black Book</field>
-    <field name="directed_by">Paul Verhoeven</field>
-    <field name="id">/en/black_book_2006</field>
-    <field name="initial_release_date">2006-09-01</field>
   </doc>
   <doc>
+    <field name="id">/wikipedia/fr/Black_Christmas_$0028film$002C_2006$0029</field>
+    <field name="directed_by">Glen Morgan</field>
+    <field name="initial_release_date">2006-12-15</field>
+    <field name="name">Black Christmas</field>
     <field name="genre">Slasher</field>
     <field name="genre">Teen film</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
-    <field name="name">Black Christmas</field>
-    <field name="directed_by">Glen Morgan</field>
-    <field name="id">/wikipedia/fr/Black_Christmas_$0028film$002C_2006$0029</field>
-    <field name="initial_release_date">2006-12-15</field>
   </doc>
   <doc>
+    <field name="id">/en/black_cloud</field>
+    <field name="directed_by">Ricky Schroder</field>
+    <field name="initial_release_date">2004-04-30</field>
+    <field name="name">Black Cloud</field>
     <field name="genre">Indie film</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="name">Black Cloud</field>
-    <field name="directed_by">Ricky Schroder</field>
-    <field name="id">/en/black_cloud</field>
-    <field name="initial_release_date">2004-04-30</field>
   </doc>
   <doc>
+    <field name="id">/en/black_friday_1993</field>
+    <field name="directed_by">Anurag Kashyap</field>
+    <field name="initial_release_date">2004-05-20</field>
+    <field name="name">Black Friday</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Black Friday</field>
-    <field name="directed_by">Anurag Kashyap</field>
-    <field name="id">/en/black_friday_1993</field>
-    <field name="initial_release_date">2004-05-20</field>
   </doc>
   <doc>
+    <field name="id">/en/black_hawk_down</field>
+    <field name="directed_by">Ridley Scott</field>
+    <field name="initial_release_date">2001-12-18</field>
+    <field name="name">Black Hawk Down</field>
     <field name="genre">War film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
     <field name="genre">History</field>
     <field name="genre">Combat Films</field>
     <field name="genre">Drama</field>
-    <field name="name">Black Hawk Down</field>
-    <field name="directed_by">Ridley Scott</field>
-    <field name="id">/en/black_hawk_down</field>
-    <field name="initial_release_date">2001-12-18</field>
   </doc>
   <doc>
+    <field name="id">/en/black_hole_2006</field>
+    <field name="directed_by">Tibor Takács</field>
+    <field name="initial_release_date">2006-06-10</field>
+    <field name="name">The Black Hole</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Television film</field>
-    <field name="name">The Black Hole</field>
-    <field name="directed_by">Tibor Takács</field>
-    <field name="id">/en/black_hole_2006</field>
-    <field name="initial_release_date">2006-06-10</field>
   </doc>
   <doc>
+    <field name="id">/en/black_knight_2001</field>
+    <field name="directed_by">Gil Junger</field>
+    <field name="initial_release_date">2001-11-15</field>
+    <field name="name">Black Knight</field>
     <field name="genre">Time travel</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Costume drama</field>
@@ -3955,41 +3952,41 @@
     <field name="genre">Adventure Comedy</field>
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Black Knight</field>
-    <field name="directed_by">Gil Junger</field>
-    <field name="id">/en/black_knight_2001</field>
-    <field name="initial_release_date">2001-11-15</field>
   </doc>
   <doc>
+    <field name="id">/en/blackball_2005</field>
+    <field name="directed_by">Mel Smith</field>
+    <field name="initial_release_date">2005-02-11</field>
+    <field name="name">Blackball</field>
     <field name="genre">Sports</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Blackball</field>
-    <field name="directed_by">Mel Smith</field>
-    <field name="id">/en/blackball_2005</field>
-    <field name="initial_release_date">2005-02-11</field>
   </doc>
   <doc>
+    <field name="id">/en/blackwoods</field>
+    <field name="directed_by">Uwe Boll</field>
+    <field name="name">Blackwoods</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Blackwoods</field>
-    <field name="directed_by">Uwe Boll</field>
-    <field name="id">/en/blackwoods</field>
   </doc>
   <doc>
+    <field name="id">/en/blade_ii</field>
+    <field name="directed_by">Guillermo del Toro</field>
+    <field name="initial_release_date">2002-03-21</field>
+    <field name="name">Blade II</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action Film</field>
-    <field name="name">Blade II</field>
-    <field name="directed_by">Guillermo del Toro</field>
-    <field name="id">/en/blade_ii</field>
-    <field name="initial_release_date">2002-03-21</field>
   </doc>
   <doc>
+    <field name="id">/en/blade_trinity</field>
+    <field name="directed_by">David S. Goyer</field>
+    <field name="initial_release_date">2004-12-07</field>
+    <field name="name">Blade: Trinity</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Horror</field>
@@ -3998,70 +3995,70 @@
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Thriller</field>
-    <field name="name">Blade: Trinity</field>
-    <field name="directed_by">David S. Goyer</field>
-    <field name="id">/en/blade_trinity</field>
-    <field name="initial_release_date">2004-12-07</field>
   </doc>
   <doc>
+    <field name="id">/en/bleach_memories_of_nobody</field>
+    <field name="directed_by">Noriyuki Abe</field>
+    <field name="initial_release_date">2006-12-16</field>
+    <field name="name">Bleach: Memories of Nobody</field>
     <field name="genre">Anime</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Animation</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
-    <field name="name">Bleach: Memories of Nobody</field>
-    <field name="directed_by">Noriyuki Abe</field>
-    <field name="id">/en/bleach_memories_of_nobody</field>
-    <field name="initial_release_date">2006-12-16</field>
   </doc>
   <doc>
+    <field name="id">/en/bless_the_child</field>
+    <field name="directed_by">Chuck Russell</field>
+    <field name="initial_release_date">2000-08-11</field>
+    <field name="name">Bless the Child</field>
     <field name="genre">Horror</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
     <field name="genre">Thriller</field>
-    <field name="name">Bless the Child</field>
-    <field name="directed_by">Chuck Russell</field>
-    <field name="id">/en/bless_the_child</field>
-    <field name="initial_release_date">2000-08-11</field>
   </doc>
   <doc>
+    <field name="id">/en/blind_shaft</field>
+    <field name="directed_by">Li Yang</field>
+    <field name="initial_release_date">2003-02-12</field>
+    <field name="name">Blind Shaft</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Blind Shaft</field>
-    <field name="directed_by">Li Yang</field>
-    <field name="id">/en/blind_shaft</field>
-    <field name="initial_release_date">2003-02-12</field>
   </doc>
   <doc>
+    <field name="id">/en/blissfully_yours</field>
+    <field name="directed_by">Apichatpong Weerasethakul</field>
+    <field name="initial_release_date">2002-05-17</field>
+    <field name="name">Blissfully Yours</field>
     <field name="genre">Erotica</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="name">Blissfully Yours</field>
-    <field name="directed_by">Apichatpong Weerasethakul</field>
-    <field name="id">/en/blissfully_yours</field>
-    <field name="initial_release_date">2002-05-17</field>
   </doc>
   <doc>
+    <field name="id">/en/blood_of_a_champion</field>
+    <field name="directed_by">Lawrence Page</field>
+    <field name="initial_release_date">2006-03-07</field>
+    <field name="name">Blood of a Champion</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="name">Blood of a Champion</field>
-    <field name="directed_by">Lawrence Page</field>
-    <field name="id">/en/blood_of_a_champion</field>
-    <field name="initial_release_date">2006-03-07</field>
   </doc>
   <doc>
+    <field name="id">/en/blood_rain</field>
+    <field name="directed_by">Kim Dae-seung</field>
+    <field name="initial_release_date">2005-05-04</field>
+    <field name="name">Blood Rain</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="name">Blood Rain</field>
-    <field name="directed_by">Kim Dae-seung</field>
-    <field name="id">/en/blood_rain</field>
-    <field name="initial_release_date">2005-05-04</field>
   </doc>
   <doc>
+    <field name="id">/en/blood_work</field>
+    <field name="directed_by">Clint Eastwood</field>
+    <field name="initial_release_date">2002-08-09</field>
+    <field name="name">Blood Work</field>
     <field name="genre">Mystery</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Thriller</field>
@@ -4069,116 +4066,115 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">Detective fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Blood Work</field>
-    <field name="directed_by">Clint Eastwood</field>
-    <field name="id">/en/blood_work</field>
-    <field name="initial_release_date">2002-08-09</field>
   </doc>
   <doc>
+    <field name="id">/en/bloodrayne_2006</field>
+    <field name="directed_by">Uwe Boll</field>
+    <field name="initial_release_date">2005-10-23</field>
+    <field name="name">BloodRayne</field>
     <field name="genre">Horror</field>
     <field name="genre">Action Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Costume drama</field>
-    <field name="name">BloodRayne</field>
-    <field name="directed_by">Uwe Boll</field>
-    <field name="id">/en/bloodrayne_2006</field>
-    <field name="initial_release_date">2005-10-23</field>
   </doc>
   <doc>
-    <field name="genre">Documentary film</field>
-    <field name="genre">Sports</field>
-    <field name="name">Bloodsport - ECW's Most Violent Matches</field>
     <field name="id">/en/bloodsport_ecws_most_violent_matches</field>
     <field name="initial_release_date">2006-02-07</field>
+    <field name="name">Bloodsport - ECW's Most Violent Matches</field>
+    <field name="genre">Documentary film</field>
+    <field name="genre">Sports</field>
   </doc>
   <doc>
+    <field name="id">/en/bloody_sunday</field>
+    <field name="directed_by">Paul Greengrass</field>
+    <field name="initial_release_date">2002-01-16</field>
+    <field name="name">Bloody Sunday</field>
     <field name="genre">Political drama</field>
     <field name="genre">Docudrama</field>
     <field name="genre">Historical fiction</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="name">Bloody Sunday</field>
-    <field name="directed_by">Paul Greengrass</field>
-    <field name="id">/en/bloody_sunday</field>
-    <field name="initial_release_date">2002-01-16</field>
   </doc>
   <doc>
+    <field name="id">/en/blow</field>
+    <field name="directed_by">Ted Demme</field>
+    <field name="initial_release_date">2001-03-29</field>
+    <field name="name">Blow</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Blow</field>
-    <field name="directed_by">Ted Demme</field>
-    <field name="id">/en/blow</field>
-    <field name="initial_release_date">2001-03-29</field>
   </doc>
   <doc>
+    <field name="id">/en/blue_car</field>
+    <field name="directed_by">Karen Moncrieff</field>
+    <field name="initial_release_date">2003-05-02</field>
+    <field name="name">Blue Car</field>
     <field name="genre">Indie film</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Drama</field>
-    <field name="name">Blue Car</field>
-    <field name="directed_by">Karen Moncrieff</field>
-    <field name="id">/en/blue_car</field>
-    <field name="initial_release_date">2003-05-02</field>
   </doc>
   <doc>
+    <field name="id">/en/blue_collar_comedy_tour_rides_again</field>
+    <field name="directed_by">C. B. Harding</field>
+    <field name="initial_release_date">2004-12-05</field>
+    <field name="name">Blue Collar Comedy Tour Rides Again</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Blue Collar Comedy Tour Rides Again</field>
-    <field name="directed_by">C. B. Harding</field>
-    <field name="id">/en/blue_collar_comedy_tour_rides_again</field>
-    <field name="initial_release_date">2004-12-05</field>
   </doc>
   <doc>
+    <field name="id">/en/blue_collar_comedy_tour_one_for_the_road</field>
+    <field name="directed_by">C. B. Harding</field>
+    <field name="initial_release_date">2006-06-27</field>
+    <field name="name">Blue Collar Comedy Tour: One for the Road</field>
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Concert film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Blue Collar Comedy Tour: One for the Road</field>
-    <field name="directed_by">C. B. Harding</field>
-    <field name="id">/en/blue_collar_comedy_tour_one_for_the_road</field>
-    <field name="initial_release_date">2006-06-27</field>
   </doc>
   <doc>
+    <field name="id">/en/blue_collar_comedy_tour_the_movie</field>
+    <field name="directed_by">C. B. Harding</field>
+    <field name="initial_release_date">2003-03-28</field>
+    <field name="name">Blue Collar Comedy Tour: The Movie</field>
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Blue Collar Comedy Tour: The Movie</field>
-    <field name="directed_by">C. B. Harding</field>
-    <field name="id">/en/blue_collar_comedy_tour_the_movie</field>
-    <field name="initial_release_date">2003-03-28</field>
   </doc>
   <doc>
+    <field name="id">/en/blue_crush</field>
+    <field name="directed_by">John Stockwell</field>
+    <field name="initial_release_date">2002-08-08</field>
+    <field name="name">Blue Crush</field>
     <field name="genre">Teen film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="name">Blue Crush</field>
-    <field name="initial_release_date">2002-08-08</field>
-    <field name="id">/en/blue_crush</field>
-    <field name="directed_by">John Stockwell</field>
   </doc>
   <doc>
-    <field name="genre">Romance Film</field>
-    <field name="genre">Drama</field>
-    <field name="name">Blue Gate Crossing</field>
-    <field name="initial_release_date">2002-09-08</field>
     <field name="id">/en/blue_gate_crossing</field>
     <field name="directed_by">Yee Chin-yen</field>
+    <field name="initial_release_date">2002-09-08</field>
+    <field name="name">Blue Gate Crossing</field>
+    <field name="genre">Romance Film</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/blue_milk</field>
+    <field name="directed_by">William Grammer</field>
+    <field name="initial_release_date">2006-06-20</field>
+    <field name="name">Blue Milk</field>
     <field name="genre">Indie film</field>
     <field name="genre">Short Film</field>
     <field name="genre">Fan film</field>
-    <field name="name">Blue Milk</field>
-    <field name="initial_release_date">2006-06-20</field>
-    <field name="id">/en/blue_milk</field>
-    <field name="directed_by">William Grammer</field>
   </doc>
   <doc>
+    <field name="id">/en/blue_state</field>
+    <field name="directed_by">Marshall Lewy</field>
+    <field name="name">Blue State</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Political cinema</field>
@@ -4186,29 +4182,30 @@
     <field name="genre">Political satire</field>
     <field name="genre">Road movie</field>
     <field name="genre">Comedy</field>
-    <field name="name">Blue State</field>
-    <field name="id">/en/blue_state</field>
-    <field name="directed_by">Marshall Lewy</field>
   </doc>
   <doc>
+    <field name="id">/en/blueberry_2004</field>
+    <field name="directed_by">Jan Kounen</field>
+    <field name="initial_release_date">2004-02-11</field>
+    <field name="name">Blueberry</field>
     <field name="genre">Western</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
-    <field name="name">Blueberry</field>
-    <field name="initial_release_date">2004-02-11</field>
-    <field name="id">/en/blueberry_2004</field>
-    <field name="directed_by">Jan Kounen</field>
   </doc>
   <doc>
-    <field name="genre">Science Fiction</field>
-    <field name="genre">Drama</field>
-    <field name="name">Blueprint</field>
-    <field name="initial_release_date">2003-12-08</field>
     <field name="id">/en/blueprint_2003</field>
     <field name="directed_by">Rolf Schübel</field>
+    <field name="initial_release_date">2003-12-08</field>
+    <field name="name">Blueprint</field>
+    <field name="genre">Science Fiction</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/bluffmaster</field>
+    <field name="directed_by">Rohan Sippy</field>
+    <field name="initial_release_date">2005-12-16</field>
+    <field name="name">Bluffmaster!</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical</field>
     <field name="genre">Crime Fiction</field>
@@ -4219,102 +4216,102 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="name">Bluffmaster!</field>
-    <field name="initial_release_date">2005-12-16</field>
-    <field name="id">/en/bluffmaster</field>
-    <field name="directed_by">Rohan Sippy</field>
   </doc>
   <doc>
+    <field name="id">/en/boa_vs_python</field>
+    <field name="directed_by">David Flores</field>
+    <field name="initial_release_date">2004-05-24</field>
+    <field name="name">Boa vs. Python</field>
     <field name="genre">Horror</field>
     <field name="genre">Natural horror film</field>
     <field name="genre">Monster</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Creature Film</field>
-    <field name="name">Boa vs. Python</field>
-    <field name="initial_release_date">2004-05-24</field>
-    <field name="id">/en/boa_vs_python</field>
-    <field name="directed_by">David Flores</field>
   </doc>
   <doc>
+    <field name="id">/en/bobby</field>
+    <field name="directed_by">Emilio Estevez</field>
+    <field name="initial_release_date">2006-09-05</field>
+    <field name="name">Bobby</field>
     <field name="genre">Political drama</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">History</field>
     <field name="genre">Drama</field>
-    <field name="name">Bobby</field>
-    <field name="initial_release_date">2006-09-05</field>
-    <field name="id">/en/bobby</field>
-    <field name="directed_by">Emilio Estevez</field>
   </doc>
   <doc>
-    <field name="genre">Crime Fiction</field>
-    <field name="genre">Drama</field>
-    <field name="name">Boiler Room</field>
-    <field name="initial_release_date">2000-01-30</field>
     <field name="id">/en/boiler_room</field>
     <field name="directed_by">Ben Younger</field>
+    <field name="initial_release_date">2000-01-30</field>
+    <field name="name">Boiler Room</field>
+    <field name="genre">Crime Fiction</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
-    <field name="genre">Musical</field>
-    <field name="name">Bolletjes Blues</field>
-    <field name="initial_release_date">2006-03-23</field>
     <field name="id">/en/bolletjes_blues</field>
     <field name="directed_by">Brigit Hillenius</field>
     <field name="directed_by">Karin Junger</field>
+    <field name="initial_release_date">2006-03-23</field>
+    <field name="name">Bolletjes Blues</field>
+    <field name="genre">Musical</field>
   </doc>
   <doc>
+    <field name="id">/en/bollywood_hollywood</field>
+    <field name="directed_by">Deepa Mehta</field>
+    <field name="initial_release_date">2002-10-25</field>
+    <field name="name">Bollywood/Hollywood</field>
     <field name="genre">Bollywood</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bollywood/Hollywood</field>
-    <field name="initial_release_date">2002-10-25</field>
-    <field name="id">/en/bollywood_hollywood</field>
-    <field name="directed_by">Deepa Mehta</field>
   </doc>
   <doc>
+    <field name="id">/en/bomb_the_system</field>
+    <field name="directed_by">Adam Bhala Lough</field>
+    <field name="name">Bomb the System</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Indie film</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Drama</field>
-    <field name="name">Bomb the System</field>
-    <field name="id">/en/bomb_the_system</field>
-    <field name="directed_by">Adam Bhala Lough</field>
   </doc>
   <doc>
+    <field name="id">/en/bommarillu</field>
+    <field name="directed_by">Bhaskar</field>
+    <field name="initial_release_date">2006-08-09</field>
+    <field name="name">Bommarillu</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Bommarillu</field>
-    <field name="initial_release_date">2006-08-09</field>
-    <field name="id">/en/bommarillu</field>
-    <field name="directed_by">Bhaskar</field>
   </doc>
   <doc>
+    <field name="id">/en/bon_cop_bad_cop</field>
+    <field name="directed_by">Eric Canuel</field>
+    <field name="name">Bon Cop, Bad Cop</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Thriller</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bon Cop, Bad Cop</field>
-    <field name="id">/en/bon_cop_bad_cop</field>
-    <field name="directed_by">Eric Canuel</field>
   </doc>
   <doc>
+    <field name="id">/en/bones_2001</field>
+    <field name="directed_by">Ernest R. Dickerson</field>
+    <field name="initial_release_date">2001-10-26</field>
+    <field name="name">Bones</field>
     <field name="genre">Horror</field>
     <field name="genre">Blaxploitation film</field>
     <field name="genre">Action Film</field>
-    <field name="name">Bones</field>
-    <field name="initial_release_date">2001-10-26</field>
-    <field name="id">/en/bones_2001</field>
-    <field name="directed_by">Ernest R. Dickerson</field>
   </doc>
   <doc>
+    <field name="id">/en/bonjour_monsieur_shlomi</field>
+    <field name="directed_by">Shemi Zarhin</field>
+    <field name="initial_release_date">2003-04-03</field>
+    <field name="name">Bonjour Monsieur Shlomi</field>
     <field name="genre">World cinema</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Comedy-drama</field>
@@ -4322,45 +4319,45 @@
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bonjour Monsieur Shlomi</field>
-    <field name="initial_release_date">2003-04-03</field>
-    <field name="id">/en/bonjour_monsieur_shlomi</field>
-    <field name="directed_by">Shemi Zarhin</field>
   </doc>
   <doc>
+    <field name="id">/en/boogeyman</field>
+    <field name="directed_by">Stephen T. Kay</field>
+    <field name="initial_release_date">2005-02-04</field>
+    <field name="name">Boogeyman</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Teen film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="name">Boogeyman</field>
-    <field name="initial_release_date">2005-02-04</field>
-    <field name="id">/en/boogeyman</field>
-    <field name="directed_by">Stephen T. Kay</field>
   </doc>
   <doc>
+    <field name="id">/en/boogiepop_and_others_2000</field>
+    <field name="directed_by">Ryu Kaneda</field>
+    <field name="initial_release_date">2000-03-11</field>
+    <field name="name">Boogiepop and Others</field>
     <field name="genre">Animation</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Anime</field>
     <field name="genre">Thriller</field>
     <field name="genre">Japanese Movies</field>
-    <field name="name">Boogiepop and Others</field>
-    <field name="initial_release_date">2000-03-11</field>
-    <field name="id">/en/boogiepop_and_others_2000</field>
-    <field name="directed_by">Ryu Kaneda</field>
   </doc>
   <doc>
+    <field name="id">/en/book_of_love_2004</field>
+    <field name="directed_by">Alan Brown</field>
+    <field name="initial_release_date">2004-01-18</field>
+    <field name="name">Book of Love</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Book of Love</field>
-    <field name="initial_release_date">2004-01-18</field>
-    <field name="id">/en/book_of_love_2004</field>
-    <field name="directed_by">Alan Brown</field>
   </doc>
   <doc>
+    <field name="id">/en/book_of_shadows_blair_witch_2</field>
+    <field name="directed_by">Joe Berlinger</field>
+    <field name="initial_release_date">2000-10-27</field>
+    <field name="name">Book of Shadows: Blair Witch 2</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Mystery</field>
@@ -4369,98 +4366,98 @@
     <field name="genre">Thriller</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Crime Fiction</field>
-    <field name="name">Book of Shadows: Blair Witch 2</field>
-    <field name="initial_release_date">2000-10-27</field>
-    <field name="id">/en/book_of_shadows_blair_witch_2</field>
-    <field name="directed_by">Joe Berlinger</field>
   </doc>
   <doc>
-    <field name="genre">Crime Fiction</field>
-    <field name="genre">Drama</field>
-    <field name="name">Bimmer</field>
-    <field name="initial_release_date">2003-08-02</field>
     <field name="id">/en/boomer</field>
     <field name="directed_by">Pyotr Buslov</field>
+    <field name="initial_release_date">2003-08-02</field>
+    <field name="name">Bimmer</field>
+    <field name="genre">Crime Fiction</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Borat: Cultural Learnings of America for Make Benefit Glorious Nation of Kazakhstan</field>
-    <field name="initial_release_date">2006-08-04</field>
     <field name="id">/wikipedia/de_id/1782985</field>
     <field name="directed_by">Larry Charles</field>
+    <field name="initial_release_date">2006-08-04</field>
+    <field name="name">Borat: Cultural Learnings of America for Make Benefit Glorious Nation of Kazakhstan</field>
+    <field name="genre">Comedy</field>
   </doc>
   <doc>
-    <field name="genre">Documentary film</field>
-    <field name="name">Born into Brothels: Calcutta's Red Light Kids</field>
-    <field name="initial_release_date">2004-01-17</field>
     <field name="id">/en/born_into_brothels_calcuttas_red_light_kids</field>
     <field name="directed_by">Zana Briski</field>
     <field name="directed_by">Ross Kauffman</field>
+    <field name="initial_release_date">2004-01-17</field>
+    <field name="name">Born into Brothels: Calcutta's Red Light Kids</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
+    <field name="id">/en/free_radicals</field>
+    <field name="directed_by">Barbara Albert</field>
+    <field name="name">Free Radicals</field>
     <field name="genre">World cinema</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Art film</field>
     <field name="genre">Drama</field>
-    <field name="name">Free Radicals</field>
-    <field name="id">/en/free_radicals</field>
-    <field name="directed_by">Barbara Albert</field>
   </doc>
   <doc>
+    <field name="id">/en/boss_2006</field>
+    <field name="directed_by">V.N. Aditya</field>
+    <field name="initial_release_date">2006-09-27</field>
+    <field name="name">Boss</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Boss</field>
-    <field name="initial_release_date">2006-09-27</field>
-    <field name="id">/en/boss_2006</field>
-    <field name="directed_by">V.N. Aditya</field>
   </doc>
   <doc>
+    <field name="id">/en/bossn_up</field>
+    <field name="directed_by">Dylan C. Brown</field>
+    <field name="initial_release_date">2005-06-01</field>
+    <field name="name">Boss'n Up</field>
     <field name="genre">Musical</field>
     <field name="genre">Indie film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Boss'n Up</field>
-    <field name="initial_release_date">2005-06-01</field>
-    <field name="id">/en/bossn_up</field>
-    <field name="directed_by">Dylan C. Brown</field>
   </doc>
   <doc>
+    <field name="id">/en/bossa_nova_2000</field>
+    <field name="directed_by">Bruno Barreto</field>
+    <field name="initial_release_date">2000-02-18</field>
+    <field name="name">Bossa Nova</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bossa Nova</field>
-    <field name="initial_release_date">2000-02-18</field>
-    <field name="id">/en/bossa_nova_2000</field>
-    <field name="directed_by">Bruno Barreto</field>
   </doc>
   <doc>
-    <field name="genre">Musical</field>
-    <field name="name">Bosta</field>
     <field name="id">/en/bosta</field>
     <field name="directed_by">Philippe Aractingi</field>
+    <field name="name">Bosta</field>
+    <field name="genre">Musical</field>
   </doc>
   <doc>
+    <field name="id">/en/bowling_for_columbine</field>
+    <field name="directed_by">Michael Moore</field>
+    <field name="initial_release_date">2002-05-15</field>
+    <field name="name">Bowling for Columbine</field>
     <field name="genre">Indie film</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Historical Documentaries</field>
-    <field name="name">Bowling for Columbine</field>
-    <field name="initial_release_date">2002-05-15</field>
-    <field name="id">/en/bowling_for_columbine</field>
-    <field name="directed_by">Michael Moore</field>
   </doc>
   <doc>
+    <field name="id">/en/bowling_fun_and_fundamentals_for_boys_and_girls</field>
+    <field name="name">Bowling Fun And Fundamentals For Boys And Girls</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Sports</field>
-    <field name="name">Bowling Fun And Fundamentals For Boys And Girls</field>
-    <field name="id">/en/bowling_fun_and_fundamentals_for_boys_and_girls</field>
   </doc>
   <doc>
+    <field name="id">/en/boy_eats_girl</field>
+    <field name="directed_by">Stephen Bradley</field>
+    <field name="initial_release_date">2005-04-06</field>
+    <field name="name">Boy Eats Girl</field>
     <field name="genre">Indie film</field>
     <field name="genre">Horror</field>
     <field name="genre">Teen film</field>
@@ -4468,12 +4465,12 @@
     <field name="genre">Zombie Film</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Boy Eats Girl</field>
-    <field name="initial_release_date">2005-04-06</field>
-    <field name="id">/en/boy_eats_girl</field>
-    <field name="directed_by">Stephen Bradley</field>
   </doc>
   <doc>
+    <field name="id">/en/boynton_beach_club</field>
+    <field name="directed_by">Susan Seidelman</field>
+    <field name="initial_release_date">2006-08-04</field>
+    <field name="name">Boynton Beach Club</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
@@ -4481,12 +4478,12 @@
     <field name="genre">Slice of life</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Boynton Beach Club</field>
-    <field name="initial_release_date">2006-08-04</field>
-    <field name="id">/en/boynton_beach_club</field>
-    <field name="directed_by">Susan Seidelman</field>
   </doc>
   <doc>
+    <field name="id">/en/boys_2003</field>
+    <field name="directed_by">S. Shankar</field>
+    <field name="initial_release_date">2003-08-29</field>
+    <field name="name">Boys</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
@@ -4495,41 +4492,41 @@
     <field name="genre">Drama</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Musical Drama</field>
-    <field name="name">Boys</field>
-    <field name="initial_release_date">2003-08-29</field>
-    <field name="id">/en/boys_2003</field>
-    <field name="directed_by">S. Shankar</field>
   </doc>
   <doc>
+    <field name="id">/en/brain_blockers</field>
+    <field name="directed_by">Lincoln Kupchak</field>
+    <field name="initial_release_date">2007-03-15</field>
+    <field name="name">Brain Blockers</field>
     <field name="genre">Horror</field>
     <field name="genre">Zombie Film</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Brain Blockers</field>
-    <field name="initial_release_date">2007-03-15</field>
-    <field name="id">/en/brain_blockers</field>
-    <field name="directed_by">Lincoln Kupchak</field>
   </doc>
   <doc>
+    <field name="id">/en/breakin_all_the_rules</field>
+    <field name="directed_by">Daniel Taplitz</field>
+    <field name="initial_release_date">2004-05-14</field>
+    <field name="name">Breakin' All the Rules</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy of Errors</field>
     <field name="genre">Comedy</field>
-    <field name="name">Breakin' All the Rules</field>
-    <field name="initial_release_date">2004-05-14</field>
-    <field name="id">/en/breakin_all_the_rules</field>
-    <field name="directed_by">Daniel Taplitz</field>
   </doc>
   <doc>
+    <field name="id">/en/breaking_and_entering</field>
+    <field name="directed_by">Anthony Minghella</field>
+    <field name="initial_release_date">2006-09-13</field>
+    <field name="name">Breaking and Entering</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Breaking and Entering</field>
-    <field name="initial_release_date">2006-09-13</field>
-    <field name="id">/en/breaking_and_entering</field>
-    <field name="directed_by">Anthony Minghella</field>
   </doc>
   <doc>
+    <field name="id">/en/brick_2006</field>
+    <field name="directed_by">Rian Johnson</field>
+    <field name="initial_release_date">2006-04-07</field>
+    <field name="name">Brick</field>
     <field name="genre">Film noir</field>
     <field name="genre">Indie film</field>
     <field name="genre">Teen film</field>
@@ -4540,12 +4537,12 @@
     <field name="genre">Thriller</field>
     <field name="genre">Detective fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Brick</field>
-    <field name="initial_release_date">2006-04-07</field>
-    <field name="id">/en/brick_2006</field>
-    <field name="directed_by">Rian Johnson</field>
   </doc>
   <doc>
+    <field name="id">/en/bride_and_prejudice</field>
+    <field name="directed_by">Gurinder Chadha</field>
+    <field name="initial_release_date">2004-10-06</field>
+    <field name="name">Bride and Prejudice</field>
     <field name="genre">Musical</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
@@ -4555,44 +4552,44 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bride and Prejudice</field>
-    <field name="initial_release_date">2004-10-06</field>
-    <field name="id">/en/bride_and_prejudice</field>
-    <field name="directed_by">Gurinder Chadha</field>
   </doc>
   <doc>
+    <field name="id">/en/bridget_jones_the_edge_of_reason</field>
+    <field name="directed_by">Beeban Kidron</field>
+    <field name="initial_release_date">2004-11-08</field>
+    <field name="name">Bridget Jones: The Edge of Reason</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bridget Jones: The Edge of Reason</field>
-    <field name="initial_release_date">2004-11-08</field>
-    <field name="id">/en/bridget_jones_the_edge_of_reason</field>
-    <field name="directed_by">Beeban Kidron</field>
   </doc>
   <doc>
+    <field name="id">/en/bridget_joness_diary_2001</field>
+    <field name="directed_by">Sharon Maguire</field>
+    <field name="initial_release_date">2001-04-04</field>
+    <field name="name">Bridget Jones's Diary</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bridget Jones's Diary</field>
-    <field name="initial_release_date">2001-04-04</field>
-    <field name="id">/en/bridget_joness_diary_2001</field>
-    <field name="directed_by">Sharon Maguire</field>
   </doc>
   <doc>
+    <field name="id">/en/brigham_city_2001</field>
+    <field name="directed_by">Richard Dutcher</field>
+    <field name="name">Brigham City</field>
     <field name="genre">Mystery</field>
     <field name="genre">Indie film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Brigham City</field>
-    <field name="id">/en/brigham_city_2001</field>
-    <field name="directed_by">Richard Dutcher</field>
   </doc>
   <doc>
+    <field name="id">/en/bright_young_things</field>
+    <field name="directed_by">Stephen Fry</field>
+    <field name="initial_release_date">2003-10-03</field>
+    <field name="name">Bright Young Things</field>
     <field name="genre">Indie film</field>
     <field name="genre">War film</field>
     <field name="genre">Comedy-drama</field>
@@ -4600,91 +4597,91 @@
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bright Young Things</field>
-    <field name="initial_release_date">2003-10-03</field>
-    <field name="id">/en/bright_young_things</field>
-    <field name="directed_by">Stephen Fry</field>
   </doc>
   <doc>
-    <field name="genre">Thriller</field>
-    <field name="name">Brilliant</field>
-    <field name="initial_release_date">2004-02-15</field>
     <field name="id">/wikipedia/en_title/Brilliant_$0028film$0029</field>
     <field name="directed_by">Roger Cardinal</field>
+    <field name="initial_release_date">2004-02-15</field>
+    <field name="name">Brilliant</field>
+    <field name="genre">Thriller</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="genre">Sports</field>
-    <field name="name">Bring It On</field>
-    <field name="initial_release_date">2000-08-22</field>
     <field name="id">/en/bring_it_on</field>
     <field name="directed_by">Peyton Reed</field>
+    <field name="initial_release_date">2000-08-22</field>
+    <field name="name">Bring It On</field>
+    <field name="genre">Comedy</field>
+    <field name="genre">Sports</field>
   </doc>
   <doc>
-    <field name="genre">Teen film</field>
-    <field name="genre">Sports</field>
-    <field name="genre">Comedy</field>
-    <field name="name">Bring It On Again</field>
-    <field name="initial_release_date">2004-01-13</field>
     <field name="id">/en/bring_it_on_again</field>
     <field name="directed_by">Damon Santostefano</field>
-  </doc>
-  <doc>
+    <field name="initial_release_date">2004-01-13</field>
+    <field name="name">Bring It On Again</field>
     <field name="genre">Teen film</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bring It On: All or Nothing</field>
-    <field name="initial_release_date">2006-08-08</field>
-    <field name="id">/en/bring_it_on_all_or_nothing</field>
-    <field name="directed_by">Steve Rash</field>
   </doc>
   <doc>
+    <field name="id">/en/bring_it_on_all_or_nothing</field>
+    <field name="directed_by">Steve Rash</field>
+    <field name="initial_release_date">2006-08-08</field>
+    <field name="name">Bring It On: All or Nothing</field>
+    <field name="genre">Teen film</field>
+    <field name="genre">Sports</field>
+    <field name="genre">Comedy</field>
+  </doc>
+  <doc>
+    <field name="id">/en/bringing_down_the_house</field>
+    <field name="directed_by">Adam Shankman</field>
+    <field name="initial_release_date">2003-03-07</field>
+    <field name="name">Bringing Down the House</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Screwball comedy</field>
     <field name="genre">Comedy of Errors</field>
     <field name="genre">Crime Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bringing Down the House</field>
-    <field name="initial_release_date">2003-03-07</field>
-    <field name="id">/en/bringing_down_the_house</field>
-    <field name="directed_by">Adam Shankman</field>
   </doc>
   <doc>
-    <field name="genre">Documentary film</field>
-    <field name="genre">Biographical film</field>
-    <field name="name">Broadway: The Golden Age</field>
-    <field name="initial_release_date">2004-06-11</field>
     <field name="id">/en/broadway_the_golden_age</field>
     <field name="directed_by">Rick McKay</field>
+    <field name="initial_release_date">2004-06-11</field>
+    <field name="name">Broadway: The Golden Age</field>
+    <field name="genre">Documentary film</field>
+    <field name="genre">Biographical film</field>
   </doc>
   <doc>
+    <field name="id">/en/brokeback_mountain</field>
+    <field name="directed_by">Ang Lee</field>
+    <field name="initial_release_date">2005-09-02</field>
+    <field name="name">Brokeback Mountain</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Epic film</field>
     <field name="genre">Drama</field>
-    <field name="name">Brokeback Mountain</field>
-    <field name="initial_release_date">2005-09-02</field>
-    <field name="id">/en/brokeback_mountain</field>
-    <field name="directed_by">Ang Lee</field>
   </doc>
   <doc>
+    <field name="id">/en/broken_allegiance</field>
+    <field name="directed_by">Nick Hallam</field>
+    <field name="name">Broken Allegiance</field>
     <field name="genre">Indie film</field>
     <field name="genre">Short Film</field>
     <field name="genre">Fan film</field>
-    <field name="name">Broken Allegiance</field>
-    <field name="id">/en/broken_allegiance</field>
-    <field name="directed_by">Nick Hallam</field>
   </doc>
   <doc>
+    <field name="id">/en/broken_flowers</field>
+    <field name="directed_by">Jim Jarmusch</field>
+    <field name="initial_release_date">2005-08-05</field>
+    <field name="name">Broken Flowers</field>
     <field name="genre">Mystery</field>
     <field name="genre">Road movie</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Broken Flowers</field>
-    <field name="initial_release_date">2005-08-05</field>
-    <field name="id">/en/broken_flowers</field>
-    <field name="directed_by">Jim Jarmusch</field>
   </doc>
   <doc>
+    <field name="id">/en/the_broken_hearts_club_a_romantic_comedy</field>
+    <field name="directed_by">Greg Berlanti</field>
+    <field name="initial_release_date">2000-01-29</field>
+    <field name="name">The Broken Hearts Club: A Romantic Comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">LGBT</field>
     <field name="genre">Romantic comedy</field>
@@ -4696,71 +4693,71 @@
     <field name="genre">Ensemble Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">The Broken Hearts Club: A Romantic Comedy</field>
-    <field name="initial_release_date">2000-01-29</field>
-    <field name="id">/en/the_broken_hearts_club_a_romantic_comedy</field>
-    <field name="directed_by">Greg Berlanti</field>
   </doc>
   <doc>
+    <field name="id">/en/brooklyn_lobster</field>
+    <field name="directed_by">Kevin Jordan</field>
+    <field name="initial_release_date">2005-09-09</field>
+    <field name="name">Brooklyn Lobster</field>
     <field name="genre">Indie film</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Brooklyn Lobster</field>
-    <field name="initial_release_date">2005-09-09</field>
-    <field name="id">/en/brooklyn_lobster</field>
-    <field name="directed_by">Kevin Jordan</field>
   </doc>
   <doc>
-    <field name="genre">Thriller</field>
-    <field name="genre">Crime Fiction</field>
-    <field name="name">Brother</field>
     <field name="id">/en/brother</field>
     <field name="directed_by">Takeshi Kitano</field>
+    <field name="name">Brother</field>
+    <field name="genre">Thriller</field>
+    <field name="genre">Crime Fiction</field>
   </doc>
   <doc>
+    <field name="id">/en/brother_bear</field>
+    <field name="directed_by">Aaron Blaise</field>
+    <field name="directed_by">Robert A. Walker</field>
+    <field name="initial_release_date">2003-10-20</field>
+    <field name="name">Brother Bear</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Animation</field>
     <field name="genre">Adventure Film</field>
-    <field name="name">Brother Bear</field>
-    <field name="initial_release_date">2003-10-20</field>
-    <field name="id">/en/brother_bear</field>
-    <field name="directed_by">Aaron Blaise</field>
-    <field name="directed_by">Robert A. Walker</field>
   </doc>
   <doc>
+    <field name="id">/en/brother_bear_2</field>
+    <field name="directed_by">Ben Gluck</field>
+    <field name="initial_release_date">2006-08-29</field>
+    <field name="name">Brother Bear 2</field>
     <field name="genre">Family</field>
     <field name="genre">Animated cartoon</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Animation</field>
-    <field name="name">Brother Bear 2</field>
-    <field name="initial_release_date">2006-08-29</field>
-    <field name="id">/en/brother_bear_2</field>
-    <field name="directed_by">Ben Gluck</field>
   </doc>
   <doc>
+    <field name="id">/en/brother_2</field>
+    <field name="directed_by">Aleksei Balabanov</field>
+    <field name="initial_release_date">2000-05-11</field>
+    <field name="name">Brother 2</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
-    <field name="name">Brother 2</field>
-    <field name="initial_release_date">2000-05-11</field>
-    <field name="id">/en/brother_2</field>
-    <field name="directed_by">Aleksei Balabanov</field>
   </doc>
   <doc>
-    <field name="genre">Horror</field>
-    <field name="genre">Cult film</field>
-    <field name="genre">Creature Film</field>
-    <field name="name">Brotherhood of Blood</field>
     <field name="id">/en/brotherhood_of_blood</field>
     <field name="directed_by">Michael Roesch</field>
     <field name="directed_by">Peter Scheerer</field>
     <field name="directed_by">Sid Haig</field>
+    <field name="name">Brotherhood of Blood</field>
+    <field name="genre">Horror</field>
+    <field name="genre">Cult film</field>
+    <field name="genre">Creature Film</field>
   </doc>
   <doc>
+    <field name="id">/en/brotherhood_of_the_wolf</field>
+    <field name="directed_by">Christophe Gans</field>
+    <field name="initial_release_date">2001-01-31</field>
+    <field name="name">Brotherhood of the Wolf</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Mystery</field>
@@ -4768,12 +4765,13 @@
     <field name="genre">Historical fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
-    <field name="name">Brotherhood of the Wolf</field>
-    <field name="initial_release_date">2001-01-31</field>
-    <field name="id">/en/brotherhood_of_the_wolf</field>
-    <field name="directed_by">Christophe Gans</field>
   </doc>
   <doc>
+    <field name="id">/en/brothers_of_the_head</field>
+    <field name="directed_by">Keith Fulton</field>
+    <field name="directed_by">Louis Pepe</field>
+    <field name="initial_release_date">2005-09-10</field>
+    <field name="name">Brothers of the Head</field>
     <field name="genre">Indie film</field>
     <field name="genre">Musical</field>
     <field name="genre">Film adaptation</field>
@@ -4783,13 +4781,12 @@
     <field name="genre">Historical period drama</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Brothers of the Head</field>
-    <field name="initial_release_date">2005-09-10</field>
-    <field name="id">/en/brothers_of_the_head</field>
-    <field name="directed_by">Keith Fulton</field>
-    <field name="directed_by">Louis Pepe</field>
   </doc>
   <doc>
+    <field name="id">/en/brown_sugar_2002</field>
+    <field name="directed_by">Rick Famuyiwa</field>
+    <field name="initial_release_date">2002-10-05</field>
+    <field name="name">Brown Sugar</field>
     <field name="genre">Musical</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Coming of age</field>
@@ -4798,74 +4795,74 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Brown Sugar</field>
-    <field name="initial_release_date">2002-10-05</field>
-    <field name="id">/en/brown_sugar_2002</field>
-    <field name="directed_by">Rick Famuyiwa</field>
   </doc>
   <doc>
+    <field name="id">/en/bruce_almighty</field>
+    <field name="directed_by">Tom Shadyac</field>
+    <field name="initial_release_date">2003-05-23</field>
+    <field name="name">Bruce Almighty</field>
     <field name="genre">Comedy</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bruce Almighty</field>
-    <field name="initial_release_date">2003-05-23</field>
-    <field name="id">/en/bruce_almighty</field>
-    <field name="directed_by">Tom Shadyac</field>
   </doc>
   <doc>
+    <field name="id">/en/bubba_ho-tep</field>
+    <field name="directed_by">Don Coscarelli</field>
+    <field name="initial_release_date">2002-06-09</field>
+    <field name="name">Bubba Ho-Tep</field>
     <field name="genre">Horror</field>
     <field name="genre">Parody</field>
     <field name="genre">Comedy</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="name">Bubba Ho-Tep</field>
-    <field name="initial_release_date">2002-06-09</field>
-    <field name="id">/en/bubba_ho-tep</field>
-    <field name="directed_by">Don Coscarelli</field>
   </doc>
   <doc>
+    <field name="id">/en/bubble</field>
+    <field name="directed_by">Steven Soderbergh</field>
+    <field name="initial_release_date">2005-09-03</field>
+    <field name="name">Bubble</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Mystery</field>
     <field name="genre">Indie film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Bubble</field>
-    <field name="initial_release_date">2005-09-03</field>
-    <field name="id">/en/bubble</field>
-    <field name="directed_by">Steven Soderbergh</field>
   </doc>
   <doc>
+    <field name="id">/en/bubble_boy</field>
+    <field name="directed_by">Blair Hayes</field>
+    <field name="initial_release_date">2001-08-23</field>
+    <field name="name">Bubble Boy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Teen film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Bubble Boy</field>
-    <field name="initial_release_date">2001-08-23</field>
-    <field name="id">/en/bubble_boy</field>
-    <field name="directed_by">Blair Hayes</field>
   </doc>
   <doc>
+    <field name="id">/en/buddy_boy</field>
+    <field name="directed_by">Mark Hanlon</field>
+    <field name="initial_release_date">2000-03-24</field>
+    <field name="name">Buddy Boy</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Thriller</field>
     <field name="genre">Indie film</field>
     <field name="genre">Erotic thriller</field>
-    <field name="name">Buddy Boy</field>
-    <field name="initial_release_date">2000-03-24</field>
-    <field name="id">/en/buddy_boy</field>
-    <field name="directed_by">Mark Hanlon</field>
   </doc>
   <doc>
+    <field name="id">/en/buffalo_dreams</field>
+    <field name="directed_by">David Jackson</field>
+    <field name="initial_release_date">2005-03-11</field>
+    <field name="name">Buffalo Dreams</field>
     <field name="genre">Western</field>
     <field name="genre">Teen film</field>
     <field name="genre">Drama</field>
-    <field name="name">Buffalo Dreams</field>
-    <field name="initial_release_date">2005-03-11</field>
-    <field name="id">/en/buffalo_dreams</field>
-    <field name="directed_by">David Jackson</field>
   </doc>
   <doc>
+    <field name="id">/en/buffalo_soldiers</field>
+    <field name="directed_by">Gregor Jordan</field>
+    <field name="initial_release_date">2001-09-08</field>
+    <field name="name">Buffalo Soldiers</field>
     <field name="genre">War film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
@@ -4873,22 +4870,22 @@
     <field name="genre">Satire</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
-    <field name="name">Buffalo Soldiers</field>
-    <field name="initial_release_date">2001-09-08</field>
-    <field name="id">/en/buffalo_soldiers</field>
-    <field name="directed_by">Gregor Jordan</field>
   </doc>
   <doc>
+    <field name="id">/en/bug_2006</field>
+    <field name="directed_by">William Friedkin</field>
+    <field name="initial_release_date">2006-05-19</field>
+    <field name="name">Bug</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
-    <field name="name">Bug</field>
-    <field name="initial_release_date">2006-05-19</field>
-    <field name="id">/en/bug_2006</field>
-    <field name="directed_by">William Friedkin</field>
   </doc>
   <doc>
+    <field name="id">/en/bulletproof_monk</field>
+    <field name="directed_by">Paul Hunter</field>
+    <field name="initial_release_date">2003-04-16</field>
+    <field name="name">Bulletproof Monk</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action Film</field>
@@ -4897,43 +4894,43 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Bulletproof Monk</field>
-    <field name="initial_release_date">2003-04-16</field>
-    <field name="id">/en/bulletproof_monk</field>
-    <field name="directed_by">Paul Hunter</field>
   </doc>
   <doc>
+    <field name="id">/en/bully_2001</field>
+    <field name="directed_by">Larry Clark</field>
+    <field name="initial_release_date">2001-06-15</field>
+    <field name="name">Bully</field>
     <field name="genre">Teen film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Bully</field>
-    <field name="initial_release_date">2001-06-15</field>
-    <field name="id">/en/bully_2001</field>
-    <field name="directed_by">Larry Clark</field>
   </doc>
   <doc>
+    <field name="id">/en/bunny_2005</field>
+    <field name="directed_by">V. V. Vinayak</field>
+    <field name="initial_release_date">2005-04-06</field>
+    <field name="name">Bunny</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Tollywood</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Bunny</field>
-    <field name="initial_release_date">2005-04-06</field>
-    <field name="id">/en/bunny_2005</field>
-    <field name="directed_by">V. V. Vinayak</field>
   </doc>
   <doc>
+    <field name="id">/en/bunshinsaba</field>
+    <field name="directed_by">Ahn Byeong-ki</field>
+    <field name="initial_release_date">2004-05-14</field>
+    <field name="name">Bunshinsaba</field>
     <field name="genre">Horror</field>
     <field name="genre">World cinema</field>
     <field name="genre">East Asian cinema</field>
-    <field name="name">Bunshinsaba</field>
-    <field name="initial_release_date">2004-05-14</field>
-    <field name="id">/en/bunshinsaba</field>
-    <field name="directed_by">Ahn Byeong-ki</field>
   </doc>
   <doc>
+    <field name="id">/en/bunty_aur_babli</field>
+    <field name="directed_by">Shaad Ali</field>
+    <field name="initial_release_date">2005-05-27</field>
+    <field name="name">Bunty Aur Babli</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical</field>
     <field name="genre">World cinema</field>
@@ -4941,68 +4938,68 @@
     <field name="genre">Comedy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Crime Fiction</field>
-    <field name="name">Bunty Aur Babli</field>
-    <field name="initial_release_date">2005-05-27</field>
-    <field name="id">/en/bunty_aur_babli</field>
-    <field name="directed_by">Shaad Ali</field>
   </doc>
   <doc>
-    <field name="genre">Documentary film</field>
-    <field name="genre">True crime</field>
-    <field name="name">Bus 174</field>
-    <field name="initial_release_date">2002-10-22</field>
     <field name="id">/en/onibus_174</field>
     <field name="directed_by">José Padilha</field>
+    <field name="initial_release_date">2002-10-22</field>
+    <field name="name">Bus 174</field>
+    <field name="genre">Documentary film</field>
+    <field name="genre">True crime</field>
   </doc>
   <doc>
+    <field name="id">/en/bus_conductor</field>
+    <field name="directed_by">V. M. Vinu</field>
+    <field name="initial_release_date">2005-12-23</field>
+    <field name="name">Bus Conductor</field>
     <field name="genre">Comedy</field>
     <field name="genre">Action Film</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="name">Bus Conductor</field>
-    <field name="initial_release_date">2005-12-23</field>
-    <field name="id">/en/bus_conductor</field>
-    <field name="directed_by">V. M. Vinu</field>
   </doc>
   <doc>
-    <field name="genre">Indie film</field>
-    <field name="genre">Documentary film</field>
-    <field name="name">Busted Shoes and Broken Hearts: A Film About Lowlight</field>
     <field name="id">/m/0bvs38</field>
     <field name="directed_by">Michael Votto</field>
+    <field name="name">Busted Shoes and Broken Hearts: A Film About Lowlight</field>
+    <field name="genre">Indie film</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
+    <field name="id">/en/butterfly_2004</field>
+    <field name="directed_by">Yan Yan Mak</field>
+    <field name="initial_release_date">2004-09-04</field>
+    <field name="name">Butterfly</field>
     <field name="genre">LGBT</field>
     <field name="genre">Chinese Movies</field>
     <field name="genre">Drama</field>
-    <field name="name">Butterfly</field>
-    <field name="initial_release_date">2004-09-04</field>
-    <field name="id">/en/butterfly_2004</field>
-    <field name="directed_by">Yan Yan Mak</field>
   </doc>
   <doc>
+    <field name="id">/en/butterfly_on_a_wheel</field>
+    <field name="directed_by">Mike Barker</field>
+    <field name="initial_release_date">2007-02-10</field>
+    <field name="name">Butterfly on a Wheel</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Butterfly on a Wheel</field>
-    <field name="initial_release_date">2007-02-10</field>
-    <field name="id">/en/butterfly_on_a_wheel</field>
-    <field name="directed_by">Mike Barker</field>
   </doc>
   <doc>
+    <field name="id">/en/c_i_d_moosa</field>
+    <field name="directed_by">Johny Antony</field>
+    <field name="initial_release_date">2003-07-04</field>
+    <field name="name">C.I.D.Moosa</field>
     <field name="genre">Action Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
-    <field name="name">C.I.D.Moosa</field>
-    <field name="initial_release_date">2003-07-04</field>
-    <field name="id">/en/c_i_d_moosa</field>
-    <field name="directed_by">Johny Antony</field>
   </doc>
   <doc>
+    <field name="id">/en/c_r_a_z_y</field>
+    <field name="directed_by">Jean-Marc Vallée</field>
+    <field name="initial_release_date">2005-05-27</field>
+    <field name="name">C.R.A.Z.Y.</field>
     <field name="genre">LGBT</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy-drama</field>
@@ -5012,12 +5009,11 @@
     <field name="genre">Historical period drama</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Drama</field>
-    <field name="name">C.R.A.Z.Y.</field>
-    <field name="initial_release_date">2005-05-27</field>
-    <field name="id">/en/c_r_a_z_y</field>
-    <field name="directed_by">Jean-Marc Vallée</field>
   </doc>
   <doc>
+    <field name="id">/en/c_s_a_the_confederate_states_of_america</field>
+    <field name="directed_by">Kevin Willmott</field>
+    <field name="name">C.S.A.: The Confederate States of America</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Satire</field>
     <field name="genre">Black comedy</field>
@@ -5026,122 +5022,123 @@
     <field name="genre">Political cinema</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">C.S.A.: The Confederate States of America</field>
-    <field name="id">/en/c_s_a_the_confederate_states_of_america</field>
-    <field name="directed_by">Kevin Willmott</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Cabaret Paradis</field>
-    <field name="initial_release_date">2006-04-12</field>
     <field name="id">/en/cabaret_paradis</field>
     <field name="directed_by">Corinne Benizio</field>
     <field name="directed_by">Gilles Benizio</field>
+    <field name="initial_release_date">2006-04-12</field>
+    <field name="name">Cabaret Paradis</field>
+    <field name="genre">Comedy</field>
   </doc>
   <doc>
+    <field name="id">/wikipedia/it_id/335645</field>
+    <field name="directed_by">Michael Haneke</field>
+    <field name="initial_release_date">2005-05-14</field>
+    <field name="name">Caché</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Caché</field>
-    <field name="initial_release_date">2005-05-14</field>
-    <field name="id">/wikipedia/it_id/335645</field>
-    <field name="directed_by">Michael Haneke</field>
   </doc>
   <doc>
-    <field name="genre">Drama</field>
-    <field name="name">Cactuses</field>
-    <field name="initial_release_date">2006-03-15</field>
     <field name="id">/en/cactuses</field>
     <field name="directed_by">Matt Hannon</field>
     <field name="directed_by">Rick Rapoza</field>
+    <field name="initial_release_date">2006-03-15</field>
+    <field name="name">Cactuses</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
+    <field name="id">/en/cadet_kelly</field>
+    <field name="directed_by">Larry Shaw</field>
+    <field name="initial_release_date">2002-03-08</field>
+    <field name="name">Cadet Kelly</field>
     <field name="genre">Teen film</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
-    <field name="name">Cadet Kelly</field>
-    <field name="initial_release_date">2002-03-08</field>
-    <field name="id">/en/cadet_kelly</field>
-    <field name="directed_by">Larry Shaw</field>
   </doc>
   <doc>
+    <field name="id">/en/caffeine_2006</field>
+    <field name="directed_by">John Cosgrove</field>
+    <field name="name">Caffeine</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Workplace Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Caffeine</field>
-    <field name="id">/en/caffeine_2006</field>
-    <field name="directed_by">John Cosgrove</field>
   </doc>
   <doc>
+    <field name="id">/wikipedia/es_id/1062610</field>
+    <field name="directed_by">Nisha Ganatra</field>
+    <field name="directed_by">Jennifer Arzt</field>
+    <field name="name">Cake</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Short Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Cake</field>
-    <field name="id">/wikipedia/es_id/1062610</field>
-    <field name="directed_by">Nisha Ganatra</field>
-    <field name="directed_by">Jennifer Arzt</field>
   </doc>
   <doc>
+    <field name="id">/en/calcutta_mail</field>
+    <field name="directed_by">Sudhir Mishra</field>
+    <field name="initial_release_date">2003-06-30</field>
+    <field name="name">Calcutta Mail</field>
     <field name="genre">Thriller</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Calcutta Mail</field>
-    <field name="initial_release_date">2003-06-30</field>
-    <field name="id">/en/calcutta_mail</field>
-    <field name="directed_by">Sudhir Mishra</field>
   </doc>
   <doc>
-    <field name="genre">Indie film</field>
-    <field name="genre">Documentary film</field>
-    <field name="name">Hackers Wanted</field>
     <field name="id">/en/can_you_hack_it</field>
     <field name="directed_by">Sam Bozzo</field>
+    <field name="name">Hackers Wanted</field>
+    <field name="genre">Indie film</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
+    <field name="id">/en/candy_2006</field>
+    <field name="directed_by">Neil Armfield</field>
+    <field name="initial_release_date">2006-04-27</field>
+    <field name="name">Candy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Indie film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="name">Candy</field>
-    <field name="initial_release_date">2006-04-27</field>
-    <field name="id">/en/candy_2006</field>
-    <field name="directed_by">Neil Armfield</field>
   </doc>
   <doc>
+    <field name="id">/en/caotica_ana</field>
+    <field name="directed_by">Julio Medem</field>
+    <field name="initial_release_date">2007-08-24</field>
+    <field name="name">Caótica Ana</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="name">Caótica Ana</field>
-    <field name="initial_release_date">2007-08-24</field>
-    <field name="id">/en/caotica_ana</field>
-    <field name="directed_by">Julio Medem</field>
   </doc>
   <doc>
+    <field name="id">/en/capote</field>
+    <field name="directed_by">Bennett Miller</field>
+    <field name="initial_release_date">2005-09-02</field>
+    <field name="name">Capote</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="name">Capote</field>
-    <field name="initial_release_date">2005-09-02</field>
-    <field name="id">/en/capote</field>
-    <field name="directed_by">Bennett Miller</field>
   </doc>
   <doc>
+    <field name="id">/en/capturing_the_friedmans</field>
+    <field name="directed_by">Andrew Jarecki</field>
+    <field name="initial_release_date">2003-01-17</field>
+    <field name="name">Capturing the Friedmans</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Mystery</field>
     <field name="genre">Biographical film</field>
-    <field name="name">Capturing the Friedmans</field>
-    <field name="initial_release_date">2003-01-17</field>
-    <field name="id">/en/capturing_the_friedmans</field>
-    <field name="directed_by">Andrew Jarecki</field>
   </doc>
   <doc>
+    <field name="id">/en/care_bears_journey_to_joke_a_lot</field>
+    <field name="directed_by">Mike Fallows</field>
+    <field name="initial_release_date">2004-10-05</field>
+    <field name="name">Care Bears: Journey to Joke-a-lot</field>
     <field name="genre">Musical</field>
     <field name="genre">Computer Animation</field>
     <field name="genre">Animation</field>
@@ -5150,35 +5147,35 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Family</field>
-    <field name="name">Care Bears: Journey to Joke-a-lot</field>
-    <field name="initial_release_date">2004-10-05</field>
-    <field name="id">/en/care_bears_journey_to_joke_a_lot</field>
-    <field name="directed_by">Mike Fallows</field>
   </doc>
   <doc>
+    <field name="id">/en/cargo_2006</field>
+    <field name="directed_by">Clive Gordon</field>
+    <field name="initial_release_date">2006-01-24</field>
+    <field name="name">Cargo</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Indie film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Cargo</field>
-    <field name="initial_release_date">2006-01-24</field>
-    <field name="id">/en/cargo_2006</field>
-    <field name="directed_by">Clive Gordon</field>
   </doc>
   <doc>
+    <field name="id">/en/cars</field>
+    <field name="directed_by">John Lasseter</field>
+    <field name="directed_by">Joe Ranft</field>
+    <field name="initial_release_date">2006-03-14</field>
+    <field name="name">Cars</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
-    <field name="name">Cars</field>
-    <field name="initial_release_date">2006-03-14</field>
-    <field name="id">/en/cars</field>
-    <field name="directed_by">John Lasseter</field>
-    <field name="directed_by">Joe Ranft</field>
   </doc>
   <doc>
+    <field name="id">/en/casanova</field>
+    <field name="directed_by">Lasse Hallström</field>
+    <field name="initial_release_date">2005-09-03</field>
+    <field name="name">Casanova</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Costume drama</field>
@@ -5187,55 +5184,52 @@
     <field name="genre">Swashbuckler film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Casanova</field>
-    <field name="initial_release_date">2005-09-03</field>
-    <field name="id">/en/casanova</field>
-    <field name="directed_by">Lasse Hallström</field>
   </doc>
   <doc>
+    <field name="id">/en/case_of_evil</field>
+    <field name="directed_by">Graham Theakston</field>
+    <field name="initial_release_date">2002-10-25</field>
+    <field name="name">Sherlock: Case of Evil</field>
     <field name="genre">Mystery</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Sherlock: Case of Evil</field>
-    <field name="initial_release_date">2002-10-25</field>
-    <field name="id">/en/case_of_evil</field>
-    <field name="directed_by">Graham Theakston</field>
   </doc>
   <doc>
     <field name="id">/en/cast_away</field>
-    <field name="name">Cast Away</field>
     <field name="initial_release_date">2000-12-07</field>
+    <field name="name">Cast Away</field>
+    <field name="directed_by">Robert Zemeckis</field>
     <field name="genre">Airplanes and airports</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Robert Zemeckis</field>
   </doc>
   <doc>
     <field name="id">/en/castlevania_2007</field>
     <field name="name">Castlevania</field>
-    <field name="genre">Action Film</field>
-    <field name="genre">Horror</field>
     <field name="directed_by">Paul W. S. Anderson</field>
     <field name="directed_by">Sylvain White</field>
+    <field name="genre">Action Film</field>
+    <field name="genre">Horror</field>
   </doc>
   <doc>
     <field name="id">/en/catch_me_if_you_can</field>
-    <field name="name">Catch Me If You Can</field>
     <field name="initial_release_date">2002-12-16</field>
+    <field name="name">Catch Me If You Can</field>
+    <field name="directed_by">Steven Spielberg</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Steven Spielberg</field>
   </doc>
   <doc>
     <field name="id">/en/catch_that_kid</field>
-    <field name="name">Catch That Kid</field>
     <field name="initial_release_date">2004-02-06</field>
+    <field name="name">Catch That Kid</field>
+    <field name="directed_by">Bart Freundlich</field>
     <field name="genre">Teen film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Crime Fiction</field>
@@ -5245,20 +5239,20 @@
     <field name="genre">Crime Comedy</field>
     <field name="genre">Family-Oriented Adventure</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Bart Freundlich</field>
   </doc>
   <doc>
     <field name="id">/en/caterina_in_the_big_city</field>
-    <field name="name">Caterina in the Big City</field>
     <field name="initial_release_date">2003-10-24</field>
+    <field name="name">Caterina in the Big City</field>
+    <field name="directed_by">Paolo Virzì</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Paolo Virzì</field>
   </doc>
   <doc>
     <field name="id">/en/cats_dogs</field>
-    <field name="name">Cats &amp;amp; Dogs</field>
     <field name="initial_release_date">2001-07-04</field>
+    <field name="name">Cats &amp;amp; Dogs</field>
+    <field name="directed_by">Lawrence Guterman</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Family</field>
     <field name="genre">Action Film</field>
@@ -5266,24 +5260,24 @@
     <field name="genre">Fantasy Adventure</field>
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Lawrence Guterman</field>
   </doc>
   <doc>
     <field name="id">/en/catwoman_2004</field>
-    <field name="name">Catwoman</field>
     <field name="initial_release_date">2004-07-19</field>
+    <field name="name">Catwoman</field>
+    <field name="directed_by">Pitof</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Thriller</field>
     <field name="genre">Superhero movie</field>
-    <field name="directed_by">Pitof</field>
   </doc>
   <doc>
     <field name="id">/en/caved_in_prehistoric_terror</field>
-    <field name="name">Caved In: Prehistoric Terror</field>
     <field name="initial_release_date">2006-01-07</field>
+    <field name="name">Caved In: Prehistoric Terror</field>
+    <field name="directed_by">Richard Pepin</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
     <field name="genre">Natural horror film</field>
@@ -5292,107 +5286,107 @@
     <field name="genre">Television film</field>
     <field name="genre">Creature Film</field>
     <field name="genre">Sci-Fi Horror</field>
-    <field name="directed_by">Richard Pepin</field>
   </doc>
   <doc>
     <field name="id">/en/cellular</field>
-    <field name="name">Cellular</field>
     <field name="initial_release_date">2004-09-10</field>
+    <field name="name">Cellular</field>
+    <field name="directed_by">David R. Ellis</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Action/Adventure</field>
-    <field name="directed_by">David R. Ellis</field>
   </doc>
   <doc>
     <field name="id">/en/center_stage</field>
-    <field name="name">Center Stage</field>
     <field name="initial_release_date">2000-05-12</field>
+    <field name="name">Center Stage</field>
+    <field name="directed_by">Nicholas Hytner</field>
     <field name="genre">Teen film</field>
     <field name="genre">Dance film</field>
     <field name="genre">Musical</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Nicholas Hytner</field>
   </doc>
   <doc>
     <field name="id">/en/chai_lai</field>
-    <field name="name">Chai Lai</field>
     <field name="initial_release_date">2006-01-26</field>
+    <field name="name">Chai Lai</field>
+    <field name="directed_by">Poj Arnon</field>
     <field name="genre">Action Film</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Poj Arnon</field>
   </doc>
   <doc>
     <field name="id">/en/chain_2004</field>
     <field name="name">Chain</field>
-    <field name="genre">Documentary film</field>
     <field name="directed_by">Jem Cohen</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
     <field name="id">/en/chakram_2005</field>
-    <field name="name">Chakram</field>
     <field name="initial_release_date">2005-03-25</field>
+    <field name="name">Chakram</field>
+    <field name="directed_by">Krishna Vamsi</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Krishna Vamsi</field>
   </doc>
   <doc>
     <field name="id">/en/challenger_2007</field>
     <field name="name">Challenger</field>
-    <field name="genre">Drama</field>
     <field name="directed_by">Philip Kaufman</field>
+    <field name="genre">Drama</field>
   </doc>
   <doc>
     <field name="id">/en/chalo_ishq_ladaaye</field>
-    <field name="name">Chalo Ishq Ladaaye</field>
     <field name="initial_release_date">2002-12-27</field>
+    <field name="name">Chalo Ishq Ladaaye</field>
+    <field name="directed_by">Aziz Sejawal</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Aziz Sejawal</field>
   </doc>
   <doc>
     <field name="id">/en/chalte_chalte</field>
-    <field name="name">Chalte Chalte</field>
     <field name="initial_release_date">2003-06-12</field>
+    <field name="name">Chalte Chalte</field>
+    <field name="directed_by">Aziz Mirza</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical</field>
     <field name="genre">Bollywood</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Aziz Mirza</field>
   </doc>
   <doc>
     <field name="id">/en/chameli</field>
-    <field name="name">Chameli</field>
     <field name="initial_release_date">2003-12-31</field>
+    <field name="name">Chameli</field>
+    <field name="directed_by">Sudhir Mishra</field>
+    <field name="directed_by">Anant Balani</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Sudhir Mishra</field>
-    <field name="directed_by">Anant Balani</field>
   </doc>
   <doc>
     <field name="id">/en/chandni_bar</field>
-    <field name="name">Chandni Bar</field>
     <field name="initial_release_date">2001-09-28</field>
+    <field name="name">Chandni Bar</field>
+    <field name="directed_by">Madhur Bhandarkar</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Madhur Bhandarkar</field>
   </doc>
   <doc>
     <field name="id">/en/chandramukhi</field>
-    <field name="name">Chandramukhi</field>
     <field name="initial_release_date">2005-04-13</field>
+    <field name="name">Chandramukhi</field>
+    <field name="directed_by">P. Vasu</field>
     <field name="genre">Horror</field>
     <field name="genre">World cinema</field>
     <field name="genre">Musical</field>
@@ -5401,71 +5395,71 @@
     <field name="genre">Comedy</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Romance Film</field>
-    <field name="directed_by">P. Vasu</field>
   </doc>
   <doc>
     <field name="id">/en/changing_lanes</field>
-    <field name="name">Changing Lanes</field>
     <field name="initial_release_date">2002-04-07</field>
+    <field name="name">Changing Lanes</field>
+    <field name="directed_by">Roger Michell</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Melodrama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Roger Michell</field>
   </doc>
   <doc>
     <field name="id">/en/chaos_2007</field>
-    <field name="name">Chaos</field>
     <field name="initial_release_date">2005-12-15</field>
+    <field name="name">Chaos</field>
+    <field name="directed_by">Tony Giglio</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Heist film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Tony Giglio</field>
   </doc>
   <doc>
     <field name="id">/en/chaos_2005</field>
-    <field name="name">Chaos</field>
     <field name="initial_release_date">2005-08-10</field>
+    <field name="name">Chaos</field>
+    <field name="directed_by">David DeFalco</field>
     <field name="genre">Horror</field>
     <field name="genre">Teen film</field>
     <field name="genre">B movie</field>
     <field name="genre">Slasher</field>
-    <field name="directed_by">David DeFalco</field>
   </doc>
   <doc>
     <field name="id">/en/chaos_and_creation_at_abbey_road</field>
-    <field name="name">Chaos and Creation at Abbey Road</field>
     <field name="initial_release_date">2006-01-27</field>
-    <field name="genre">Musical</field>
+    <field name="name">Chaos and Creation at Abbey Road</field>
     <field name="directed_by">Simon Hilton</field>
+    <field name="genre">Musical</field>
   </doc>
   <doc>
     <field name="id">/en/chaos_theory_2007</field>
     <field name="name">Chaos Theory</field>
+    <field name="directed_by">Marcos Siega</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Marcos Siega</field>
   </doc>
   <doc>
     <field name="id">/en/chapter_27</field>
-    <field name="name">Chapter 27</field>
     <field name="initial_release_date">2007-01-25</field>
+    <field name="name">Chapter 27</field>
+    <field name="directed_by">Jarrett Schaefer</field>
     <field name="genre">Indie film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jarrett Schaefer</field>
   </doc>
   <doc>
     <field name="id">/en/charlie_and_the_chocolate_factory_2005</field>
-    <field name="name">Charlie and the Chocolate Factory</field>
     <field name="initial_release_date">2005-07-10</field>
+    <field name="name">Charlie and the Chocolate Factory</field>
+    <field name="directed_by">Tim Burton</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Remake</field>
     <field name="genre">Adventure Film</field>
@@ -5473,23 +5467,23 @@
     <field name="genre">Children's Fantasy</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Tim Burton</field>
   </doc>
   <doc>
     <field name="id">/en/charlies_angels</field>
-    <field name="name">Charlie's Angels</field>
     <field name="initial_release_date">2000-10-22</field>
+    <field name="name">Charlie's Angels</field>
+    <field name="directed_by">Joseph McGinty Nichol</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Thriller</field>
-    <field name="directed_by">Joseph McGinty Nichol</field>
   </doc>
   <doc>
     <field name="id">/en/charlies_angels_full_throttle</field>
-    <field name="name">Charlie's Angels: Full Throttle</field>
     <field name="initial_release_date">2003-06-18</field>
+    <field name="name">Charlie's Angels: Full Throttle</field>
+    <field name="directed_by">Joseph McGinty Nichol</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
@@ -5497,55 +5491,55 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Joseph McGinty Nichol</field>
   </doc>
   <doc>
     <field name="id">/en/charlotte_gray</field>
-    <field name="name">Charlotte Gray</field>
     <field name="initial_release_date">2001-12-17</field>
+    <field name="name">Charlotte Gray</field>
+    <field name="directed_by">Gillian Armstrong</field>
     <field name="genre">Romance Film</field>
     <field name="genre">War film</field>
     <field name="genre">Political drama</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Gillian Armstrong</field>
   </doc>
   <doc>
     <field name="id">/en/charlottes_web</field>
-    <field name="name">Charlotte's Web</field>
     <field name="initial_release_date">2006-12-07</field>
+    <field name="name">Charlotte's Web</field>
+    <field name="directed_by">Gary Winick</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Gary Winick</field>
   </doc>
   <doc>
     <field name="id">/en/chasing_liberty</field>
-    <field name="name">Chasing Liberty</field>
     <field name="initial_release_date">2004-01-07</field>
+    <field name="name">Chasing Liberty</field>
+    <field name="directed_by">Andy Cadiff</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Teen film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Road movie</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Andy Cadiff</field>
   </doc>
   <doc>
     <field name="id">/en/chasing_papi</field>
-    <field name="name">Chasing Papi</field>
     <field name="initial_release_date">2003-04-16</field>
+    <field name="name">Chasing Papi</field>
+    <field name="directed_by">Linda Mendoza</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Farce</field>
     <field name="genre">Chase Movie</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Linda Mendoza</field>
   </doc>
   <doc>
     <field name="id">/en/chasing_sleep</field>
-    <field name="name">Chasing Sleep</field>
     <field name="initial_release_date">2001-09-16</field>
+    <field name="name">Chasing Sleep</field>
+    <field name="directed_by">Michael Walker</field>
     <field name="genre">Mystery</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Surrealism</field>
@@ -5553,80 +5547,79 @@
     <field name="genre">Indie film</field>
     <field name="genre">Suspense</field>
     <field name="genre">Crime Thriller</field>
-    <field name="directed_by">Michael Walker</field>
   </doc>
   <doc>
     <field name="id">/en/chasing_the_horizon</field>
-    <field name="name">Chasing the Horizon</field>
     <field name="initial_release_date">2006-04-26</field>
-    <field name="genre">Documentary film</field>
-    <field name="genre">Auto racing</field>
+    <field name="name">Chasing the Horizon</field>
     <field name="directed_by">Markus Canter</field>
     <field name="directed_by">Mason Canter</field>
+    <field name="genre">Documentary film</field>
+    <field name="genre">Auto racing</field>
   </doc>
   <doc>
     <field name="id">/en/chathikkatha_chanthu</field>
-    <field name="name">Chathikkatha Chanthu</field>
     <field name="initial_release_date">2004-04-14</field>
+    <field name="name">Chathikkatha Chanthu</field>
+    <field name="directed_by">Meccartin</field>
     <field name="genre">Comedy</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Meccartin</field>
   </doc>
   <doc>
     <field name="id">/en/chatrapati</field>
-    <field name="name">Chhatrapati</field>
     <field name="initial_release_date">2005-09-25</field>
+    <field name="name">Chhatrapati</field>
+    <field name="directed_by">S. S. Rajamouli</field>
     <field name="genre">Action Film</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">S. S. Rajamouli</field>
   </doc>
   <doc>
     <field name="id">/en/cheaper_by_the_dozen_2003</field>
-    <field name="name">Cheaper by the Dozen</field>
     <field name="initial_release_date">2003-12-25</field>
+    <field name="name">Cheaper by the Dozen</field>
+    <field name="directed_by">Shawn Levy</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Shawn Levy</field>
   </doc>
   <doc>
     <field name="id">/en/cheaper_by_the_dozen_2</field>
-    <field name="name">Cheaper by the Dozen 2</field>
     <field name="initial_release_date">2005-12-21</field>
+    <field name="name">Cheaper by the Dozen 2</field>
+    <field name="directed_by">Adam Shankman</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Domestic Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Adam Shankman</field>
   </doc>
   <doc>
     <field name="id">/en/checking_out_2005</field>
-    <field name="name">Checking Out</field>
     <field name="initial_release_date">2005-04-10</field>
+    <field name="name">Checking Out</field>
+    <field name="directed_by">Jeff Hare</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Jeff Hare</field>
   </doc>
   <doc>
     <field name="id">/en/chellamae</field>
-    <field name="name">Chellamae</field>
     <field name="initial_release_date">2004-09-10</field>
+    <field name="name">Chellamae</field>
+    <field name="directed_by">Gandhi Krishna</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Gandhi Krishna</field>
   </doc>
   <doc>
     <field name="id">/en/chemman_chaalai</field>
     <field name="name">Chemman Chaalai</field>
+    <field name="directed_by">Deepak Kumaran Menon</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Deepak Kumaran Menon</field>
   </doc>
   <doc>
     <field name="id">/en/chennaiyil_oru_mazhai_kaalam</field>
@@ -5635,84 +5628,85 @@
   </doc>
   <doc>
     <field name="id">/en/cher_the_farewell_tour_live_in_miami</field>
-    <field name="name">The Farewell Tour</field>
     <field name="initial_release_date">2003-08-26</field>
-    <field name="genre">Music video</field>
+    <field name="name">The Farewell Tour</field>
     <field name="directed_by">Dorina Sanchez</field>
     <field name="directed_by">David Mallet</field>
+    <field name="genre">Music video</field>
   </doc>
   <doc>
     <field name="id">/en/cherry_falls</field>
-    <field name="name">Cherry Falls</field>
     <field name="initial_release_date">2000-07-29</field>
+    <field name="name">Cherry Falls</field>
+    <field name="directed_by">Geoffrey Wright</field>
     <field name="genre">Satire</field>
     <field name="genre">Slasher</field>
     <field name="genre">Indie film</field>
     <field name="genre">Horror</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Geoffrey Wright</field>
   </doc>
   <doc>
     <field name="id">/wikipedia/en_title/Chess_$00282006_film$0029</field>
-    <field name="name">Chess</field>
     <field name="initial_release_date">2006-07-07</field>
+    <field name="name">Chess</field>
+    <field name="directed_by">RajBabu</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">RajBabu</field>
   </doc>
   <doc>
     <field name="id">/en/chica_de_rio</field>
-    <field name="name">Girl from Rio</field>
     <field name="initial_release_date">2003-04-11</field>
+    <field name="name">Girl from Rio</field>
+    <field name="directed_by">Christopher Monger</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Christopher Monger</field>
   </doc>
   <doc>
     <field name="id">/en/chicago_2002</field>
-    <field name="name">Chicago</field>
     <field name="initial_release_date">2002-12-10</field>
+    <field name="name">Chicago</field>
+    <field name="directed_by">Rob Marshall</field>
     <field name="genre">Musical</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Musical comedy</field>
-    <field name="directed_by">Rob Marshall</field>
   </doc>
   <doc>
     <field name="id">/en/chicken_little</field>
-    <field name="name">Chicken Little</field>
     <field name="initial_release_date">2005-10-30</field>
+    <field name="name">Chicken Little</field>
+    <field name="directed_by">Mark Dindal</field>
     <field name="genre">Animation</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Mark Dindal</field>
   </doc>
   <doc>
     <field name="id">/en/chicken_run</field>
-    <field name="name">Chicken Run</field>
     <field name="initial_release_date">2000-06-21</field>
+    <field name="name">Chicken Run</field>
+    <field name="directed_by">Peter Lord</field>
+    <field name="directed_by">Nick Park</field>
     <field name="genre">Family</field>
     <field name="genre">Animation</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Peter Lord</field>
-    <field name="directed_by">Nick Park</field>
   </doc>
   <doc>
     <field name="id">/en/child_marriage_2005</field>
     <field name="name">Child Marriage</field>
-    <field name="genre">Documentary film</field>
     <field name="directed_by">Neeraj Kumar</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
     <field name="id">/en/children_of_men</field>
-    <field name="name">Children of Men</field>
     <field name="initial_release_date">2006-09-03</field>
+    <field name="name">Children of Men</field>
+    <field name="directed_by">Alfonso Cuarón</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Science Fiction</field>
@@ -5724,31 +5718,31 @@
     <field name="genre">Film adaptation</field>
     <field name="genre">Action Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Alfonso Cuarón</field>
   </doc>
   <doc>
     <field name="id">/en/children_of_the_corn_revelation</field>
-    <field name="name">Children of the Corn: Revelation</field>
     <field name="initial_release_date">2001-10-09</field>
+    <field name="name">Children of the Corn: Revelation</field>
+    <field name="directed_by">Guy Magar</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Cult film</field>
-    <field name="directed_by">Guy Magar</field>
   </doc>
   <doc>
     <field name="id">/en/children_of_the_living_dead</field>
     <field name="name">Children of the Living Dead</field>
+    <field name="directed_by">Tor Ramsey</field>
     <field name="genre">Indie film</field>
     <field name="genre">Teen film</field>
     <field name="genre">Horror</field>
     <field name="genre">Zombie Film</field>
     <field name="genre">Horror comedy</field>
-    <field name="directed_by">Tor Ramsey</field>
   </doc>
   <doc>
     <field name="id">/en/chinthamani_kolacase</field>
-    <field name="name">Chinthamani Kolacase</field>
     <field name="initial_release_date">2006-03-31</field>
+    <field name="name">Chinthamani Kolacase</field>
+    <field name="directed_by">Shaji Kailas</field>
     <field name="genre">Horror</field>
     <field name="genre">Mystery</field>
     <field name="genre">Crime Fiction</field>
@@ -5756,7 +5750,6 @@
     <field name="genre">Thriller</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Shaji Kailas</field>
   </doc>
   <doc>
     <field name="id">/en/chips_2008</field>
@@ -5766,57 +5759,58 @@
   </doc>
   <doc>
     <field name="id">/en/chithiram_pesuthadi</field>
-    <field name="name">Chithiram Pesuthadi</field>
     <field name="initial_release_date">2006-02-10</field>
+    <field name="name">Chithiram Pesuthadi</field>
+    <field name="directed_by">Mysskin</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Mysskin</field>
   </doc>
   <doc>
     <field name="id">/en/chocolat_2000</field>
-    <field name="name">Chocolat</field>
     <field name="initial_release_date">2000-12-15</field>
+    <field name="name">Chocolat</field>
+    <field name="directed_by">Lasse Hallström</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Lasse Hallström</field>
   </doc>
   <doc>
     <field name="id">/en/choose_your_own_adventure_the_abominable_snowman</field>
-    <field name="name">Choose Your Own Adventure The Abominable Snowman</field>
     <field name="initial_release_date">2006-07-25</field>
+    <field name="name">Choose Your Own Adventure The Abominable Snowman</field>
+    <field name="directed_by">Bob Doucette</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Family</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Family-Oriented Adventure</field>
     <field name="genre">Animation</field>
-    <field name="directed_by">Bob Doucette</field>
   </doc>
   <doc>
     <field name="id">/en/chopin_desire_for_love</field>
-    <field name="name">Chopin: Desire for Love</field>
     <field name="initial_release_date">2002-03-01</field>
+    <field name="name">Chopin: Desire for Love</field>
+    <field name="directed_by">Jerzy Antczak</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Music</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jerzy Antczak</field>
   </doc>
   <doc>
     <field name="id">/en/chopper</field>
-    <field name="name">Chopper</field>
     <field name="initial_release_date">2000-08-03</field>
+    <field name="name">Chopper</field>
+    <field name="directed_by">Andrew Dominik</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Andrew Dominik</field>
   </doc>
   <doc>
     <field name="id">/en/chori_chori_2003</field>
-    <field name="name">Chori Chori</field>
     <field name="initial_release_date">2003-08-01</field>
+    <field name="name">Chori Chori</field>
+    <field name="directed_by">Milan Luthria</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical</field>
     <field name="genre">Romantic comedy</field>
@@ -5826,25 +5820,25 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Milan Luthria</field>
   </doc>
   <doc>
     <field name="id">/en/chori_chori_chupke_chupke</field>
-    <field name="name">Chori Chori Chupke Chupke</field>
     <field name="initial_release_date">2001-03-09</field>
+    <field name="name">Chori Chori Chupke Chupke</field>
+    <field name="directed_by">Abbas Burmawalla</field>
+    <field name="directed_by">Mustan Burmawalla</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Abbas Burmawalla</field>
-    <field name="directed_by">Mustan Burmawalla</field>
   </doc>
   <doc>
     <field name="id">/en/christinas_house</field>
-    <field name="name">Christina's House</field>
     <field name="initial_release_date">2000-02-24</field>
+    <field name="name">Christina's House</field>
+    <field name="directed_by">Gavin Wilding</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Horror</field>
@@ -5852,196 +5846,196 @@
     <field name="genre">Slasher</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Gavin Wilding</field>
   </doc>
   <doc>
     <field name="id">/en/christmas_with_the_kranks</field>
-    <field name="name">Christmas with the Kranks</field>
     <field name="initial_release_date">2004-11-24</field>
+    <field name="name">Christmas with the Kranks</field>
+    <field name="directed_by">Joe Roth</field>
     <field name="genre">Christmas movie</field>
     <field name="genre">Family</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Slapstick</field>
     <field name="genre">Holiday Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Joe Roth</field>
   </doc>
   <doc>
     <field name="id">/en/chromophobia</field>
-    <field name="name">Chromophobia</field>
     <field name="initial_release_date">2005-05-21</field>
+    <field name="name">Chromophobia</field>
+    <field name="directed_by">Martha Fiennes</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Martha Fiennes</field>
   </doc>
   <doc>
     <field name="id">/en/chubby_killer</field>
     <field name="name">Chubby Killer</field>
+    <field name="directed_by">Reuben Rox</field>
     <field name="genre">Slasher</field>
     <field name="genre">Indie film</field>
     <field name="genre">Horror</field>
-    <field name="directed_by">Reuben Rox</field>
   </doc>
   <doc>
     <field name="id">/en/chukkallo_chandrudu</field>
-    <field name="name">Chukkallo Chandrudu</field>
     <field name="initial_release_date">2006-01-14</field>
+    <field name="name">Chukkallo Chandrudu</field>
+    <field name="directed_by">Siva Kumar</field>
     <field name="genre">Comedy</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Siva Kumar</field>
   </doc>
   <doc>
     <field name="id">/en/chup_chup_ke</field>
-    <field name="name">Chup Chup Ke</field>
     <field name="initial_release_date">2006-06-09</field>
+    <field name="name">Chup Chup Ke</field>
+    <field name="directed_by">Priyadarshan</field>
+    <field name="directed_by">Kookie Gulati</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Priyadarshan</field>
-    <field name="directed_by">Kookie Gulati</field>
   </doc>
   <doc>
     <field name="id">/en/church_ball</field>
-    <field name="name">Church Ball</field>
     <field name="initial_release_date">2006-03-17</field>
+    <field name="name">Church Ball</field>
+    <field name="directed_by">Kurt Hale</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Kurt Hale</field>
   </doc>
   <doc>
     <field name="id">/en/churchill_the_hollywood_years</field>
-    <field name="name">Churchill: The Hollywood Years</field>
     <field name="initial_release_date">2004-12-03</field>
+    <field name="name">Churchill: The Hollywood Years</field>
+    <field name="directed_by">Peter Richardson</field>
     <field name="genre">Satire</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Peter Richardson</field>
   </doc>
   <doc>
     <field name="id">/en/cinderella_iii</field>
-    <field name="name">Cinderella III: A Twist in Time</field>
     <field name="initial_release_date">2007-02-06</field>
+    <field name="name">Cinderella III: A Twist in Time</field>
+    <field name="directed_by">Frank Nissen</field>
     <field name="genre">Family</field>
     <field name="genre">Animated cartoon</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Animation</field>
     <field name="genre">Children's/Family</field>
-    <field name="directed_by">Frank Nissen</field>
   </doc>
   <doc>
     <field name="id">/en/cinderella_man</field>
-    <field name="name">Cinderella Man</field>
     <field name="initial_release_date">2005-05-23</field>
+    <field name="name">Cinderella Man</field>
+    <field name="directed_by">Ron Howard</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ron Howard</field>
   </doc>
   <doc>
     <field name="id">/en/cinemania</field>
     <field name="name">Cinemania</field>
-    <field name="genre">Documentary film</field>
-    <field name="genre">Culture &amp;amp; Society</field>
     <field name="directed_by">Angela Christlieb</field>
     <field name="directed_by">Stephen Kijak</field>
+    <field name="genre">Documentary film</field>
+    <field name="genre">Culture &amp;amp; Society</field>
   </doc>
   <doc>
     <field name="id">/en/city_of_ghosts</field>
-    <field name="name">City of Ghosts</field>
     <field name="initial_release_date">2003-03-27</field>
+    <field name="name">City of Ghosts</field>
+    <field name="directed_by">Matt Dillon</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Matt Dillon</field>
   </doc>
   <doc>
     <field name="id">/en/city_of_god</field>
-    <field name="name">City of God</field>
     <field name="initial_release_date">2002-05-18</field>
+    <field name="name">City of God</field>
+    <field name="directed_by">Fernando Meirelles</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Fernando Meirelles</field>
   </doc>
   <doc>
     <field name="id">/en/claustrophobia_2003</field>
     <field name="name">Claustrophobia</field>
+    <field name="directed_by">Mark Tapio Kines</field>
     <field name="genre">Slasher</field>
     <field name="genre">Horror</field>
-    <field name="directed_by">Mark Tapio Kines</field>
   </doc>
   <doc>
     <field name="id">/en/clean</field>
-    <field name="name">Clean</field>
     <field name="initial_release_date">2004-03-27</field>
+    <field name="name">Clean</field>
+    <field name="directed_by">Olivier Assayas</field>
     <field name="genre">Music</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Olivier Assayas</field>
   </doc>
   <doc>
     <field name="id">/en/clear_cut_the_story_of_philomath_oregon</field>
-    <field name="name">Clear Cut: The Story of Philomath, Oregon</field>
     <field name="initial_release_date">2006-01-20</field>
-    <field name="genre">Documentary film</field>
+    <field name="name">Clear Cut: The Story of Philomath, Oregon</field>
     <field name="directed_by">Peter Richardson</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
     <field name="id">/en/clerks_ii</field>
-    <field name="name">Clerks II</field>
     <field name="initial_release_date">2006-05-26</field>
+    <field name="name">Clerks II</field>
+    <field name="directed_by">Kevin Smith</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Workplace Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Kevin Smith</field>
   </doc>
   <doc>
     <field name="id">/en/click</field>
-    <field name="name">Click</field>
     <field name="initial_release_date">2006-06-22</field>
+    <field name="name">Click</field>
+    <field name="directed_by">Frank Coraci</field>
     <field name="genre">Comedy</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Frank Coraci</field>
   </doc>
   <doc>
     <field name="id">/en/clockstoppers</field>
-    <field name="name">Clockstoppers</field>
     <field name="initial_release_date">2002-03-29</field>
+    <field name="name">Clockstoppers</field>
+    <field name="directed_by">Jonathan Frakes</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Teen film</field>
     <field name="genre">Family</field>
     <field name="genre">Thriller</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Jonathan Frakes</field>
   </doc>
   <doc>
     <field name="id">/en/closer_2004</field>
-    <field name="name">Closer</field>
     <field name="initial_release_date">2004-12-03</field>
+    <field name="name">Closer</field>
+    <field name="directed_by">Mike Nichols</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Mike Nichols</field>
   </doc>
   <doc>
     <field name="id">/en/closing_the_ring</field>
-    <field name="name">Closing the Ring</field>
     <field name="initial_release_date">2007-09-14</field>
+    <field name="name">Closing the Ring</field>
+    <field name="directed_by">Richard Attenborough</field>
     <field name="genre">War film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Richard Attenborough</field>
   </doc>
   <doc>
     <field name="id">/en/club_dread</field>
-    <field name="name">Club Dread</field>
     <field name="initial_release_date">2004-02-27</field>
+    <field name="name">Club Dread</field>
+    <field name="directed_by">Jay Chandrasekhar</field>
     <field name="genre">Parody</field>
     <field name="genre">Horror</field>
     <field name="genre">Slasher</field>
@@ -6049,85 +6043,85 @@
     <field name="genre">Indie film</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Jay Chandrasekhar</field>
   </doc>
   <doc>
     <field name="id">/en/coach_carter</field>
-    <field name="name">Coach Carter</field>
     <field name="initial_release_date">2005-01-13</field>
+    <field name="name">Coach Carter</field>
+    <field name="directed_by">Thomas Carter</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Sports</field>
     <field name="genre">Docudrama</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Thomas Carter</field>
   </doc>
   <doc>
     <field name="id">/en/coast_guard_2002</field>
-    <field name="name">The Coast Guard</field>
     <field name="initial_release_date">2002-11-14</field>
+    <field name="name">The Coast Guard</field>
+    <field name="directed_by">Kim Ki-duk</field>
     <field name="genre">Action Film</field>
     <field name="genre">War film</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Kim Ki-duk</field>
   </doc>
   <doc>
     <field name="id">/en/code_46</field>
-    <field name="name">Code 46</field>
     <field name="initial_release_date">2004-05-07</field>
+    <field name="name">Code 46</field>
+    <field name="directed_by">Michael Winterbottom</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Winterbottom</field>
   </doc>
   <doc>
     <field name="id">/en/codename_kids_next_door_operation_z_e_r_o</field>
-    <field name="name">Codename: Kids Next Door: Operation Z.E.R.O.</field>
     <field name="initial_release_date">2006-01-13</field>
+    <field name="name">Codename: Kids Next Door: Operation Z.E.R.O.</field>
+    <field name="directed_by">Tom Warburton</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Animation</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="genre">Crime Fiction</field>
-    <field name="directed_by">Tom Warburton</field>
   </doc>
   <doc>
     <field name="id">/en/coffee_and_cigarettes</field>
-    <field name="name">Coffee and Cigarettes</field>
     <field name="initial_release_date">2003-09-05</field>
+    <field name="name">Coffee and Cigarettes</field>
+    <field name="directed_by">Jim Jarmusch</field>
     <field name="genre">Music</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jim Jarmusch</field>
   </doc>
   <doc>
     <field name="id">/en/cold_creek_manor</field>
-    <field name="name">Cold Creek Manor</field>
     <field name="initial_release_date">2003-09-19</field>
+    <field name="name">Cold Creek Manor</field>
+    <field name="directed_by">Mike Figgis</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Mike Figgis</field>
   </doc>
   <doc>
     <field name="id">/en/cold_mountain</field>
-    <field name="name">Cold Mountain</field>
     <field name="initial_release_date">2003-12-25</field>
+    <field name="name">Cold Mountain</field>
+    <field name="directed_by">Anthony Minghella</field>
     <field name="genre">War film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Anthony Minghella</field>
   </doc>
   <doc>
     <field name="id">/en/cold_showers</field>
-    <field name="name">Cold Showers</field>
     <field name="initial_release_date">2005-05-22</field>
+    <field name="name">Cold Showers</field>
+    <field name="directed_by">Antony Cordier</field>
     <field name="genre">Coming of age</field>
     <field name="genre">LGBT</field>
     <field name="genre">World cinema</field>
@@ -6135,98 +6129,96 @@
     <field name="genre">Teen film</field>
     <field name="genre">Erotic Drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Antony Cordier</field>
   </doc>
   <doc>
     <field name="id">/en/collateral</field>
-    <field name="name">Collateral</field>
     <field name="initial_release_date">2004-08-05</field>
+    <field name="name">Collateral</field>
+    <field name="directed_by">Michael Mann</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Film noir</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Mann</field>
   </doc>
   <doc>
     <field name="id">/en/collateral_damage_2002</field>
-    <field name="name">Collateral Damage</field>
     <field name="initial_release_date">2002-02-04</field>
+    <field name="name">Collateral Damage</field>
+    <field name="directed_by">Andrew Davis</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Andrew Davis</field>
   </doc>
   <doc>
     <field name="id">/en/comedian_2002</field>
-    <field name="name">Comedian</field>
     <field name="initial_release_date">2002-10-11</field>
+    <field name="name">Comedian</field>
+    <field name="directed_by">Christian Charles</field>
     <field name="genre">Indie film</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Biographical film</field>
-    <field name="directed_by">Christian Charles</field>
   </doc>
   <doc>
     <field name="id">/en/coming_out_2006</field>
     <field name="name">Coming Out</field>
+    <field name="directed_by">Joel Zwick</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Joel Zwick</field>
   </doc>
   <doc>
     <field name="id">/en/commitments</field>
-    <field name="name">Commitments</field>
     <field name="initial_release_date">2001-05-04</field>
+    <field name="name">Commitments</field>
+    <field name="directed_by">Carol Mayes</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Carol Mayes</field>
   </doc>
   <doc>
     <field name="id">/en/common_ground_2000</field>
-    <field name="name">Common Ground</field>
     <field name="initial_release_date">2000-01-29</field>
+    <field name="name">Common Ground</field>
+    <field name="directed_by">Donna Deitch</field>
     <field name="genre">LGBT</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Donna Deitch</field>
   </doc>
   <doc>
     <field name="id">/en/company_2002</field>
-    <field name="name">Company</field>
     <field name="initial_release_date">2002-04-15</field>
+    <field name="name">Company</field>
+    <field name="directed_by">Ram Gopal Varma</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ram Gopal Varma</field>
   </doc>
   <doc>
     <field name="id">/en/confessions_of_a_dangerous_mind</field>
     <field name="name">Confessions of a Dangerous Mind</field>
+    <field name="directed_by">George Clooney</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">George Clooney</field>
   </doc>
   <doc>
     <field name="id">/en/confessions_of_a_teenage_drama_queen</field>
-    <field name="name">Confessions of a Teenage Drama Queen</field>
     <field name="initial_release_date">2004-02-17</field>
     <field name="genre">Family</field>
     <field name="genre">Teen film</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Romantic comedy</field>
     <field name="directed_by">Sara Sugarman</field>
+    <field name="name">Confessions of a Teenage Drama Queen</field>
   </doc>
   <doc>
     <field name="id">/en/confetti_2006</field>
-    <field name="name">Confetti</field>
     <field name="initial_release_date">2006-05-05</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Romantic comedy</field>
@@ -6235,29 +6227,29 @@
     <field name="genre">Music</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Debbie Isitt</field>
+    <field name="name">Confetti</field>
   </doc>
   <doc>
     <field name="id">/en/confidence_2004</field>
-    <field name="name">Confidence</field>
     <field name="initial_release_date">2003-01-20</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
     <field name="directed_by">James Foley</field>
+    <field name="name">Confidence</field>
   </doc>
   <doc>
     <field name="id">/en/connie_and_carla</field>
-    <field name="name">Connie and Carla</field>
     <field name="initial_release_date">2004-04-16</field>
     <field name="genre">LGBT</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Comedy of Errors</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Michael Lembeck</field>
+    <field name="name">Connie and Carla</field>
   </doc>
   <doc>
     <field name="id">/en/conspiracy_2001</field>
-    <field name="name">Conspiracy</field>
     <field name="initial_release_date">2001-05-19</field>
     <field name="genre">History</field>
     <field name="genre">War film</field>
@@ -6265,19 +6257,19 @@
     <field name="genre">Historical period drama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Frank Pierson</field>
+    <field name="name">Conspiracy</field>
   </doc>
   <doc>
     <field name="id">/en/constantine_2005</field>
-    <field name="name">Constantine</field>
     <field name="initial_release_date">2005-02-08</field>
     <field name="genre">Horror</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action Film</field>
     <field name="directed_by">Francis Lawrence</field>
+    <field name="name">Constantine</field>
   </doc>
   <doc>
     <field name="id">/en/control_room</field>
-    <field name="name">Control Room</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Culture &amp;amp; Society</field>
@@ -6285,10 +6277,10 @@
     <field name="genre">Journalism</field>
     <field name="genre">Media studies</field>
     <field name="directed_by">Jehane Noujaim</field>
+    <field name="name">Control Room</field>
   </doc>
   <doc>
     <field name="id">/en/control_the_ian_curtis_film</field>
-    <field name="name">Control</field>
     <field name="initial_release_date">2007-05-17</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Indie film</field>
@@ -6297,36 +6289,36 @@
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="directed_by">Anton Corbijn</field>
+    <field name="name">Control</field>
   </doc>
   <doc>
     <field name="id">/en/cope_2005</field>
-    <field name="name">Cope</field>
     <field name="initial_release_date">2007-01-23</field>
     <field name="genre">Horror</field>
     <field name="genre">B movie</field>
     <field name="directed_by">Ronald Jackson</field>
     <field name="directed_by">Ronald Jerry</field>
+    <field name="name">Cope</field>
   </doc>
   <doc>
     <field name="id">/en/copying_beethoven</field>
-    <field name="name">Copying Beethoven</field>
     <field name="initial_release_date">2006-07-30</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Music</field>
     <field name="genre">Historical fiction</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Agnieszka Holland</field>
+    <field name="name">Copying Beethoven</field>
   </doc>
   <doc>
     <field name="id">/en/corporate</field>
-    <field name="name">Corporate</field>
     <field name="initial_release_date">2006-07-07</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Madhur Bhandarkar</field>
+    <field name="name">Corporate</field>
   </doc>
   <doc>
     <field name="id">/en/corpse_bride</field>
-    <field name="name">Corpse Bride</field>
     <field name="initial_release_date">2005-09-07</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Animation</field>
@@ -6334,18 +6326,18 @@
     <field name="genre">Romance Film</field>
     <field name="directed_by">Tim Burton</field>
     <field name="directed_by">Mike Johnson</field>
+    <field name="name">Corpse Bride</field>
   </doc>
   <doc>
     <field name="id">/en/covert_one_the_hades_factor</field>
-    <field name="name">Covert One: The Hades Factor</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="directed_by">Mick Jackson</field>
+    <field name="name">Covert One: The Hades Factor</field>
   </doc>
   <doc>
     <field name="id">/en/cow_belles</field>
-    <field name="name">Cow Belles</field>
     <field name="initial_release_date">2006-03-24</field>
     <field name="genre">Family</field>
     <field name="genre">Television film</field>
@@ -6353,10 +6345,10 @@
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Francine McDougall</field>
+    <field name="name">Cow Belles</field>
   </doc>
   <doc>
     <field name="id">/en/cowards_bend_the_knee</field>
-    <field name="name">Cowards Bend the Knee</field>
     <field name="initial_release_date">2003-02-26</field>
     <field name="genre">Silent film</field>
     <field name="genre">Indie film</field>
@@ -6367,10 +6359,10 @@
     <field name="genre">Avant-garde</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Guy Maddin</field>
+    <field name="name">Cowards Bend the Knee</field>
   </doc>
   <doc>
     <field name="id">/en/cowboy_bebop_the_movie</field>
-    <field name="name">Cowboy Bebop: The Movie</field>
     <field name="initial_release_date">2001-09-01</field>
     <field name="genre">Anime</field>
     <field name="genre">Science Fiction</field>
@@ -6379,10 +6371,10 @@
     <field name="genre">Comedy</field>
     <field name="genre">Crime Fiction</field>
     <field name="directed_by">Shinichirō Watanabe</field>
+    <field name="name">Cowboy Bebop: The Movie</field>
   </doc>
   <doc>
     <field name="id">/en/coyote_ugly</field>
-    <field name="name">Coyote Ugly</field>
     <field name="initial_release_date">2000-07-31</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
@@ -6391,17 +6383,17 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Musical Drama</field>
     <field name="directed_by">David McNally</field>
+    <field name="name">Coyote Ugly</field>
   </doc>
   <doc>
     <field name="id">/en/crackerjack_2002</field>
-    <field name="name">Crackerjack</field>
     <field name="initial_release_date">2002-11-07</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Paul Moloney</field>
+    <field name="name">Crackerjack</field>
   </doc>
   <doc>
     <field name="id">/en/cradle_2_the_grave</field>
-    <field name="name">Cradle 2 the Grave</field>
     <field name="initial_release_date">2003-02-28</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Thriller</field>
@@ -6412,18 +6404,18 @@
     <field name="genre">Adventure Film</field>
     <field name="genre">Crime</field>
     <field name="directed_by">Andrzej Bartkowiak</field>
+    <field name="name">Cradle 2 the Grave</field>
   </doc>
   <doc>
     <field name="id">/en/cradle_of_fear</field>
-    <field name="name">Cradle of Fear</field>
     <field name="genre">Horror</field>
     <field name="genre">B movie</field>
     <field name="genre">Slasher</field>
     <field name="directed_by">Alex Chandon</field>
+    <field name="name">Cradle of Fear</field>
   </doc>
   <doc>
     <field name="id">/en/crank</field>
-    <field name="name">Crank</field>
     <field name="initial_release_date">2006-08-31</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
@@ -6432,37 +6424,37 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">Action Thriller</field>
     <field name="directed_by">Neveldine/Taylor</field>
+    <field name="name">Crank</field>
   </doc>
   <doc>
     <field name="id">/en/crash_2004</field>
-    <field name="name">Crash</field>
     <field name="initial_release_date">2004-09-10</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Paul Haggis</field>
+    <field name="name">Crash</field>
   </doc>
   <doc>
     <field name="id">/en/crazy_beautiful</field>
-    <field name="name">Crazy/Beautiful</field>
     <field name="initial_release_date">2001-06-28</field>
     <field name="genre">Teen film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">John Stockwell</field>
+    <field name="name">Crazy/Beautiful</field>
   </doc>
   <doc>
     <field name="id">/en/creep_2005</field>
-    <field name="name">Creep</field>
     <field name="initial_release_date">2004-08-10</field>
     <field name="genre">Horror</field>
     <field name="genre">Mystery</field>
     <field name="genre">Thriller</field>
     <field name="directed_by">Christopher Smith</field>
+    <field name="name">Creep</field>
   </doc>
   <doc>
     <field name="id">/en/criminal</field>
-    <field name="name">Criminal</field>
     <field name="initial_release_date">2004-09-10</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
@@ -6472,27 +6464,27 @@
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Gregory Jacobs</field>
+    <field name="name">Criminal</field>
   </doc>
   <doc>
     <field name="id">/en/crimson_gold</field>
-    <field name="name">Crimson Gold</field>
     <field name="genre">World cinema</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Jafar Panahi</field>
+    <field name="name">Crimson Gold</field>
   </doc>
   <doc>
     <field name="id">/en/crimson_rivers_ii_angels_of_the_apocalypse</field>
-    <field name="name">Crimson Rivers II: Angels of the Apocalypse</field>
     <field name="initial_release_date">2004-02-18</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="directed_by">Olivier Dahan</field>
+    <field name="name">Crimson Rivers II: Angels of the Apocalypse</field>
   </doc>
   <doc>
     <field name="id">/en/crocodile_2000</field>
-    <field name="name">Crocodile</field>
     <field name="initial_release_date">2000-12-26</field>
     <field name="genre">Horror</field>
     <field name="genre">Natural horror film</field>
@@ -6501,10 +6493,10 @@
     <field name="genre">Action Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="directed_by">Tobe Hooper</field>
+    <field name="name">Crocodile</field>
   </doc>
   <doc>
     <field name="id">/en/crocodile_2_death_swamp</field>
-    <field name="name">Crocodile 2: Death Swamp</field>
     <field name="initial_release_date">2002-08-01</field>
     <field name="genre">Horror</field>
     <field name="genre">Natural horror film</field>
@@ -6516,10 +6508,10 @@
     <field name="genre">Action Thriller</field>
     <field name="genre">Creature Film</field>
     <field name="directed_by">Gary Jones</field>
+    <field name="name">Crocodile 2: Death Swamp</field>
   </doc>
   <doc>
     <field name="id">/en/crocodile_dundee_in_los_angeles</field>
-    <field name="name">Crocodile Dundee in Los Angeles</field>
     <field name="initial_release_date">2001-04-12</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
@@ -6529,20 +6521,20 @@
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Simon Wincer</field>
+    <field name="name">Crocodile Dundee in Los Angeles</field>
   </doc>
   <doc>
     <field name="id">/en/crossing_the_bridge_the_sound_of_istanbul</field>
-    <field name="name">Crossing the Bridge: The Sound of Istanbul</field>
     <field name="initial_release_date">2005-06-09</field>
     <field name="genre">Musical</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Music</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="directed_by">Fatih Akın</field>
+    <field name="name">Crossing the Bridge: The Sound of Istanbul</field>
   </doc>
   <doc>
     <field name="id">/en/crossover_2006</field>
-    <field name="name">Crossover</field>
     <field name="initial_release_date">2006-09-01</field>
     <field name="genre">Action Film</field>
     <field name="genre">Coming of age</field>
@@ -6552,10 +6544,10 @@
     <field name="genre">Fantasy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Preston A. Whitmore II</field>
+    <field name="name">Crossover</field>
   </doc>
   <doc>
     <field name="id">/en/crossroads_2002</field>
-    <field name="name">Crossroads</field>
     <field name="initial_release_date">2002-02-11</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Teen film</field>
@@ -6569,20 +6561,20 @@
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Tamra Davis</field>
+    <field name="name">Crossroads</field>
   </doc>
   <doc>
     <field name="id">/en/crouching_tiger_hidden_dragon</field>
-    <field name="name">Crouching Tiger, Hidden Dragon</field>
     <field name="initial_release_date">2000-05-16</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Ang Lee</field>
+    <field name="name">Crouching Tiger, Hidden Dragon</field>
   </doc>
   <doc>
     <field name="id">/en/cruel_intentions_3</field>
-    <field name="name">Cruel Intentions 3</field>
     <field name="initial_release_date">2004-05-25</field>
     <field name="genre">Erotica</field>
     <field name="genre">Thriller</field>
@@ -6594,10 +6586,10 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Scott Ziehl</field>
+    <field name="name">Cruel Intentions 3</field>
   </doc>
   <doc>
     <field name="id">/en/crustaces_et_coquillages</field>
-    <field name="name">Crustacés et Coquillages</field>
     <field name="initial_release_date">2005-02-12</field>
     <field name="genre">Musical</field>
     <field name="genre">Romantic comedy</field>
@@ -6610,10 +6602,10 @@
     <field name="genre">Drama</field>
     <field name="directed_by">Jacques Martineau</field>
     <field name="directed_by">Olivier Ducastel</field>
+    <field name="name">Crustacés et Coquillages</field>
   </doc>
   <doc>
     <field name="id">/en/cry_wolf</field>
-    <field name="name">Cry_Wolf</field>
     <field name="initial_release_date">2005-09-16</field>
     <field name="genre">Slasher</field>
     <field name="genre">Horror</field>
@@ -6621,10 +6613,10 @@
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Jeff Wadlow</field>
+    <field name="name">Cry_Wolf</field>
   </doc>
   <doc>
     <field name="id">/en/cube_2_hypercube</field>
-    <field name="name">Cube 2: Hypercube</field>
     <field name="initial_release_date">2002-04-15</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
@@ -6632,77 +6624,77 @@
     <field name="genre">Thriller</field>
     <field name="genre">Escape Film</field>
     <field name="directed_by">Andrzej Sekuła</field>
+    <field name="name">Cube 2: Hypercube</field>
   </doc>
   <doc>
     <field name="id">/en/curious_george_2006</field>
-    <field name="name">Curious George</field>
     <field name="initial_release_date">2006-02-10</field>
     <field name="genre">Animation</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Matthew O'Callaghan</field>
+    <field name="name">Curious George</field>
   </doc>
   <doc>
     <field name="id">/en/curse_of_the_golden_flower</field>
-    <field name="name">Curse of the Golden Flower</field>
     <field name="initial_release_date">2006-12-21</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Zhang Yimou</field>
+    <field name="name">Curse of the Golden Flower</field>
   </doc>
   <doc>
     <field name="id">/en/cursed</field>
-    <field name="name">Cursed</field>
     <field name="initial_release_date">2004-11-07</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Wes Craven</field>
+    <field name="name">Cursed</field>
   </doc>
   <doc>
     <field name="id">/en/d-tox</field>
-    <field name="name">D-Tox</field>
     <field name="initial_release_date">2002-01-04</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Mystery</field>
     <field name="directed_by">Jim Gillespie</field>
+    <field name="name">D-Tox</field>
   </doc>
   <doc>
     <field name="id">/en/daddy</field>
-    <field name="name">Daddy</field>
     <field name="initial_release_date">2001-10-04</field>
     <field name="genre">Family</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Suresh Krissna</field>
+    <field name="name">Daddy</field>
   </doc>
   <doc>
     <field name="id">/en/daddy_day_care</field>
-    <field name="name">Daddy Day Care</field>
     <field name="initial_release_date">2003-05-04</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Steve Carr</field>
+    <field name="name">Daddy Day Care</field>
   </doc>
   <doc>
     <field name="id">/en/daddy_long-legs</field>
-    <field name="name">Daddy-Long-Legs</field>
     <field name="initial_release_date">2005-01-13</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Gong Jeong-shik</field>
+    <field name="name">Daddy-Long-Legs</field>
   </doc>
   <doc>
     <field name="id">/en/dahmer_2002</field>
-    <field name="name">Dahmer</field>
     <field name="initial_release_date">2002-06-21</field>
     <field name="genre">Thriller</field>
     <field name="genre">Biographical film</field>
@@ -6715,38 +6707,38 @@
     <field name="genre">Slasher</field>
     <field name="genre">Drama</field>
     <field name="directed_by">David Jacobson</field>
+    <field name="name">Dahmer</field>
   </doc>
   <doc>
     <field name="id">/en/daisy_2006</field>
-    <field name="name">Daisy</field>
     <field name="initial_release_date">2006-03-09</field>
     <field name="genre">Chinese Movies</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Melodrama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Andrew Lau</field>
+    <field name="name">Daisy</field>
   </doc>
   <doc>
     <field name="id">/en/daivanamathil</field>
-    <field name="name">Daivanamathil</field>
     <field name="genre">Drama</field>
     <field name="genre">Malayalam Cinema</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Jayaraj</field>
+    <field name="name">Daivanamathil</field>
   </doc>
   <doc>
     <field name="id">/en/daltry_calhoun</field>
-    <field name="name">Daltry Calhoun</field>
     <field name="initial_release_date">2005-09-25</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Katrina Holden Bronson</field>
+    <field name="name">Daltry Calhoun</field>
   </doc>
   <doc>
     <field name="id">/en/dan_in_real_life</field>
-    <field name="name">Dan in Real Life</field>
     <field name="initial_release_date">2007-10-26</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
@@ -6755,10 +6747,10 @@
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Peter Hedges</field>
+    <field name="name">Dan in Real Life</field>
   </doc>
   <doc>
     <field name="id">/en/dancer_in_the_dark</field>
-    <field name="name">Dancer in the Dark</field>
     <field name="initial_release_date">2000-05-17</field>
     <field name="genre">Musical</field>
     <field name="genre">Crime Fiction</field>
@@ -6766,16 +6758,16 @@
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
     <field name="directed_by">Lars von Trier</field>
+    <field name="name">Dancer in the Dark</field>
   </doc>
   <doc>
     <field name="id">/en/daniel_amos_live_in_anaheim_1985</field>
-    <field name="name">Daniel Amos Live in Anaheim 1985</field>
     <field name="genre">Music video</field>
     <field name="directed_by">Dave Perry</field>
+    <field name="name">Daniel Amos Live in Anaheim 1985</field>
   </doc>
   <doc>
     <field name="id">/en/danny_deckchair</field>
-    <field name="name">Danny Deckchair</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
@@ -6783,10 +6775,10 @@
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Jeff Balsmeyer</field>
+    <field name="name">Danny Deckchair</field>
   </doc>
   <doc>
     <field name="id">/en/daredevil_2003</field>
-    <field name="name">Daredevil</field>
     <field name="initial_release_date">2003-02-09</field>
     <field name="genre">Action Film</field>
     <field name="genre">Fantasy</field>
@@ -6794,63 +6786,63 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">Superhero movie</field>
     <field name="directed_by">Mark Steven Johnson</field>
+    <field name="name">Daredevil</field>
   </doc>
   <doc>
     <field name="id">/en/dark_blue</field>
-    <field name="name">Dark Blue</field>
     <field name="initial_release_date">2002-12-14</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Ron Shelton</field>
+    <field name="name">Dark Blue</field>
   </doc>
   <doc>
     <field name="id">/en/dark_harvest</field>
-    <field name="name">Dark Harvest</field>
     <field name="genre">Horror</field>
     <field name="genre">Slasher</field>
     <field name="directed_by">Paul Moore, Jr.</field>
+    <field name="name">Dark Harvest</field>
   </doc>
   <doc>
     <field name="id">/en/dark_water</field>
-    <field name="name">Dark Water</field>
     <field name="initial_release_date">2005-06-27</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Walter Salles</field>
+    <field name="name">Dark Water</field>
   </doc>
   <doc>
     <field name="id">/en/dark_water_2002</field>
-    <field name="name">Dark Water</field>
     <field name="initial_release_date">2002-01-19</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Hideo Nakata</field>
+    <field name="name">Dark Water</field>
   </doc>
   <doc>
     <field name="id">/en/darkness_2002</field>
-    <field name="name">Darkness</field>
     <field name="initial_release_date">2002-10-03</field>
     <field name="genre">Horror</field>
     <field name="directed_by">Jaume Balagueró</field>
+    <field name="name">Darkness</field>
   </doc>
   <doc>
     <field name="id">/en/darna_mana_hai</field>
-    <field name="name">Darna Mana Hai</field>
     <field name="initial_release_date">2003-07-25</field>
     <field name="genre">Horror</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Prawaal Raman</field>
+    <field name="name">Darna Mana Hai</field>
   </doc>
   <doc>
     <field name="id">/en/darna_zaroori_hai</field>
-    <field name="name">Darna Zaroori Hai</field>
     <field name="initial_release_date">2006-04-28</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
@@ -6864,46 +6856,46 @@
     <field name="directed_by">J. D. Chakravarthy</field>
     <field name="directed_by">Sajid Khan</field>
     <field name="directed_by">Manish Gupta</field>
+    <field name="name">Darna Zaroori Hai</field>
   </doc>
   <doc>
     <field name="id">/en/darth_vaders_psychic_hotline</field>
-    <field name="name">Darth Vader's Psychic Hotline</field>
     <field name="initial_release_date">2002-04-16</field>
     <field name="genre">Indie film</field>
     <field name="genre">Short Film</field>
     <field name="genre">Fan film</field>
     <field name="directed_by">John E. Hudgens</field>
+    <field name="name">Darth Vader's Psychic Hotline</field>
   </doc>
   <doc>
     <field name="id">/en/darwins_nightmare</field>
-    <field name="name">Darwin's Nightmare</field>
     <field name="initial_release_date">2004-09-01</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Political cinema</field>
     <field name="genre">Biographical film</field>
     <field name="directed_by">Hubert Sauper</field>
+    <field name="name">Darwin's Nightmare</field>
   </doc>
   <doc>
     <field name="id">/en/das_experiment</field>
-    <field name="name">The Experiment</field>
     <field name="initial_release_date">2010-07-15</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Paul Scheuring</field>
+    <field name="name">The Experiment</field>
   </doc>
   <doc>
     <field name="id">/en/dasavatharam</field>
-    <field name="name">Dasavathaaram</field>
     <field name="initial_release_date">2008-06-12</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Disaster Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="directed_by">K. S. Ravikumar</field>
+    <field name="name">Dasavathaaram</field>
   </doc>
   <doc>
     <field name="id">/en/date_movie</field>
-    <field name="name">Date Movie</field>
     <field name="initial_release_date">2006-02-17</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Parody</field>
@@ -6911,18 +6903,18 @@
     <field name="genre">Comedy</field>
     <field name="directed_by">Aaron Seltzer</field>
     <field name="directed_by">Jason Friedberg</field>
+    <field name="name">Date Movie</field>
   </doc>
   <doc>
     <field name="id">/en/dave_attells_insomniac_tour</field>
-    <field name="name">Dave Attell's Insomniac Tour</field>
     <field name="initial_release_date">2006-04-11</field>
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Joel Gallen</field>
+    <field name="name">Dave Attell's Insomniac Tour</field>
   </doc>
   <doc>
     <field name="id">/en/dave_chappelles_block_party</field>
-    <field name="name">Dave Chappelle's Block Party</field>
     <field name="initial_release_date">2006-03-03</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Music</field>
@@ -6931,10 +6923,10 @@
     <field name="genre">Stand-up comedy</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Michel Gondry</field>
+    <field name="name">Dave Chappelle's Block Party</field>
   </doc>
   <doc>
     <field name="id">/en/david_layla</field>
-    <field name="name">David &amp;amp; Layla</field>
     <field name="initial_release_date">2005-10-21</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Indie film</field>
@@ -6943,17 +6935,17 @@
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Jay Jonroy</field>
+    <field name="name">David &amp;amp; Layla</field>
   </doc>
   <doc>
     <field name="id">/en/david_gilmour_in_concert</field>
-    <field name="name">David Gilmour in Concert</field>
     <field name="genre">Music video</field>
     <field name="genre">Concert film</field>
     <field name="directed_by">David Mallet</field>
+    <field name="name">David Gilmour in Concert</field>
   </doc>
   <doc>
     <field name="id">/en/dawn_of_the_dead_2004</field>
-    <field name="name">Dawn of the Dead</field>
     <field name="initial_release_date">2004-03-10</field>
     <field name="genre">Horror</field>
     <field name="genre">Action Film</field>
@@ -6961,10 +6953,10 @@
     <field name="genre">Science Fiction</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Zack Snyder</field>
+    <field name="name">Dawn of the Dead</field>
   </doc>
   <doc>
     <field name="id">/en/day_of_the_dead_2007</field>
-    <field name="name">Day of the Dead</field>
     <field name="initial_release_date">2008-04-08</field>
     <field name="genre">Splatter film</field>
     <field name="genre">Doomsday film</field>
@@ -6973,47 +6965,47 @@
     <field name="genre">Cult film</field>
     <field name="genre">Zombie Film</field>
     <field name="directed_by">Steve Miner</field>
+    <field name="name">Day of the Dead</field>
   </doc>
   <doc>
     <field name="id">/en/day_of_the_dead_2_contagium</field>
-    <field name="name">Day of the Dead 2: Contagium</field>
     <field name="initial_release_date">2005-10-18</field>
     <field name="genre">Horror</field>
     <field name="genre">Zombie Film</field>
     <field name="directed_by">Ana Clavell</field>
     <field name="directed_by">James Glenn Dudelson</field>
+    <field name="name">Day of the Dead 2: Contagium</field>
   </doc>
   <doc>
     <field name="id">/en/day_watch</field>
-    <field name="name">Day Watch</field>
     <field name="initial_release_date">2006-01-01</field>
     <field name="genre">Thriller</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action Film</field>
     <field name="directed_by">Timur Bekmambetov</field>
+    <field name="name">Day Watch</field>
   </doc>
   <doc>
     <field name="id">/en/day_zero</field>
-    <field name="name">Day Zero</field>
     <field name="initial_release_date">2007-11-02</field>
     <field name="genre">Indie film</field>
     <field name="genre">Political drama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Bryan Gunnar Cole</field>
+    <field name="name">Day Zero</field>
   </doc>
   <doc>
     <field name="id">/en/de-lovely</field>
-    <field name="name">De-Lovely</field>
     <field name="initial_release_date">2004-05-22</field>
     <field name="genre">Musical</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Irwin Winkler</field>
+    <field name="name">De-Lovely</field>
   </doc>
   <doc>
     <field name="id">/en/dead_breakfast</field>
-    <field name="name">Dead &amp;amp; Breakfast</field>
     <field name="initial_release_date">2004-03-19</field>
     <field name="genre">Horror</field>
     <field name="genre">Black comedy</field>
@@ -7022,17 +7014,17 @@
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Matthew Leutwyler</field>
+    <field name="name">Dead &amp;amp; Breakfast</field>
   </doc>
   <doc>
     <field name="id">/en/dead_birds_2005</field>
-    <field name="name">Dead Birds</field>
     <field name="initial_release_date">2005-03-15</field>
     <field name="genre">Horror</field>
     <field name="directed_by">Alex Turner</field>
+    <field name="name">Dead Birds</field>
   </doc>
   <doc>
     <field name="id">/en/dead_end_2003</field>
-    <field name="name">Dead End</field>
     <field name="initial_release_date">2003-01-30</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
@@ -7040,38 +7032,38 @@
     <field name="genre">Comedy</field>
     <field name="directed_by">Jean-Baptiste Andrea</field>
     <field name="directed_by">Fabrice Canepa</field>
+    <field name="name">Dead End</field>
   </doc>
   <doc>
     <field name="id">/en/dead_friend</field>
-    <field name="name">Dead Friend</field>
     <field name="initial_release_date">2004-06-18</field>
     <field name="genre">Horror</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Kim Tae-kyeong</field>
+    <field name="name">Dead Friend</field>
   </doc>
   <doc>
     <field name="id">/en/dead_mans_shoes</field>
-    <field name="name">Dead Man's Shoes</field>
     <field name="initial_release_date">2004-10-01</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Shane Meadows</field>
+    <field name="name">Dead Man's Shoes</field>
   </doc>
   <doc>
     <field name="id">/en/dear_frankie</field>
-    <field name="name">Dear Frankie</field>
     <field name="initial_release_date">2004-05-04</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
     <field name="genre">Romance Film</field>
     <field name="directed_by">Shona Auerbach</field>
+    <field name="name">Dear Frankie</field>
   </doc>
   <doc>
     <field name="id">/en/dear_wendy</field>
-    <field name="name">Dear Wendy</field>
     <field name="initial_release_date">2004-05-16</field>
     <field name="genre">Indie film</field>
     <field name="genre">Crime Fiction</field>
@@ -7080,10 +7072,10 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Thomas Vinterberg</field>
+    <field name="name">Dear Wendy</field>
   </doc>
   <doc>
     <field name="id">/en/death_in_gaza</field>
-    <field name="name">Death in Gaza</field>
     <field name="initial_release_date">2004-02-11</field>
     <field name="genre">Documentary film</field>
     <field name="genre">War film</field>
@@ -7091,20 +7083,20 @@
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">Biographical film</field>
     <field name="directed_by">James Miller</field>
+    <field name="name">Death in Gaza</field>
   </doc>
   <doc>
     <field name="id">/en/death_to_smoochy</field>
-    <field name="name">Death to Smoochy</field>
     <field name="initial_release_date">2002-03-29</field>
     <field name="genre">Comedy</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Danny DeVito</field>
+    <field name="name">Death to Smoochy</field>
   </doc>
   <doc>
     <field name="id">/en/death_trance</field>
-    <field name="name">Death Trance</field>
     <field name="initial_release_date">2005-05-12</field>
     <field name="genre">Action Film</field>
     <field name="genre">Fantasy</field>
@@ -7115,29 +7107,29 @@
     <field name="genre">Action Thriller</field>
     <field name="genre">Japanese Movies</field>
     <field name="directed_by">Yuji Shimomura</field>
+    <field name="name">Death Trance</field>
   </doc>
   <doc>
     <field name="id">/en/death_walks_the_streets</field>
-    <field name="name">Death Walks the Streets</field>
     <field name="initial_release_date">2008-06-26</field>
     <field name="genre">Indie film</field>
     <field name="genre">Horror</field>
     <field name="genre">Crime Fiction</field>
     <field name="directed_by">James Zahn</field>
+    <field name="name">Death Walks the Streets</field>
   </doc>
   <doc>
     <field name="id">/en/deathwatch</field>
-    <field name="name">Deathwatch</field>
     <field name="initial_release_date">2002-10-06</field>
     <field name="genre">Horror</field>
     <field name="genre">War film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Michael J. Bassett</field>
+    <field name="name">Deathwatch</field>
   </doc>
   <doc>
     <field name="id">/en/december_boys</field>
-    <field name="name">December Boys</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Indie film</field>
@@ -7145,29 +7137,29 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Rod Hardy</field>
+    <field name="name">December Boys</field>
   </doc>
   <doc>
     <field name="id">/en/decoys</field>
-    <field name="name">Decoys</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="genre">Alien Film</field>
     <field name="genre">Horror comedy</field>
     <field name="directed_by">Matthew Hastings</field>
+    <field name="name">Decoys</field>
   </doc>
   <doc>
     <field name="id">/en/deepavali</field>
-    <field name="name">Deepavali</field>
     <field name="initial_release_date">2007-02-09</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
     <field name="directed_by">Ezhil</field>
+    <field name="name">Deepavali</field>
   </doc>
   <doc>
     <field name="id">/en/deewane_huye_pagal</field>
-    <field name="name">Deewane Huye Paagal</field>
     <field name="initial_release_date">2005-11-25</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
@@ -7176,10 +7168,10 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Vikram Bhatt</field>
+    <field name="name">Deewane Huye Paagal</field>
   </doc>
   <doc>
     <field name="id">/wikipedia/ja_id/980449</field>
-    <field name="name">Déjà Vu</field>
     <field name="initial_release_date">2006-11-20</field>
     <field name="genre">Thriller</field>
     <field name="genre">Science Fiction</field>
@@ -7189,10 +7181,10 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Action/Adventure</field>
     <field name="directed_by">Tony Scott</field>
+    <field name="name">Déjà Vu</field>
   </doc>
   <doc>
     <field name="id">/en/democrazy_2005</field>
-    <field name="name">Democrazy</field>
     <field name="genre">Parody</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
@@ -7200,27 +7192,27 @@
     <field name="genre">Superhero movie</field>
     <field name="genre">Comedy</field>
     <field name="directed_by">Michael Legge</field>
+    <field name="name">Democrazy</field>
   </doc>
   <doc>
     <field name="id">/en/demonium</field>
-    <field name="name">Demonium</field>
     <field name="initial_release_date">2001-08-25</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="directed_by">Andreas Schnaas</field>
+    <field name="name">Demonium</field>
   </doc>
   <doc>
     <field name="id">/en/der_schuh_des_manitu</field>
-    <field name="name">Der Schuh des Manitu</field>
     <field name="initial_release_date">2001-07-13</field>
     <field name="genre">Western</field>
     <field name="genre">Comedy</field>
     <field name="genre">Parody</field>
     <field name="directed_by">Michael Herbig</field>
+    <field name="name">Der Schuh des Manitu</field>
   </doc>
   <doc>
     <field name="id">/en/der_tunnel</field>
-    <field name="name">The Tunnel</field>
     <field name="initial_release_date">2001-01-21</field>
     <field name="genre">World cinema</field>
     <field name="genre">Thriller</field>
@@ -7228,37 +7220,39 @@
     <field name="genre">Political thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Roland Suso Richter</field>
+    <field name="name">The Tunnel</field>
   </doc>
   <doc>
     <field name="id">/en/derailed</field>
-    <field name="name">Derailed</field>
     <field name="initial_release_date">2005-11-11</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
     <field name="directed_by">Mikael Håfström</field>
+    <field name="name">Derailed</field>
   </doc>
   <doc>
     <field name="id">/en/derailed_2002</field>
-    <field name="name">Derailed</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Disaster Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="directed_by">Bob Misiorowski</field>
+    <field name="name">Derailed</field>
   </doc>
   <doc>
     <field name="id">/en/destinys_child_live_in_atlana</field>
-    <field name="name">Destiny's Child: Live In Atlana</field>
     <field name="initial_release_date">2006-03-27</field>
     <field name="genre">Music</field>
     <field name="genre">Documentary film</field>
     <field name="directed_by">Julia Knowles</field>
+    <field name="name">Destiny's Child: Live In Atlana</field>
   </doc>
   <doc>
     <field name="id">/en/deuce_bigalow_european_gigolo</field>
+    <field name="initial_release_date">2005-08-06</field>
     <field name="name">Deuce Bigalow: European Gigolo</field>
     <field name="directed_by">Mike Bigelow</field>
     <field name="genre">Sex comedy</field>
@@ -7266,18 +7260,18 @@
     <field name="genre">Gross out</field>
     <field name="genre">Gross-out film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2005-08-06</field>
   </doc>
   <doc>
     <field name="id">/en/dev</field>
+    <field name="initial_release_date">2004-06-11</field>
     <field name="name">Dev</field>
     <field name="directed_by">Govind Nihalani</field>
     <field name="genre">Drama</field>
     <field name="genre">Bollywood</field>
-    <field name="initial_release_date">2004-06-11</field>
   </doc>
   <doc>
     <field name="id">/en/devadasu</field>
+    <field name="initial_release_date">2006-01-11</field>
     <field name="name">Devadasu</field>
     <field name="directed_by">YVS Chowdary</field>
     <field name="directed_by">Gopireddy Mallikarjuna Reddy</field>
@@ -7285,10 +7279,10 @@
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2006-01-11</field>
   </doc>
   <doc>
     <field name="id">/en/devdas_2002</field>
+    <field name="initial_release_date">2002-05-23</field>
     <field name="name">Devdas</field>
     <field name="directed_by">Sanjay Leela Bhansali</field>
     <field name="genre">Romance Film</field>
@@ -7297,25 +7291,25 @@
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2002-05-23</field>
   </doc>
   <doc>
     <field name="id">/en/devils_playground_2003</field>
+    <field name="initial_release_date">2003-02-04</field>
     <field name="name">Devil's Playground</field>
     <field name="directed_by">Lucy Walker</field>
     <field name="genre">Documentary film</field>
-    <field name="initial_release_date">2003-02-04</field>
   </doc>
   <doc>
     <field name="id">/en/the_devils_pond</field>
+    <field name="initial_release_date">2003-10-21</field>
     <field name="name">Devil's Pond</field>
     <field name="directed_by">Joel Viertel</field>
     <field name="genre">Thriller</field>
     <field name="genre">Suspense</field>
-    <field name="initial_release_date">2003-10-21</field>
   </doc>
   <doc>
     <field name="id">/en/dhadkan</field>
+    <field name="initial_release_date">2000-08-11</field>
     <field name="name">Dhadkan</field>
     <field name="directed_by">Dharmesh Darshan</field>
     <field name="genre">Musical</field>
@@ -7325,10 +7319,10 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2000-08-11</field>
   </doc>
   <doc>
     <field name="id">/en/dhool</field>
+    <field name="initial_release_date">2003-01-10</field>
     <field name="name">Dhool</field>
     <field name="directed_by">Dharani</field>
     <field name="genre">Musical</field>
@@ -7338,10 +7332,10 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2003-01-10</field>
   </doc>
   <doc>
     <field name="id">/en/dhoom_2</field>
+    <field name="initial_release_date">2006-11-23</field>
     <field name="name">Dhoom 2</field>
     <field name="directed_by">Sanjay Gadhvi</field>
     <field name="genre">Crime Fiction</field>
@@ -7354,7 +7348,6 @@
     <field name="genre">Action Thriller</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2006-11-23</field>
   </doc>
   <doc>
     <field name="id">/en/dhyaas_parva</field>
@@ -7374,6 +7367,7 @@
   </doc>
   <doc>
     <field name="id">/en/diary_of_a_mad_black_woman</field>
+    <field name="initial_release_date">2005-02-25</field>
     <field name="name">Diary of a Mad Black Woman</field>
     <field name="directed_by">Darren Grant</field>
     <field name="genre">Comedy-drama</field>
@@ -7381,34 +7375,34 @@
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-02-25</field>
   </doc>
   <doc>
     <field name="id">/en/dickie_roberts_former_child_star</field>
+    <field name="initial_release_date">2003-09-03</field>
     <field name="name">Dickie Roberts: Former Child Star</field>
     <field name="directed_by">Sam Weisman</field>
     <field name="genre">Parody</field>
     <field name="genre">Slapstick</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-09-03</field>
   </doc>
   <doc>
     <field name="id">/en/die_bad</field>
+    <field name="initial_release_date">2000-07-15</field>
     <field name="name">Die Bad</field>
     <field name="directed_by">Ryoo Seung-wan</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2000-07-15</field>
   </doc>
   <doc>
     <field name="id">/en/die_mommie_die</field>
+    <field name="initial_release_date">2003-01-20</field>
     <field name="name">Die Mommie Die!</field>
     <field name="directed_by">Mark Rucker</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-01-20</field>
   </doc>
   <doc>
     <field name="id">/en/dieu_est_grand_je_suis_toute_petite</field>
+    <field name="initial_release_date">2001-09-26</field>
     <field name="name">God Is Great and I'm Not</field>
     <field name="directed_by">Pascale Bailly</field>
     <field name="genre">Romantic comedy</field>
@@ -7418,10 +7412,10 @@
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-09-26</field>
   </doc>
   <doc>
     <field name="id">/en/digimon_the_movie</field>
+    <field name="initial_release_date">2000-03-17</field>
     <field name="name">Digimon: The Movie</field>
     <field name="directed_by">Mamoru Hosoda</field>
     <field name="directed_by">Shigeyasu Yamauchi</field>
@@ -7432,29 +7426,29 @@
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
-    <field name="initial_release_date">2000-03-17</field>
   </doc>
   <doc>
     <field name="id">/en/digital_monster_x-evolution</field>
+    <field name="initial_release_date">2005-01-03</field>
     <field name="name">Digital Monster X-Evolution</field>
     <field name="directed_by">Hiroyuki Kakudō</field>
     <field name="genre">Computer Animation</field>
     <field name="genre">Animation</field>
     <field name="genre">Japanese Movies</field>
-    <field name="initial_release_date">2005-01-03</field>
   </doc>
   <doc>
     <field name="id">/en/digna_hasta_el_ultimo_aliento</field>
+    <field name="initial_release_date">2004-12-17</field>
     <field name="name">Digna... hasta el último aliento</field>
     <field name="directed_by">Felipe Cazals</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">Law &amp;amp; Crime</field>
     <field name="genre">Biographical film</field>
-    <field name="initial_release_date">2004-12-17</field>
   </doc>
   <doc>
     <field name="id">/en/dil_chahta_hai</field>
+    <field name="initial_release_date">2001-07-24</field>
     <field name="name">Dil Chahta Hai</field>
     <field name="directed_by">Farhan Akhtar</field>
     <field name="genre">Bollywood</field>
@@ -7466,10 +7460,10 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-07-24</field>
   </doc>
   <doc>
     <field name="id">/en/dil_diya_hai</field>
+    <field name="initial_release_date">2006-09-08</field>
     <field name="name">Dil Diya Hai</field>
     <field name="directed_by">Aditya Datt</field>
     <field name="directed_by">Aditya Datt</field>
@@ -7477,10 +7471,10 @@
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-09-08</field>
   </doc>
   <doc>
     <field name="id">/en/dil_hai_tumhaara</field>
+    <field name="initial_release_date">2002-09-06</field>
     <field name="name">Dil Hai Tumhara</field>
     <field name="directed_by">Kundan Shah</field>
     <field name="genre">Family</field>
@@ -7490,18 +7484,18 @@
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2002-09-06</field>
   </doc>
   <doc>
     <field name="id">/en/dil_ka_rishta</field>
+    <field name="initial_release_date">2003-01-17</field>
     <field name="name">Dil Ka Rishta</field>
     <field name="directed_by">Naresh Malhotra</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Bollywood</field>
-    <field name="initial_release_date">2003-01-17</field>
   </doc>
   <doc>
     <field name="id">/en/dil_ne_jise_apna_kahaa</field>
+    <field name="initial_release_date">2004-09-10</field>
     <field name="name">Dil Ne Jise Apna Kahaa</field>
     <field name="directed_by">Atul Agnihotri</field>
     <field name="genre">Musical</field>
@@ -7512,10 +7506,10 @@
     <field name="genre">Comedy</field>
     <field name="genre">Bollywood</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-09-10</field>
   </doc>
   <doc>
     <field name="id">/en/dinosaur_2000</field>
+    <field name="initial_release_date">2000-05-13</field>
     <field name="name">Dinosaur</field>
     <field name="directed_by">Eric Leighton</field>
     <field name="directed_by">Ralph Zondag</field>
@@ -7526,10 +7520,10 @@
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Thriller</field>
-    <field name="initial_release_date">2000-05-13</field>
   </doc>
   <doc>
     <field name="id">/en/dirty_dancing_2004</field>
+    <field name="initial_release_date">2004-02-27</field>
     <field name="name">Dirty Dancing: Havana Nights</field>
     <field name="directed_by">Guy Ferland</field>
     <field name="genre">Musical</field>
@@ -7541,10 +7535,10 @@
     <field name="genre">Dance film</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-02-27</field>
   </doc>
   <doc>
     <field name="id">/en/dirty_deeds</field>
+    <field name="initial_release_date">2002-07-18</field>
     <field name="name">Dirty Deeds</field>
     <field name="directed_by">David Caesar</field>
     <field name="genre">Historical period drama</field>
@@ -7555,17 +7549,17 @@
     <field name="genre">World cinema</field>
     <field name="genre">Gangster Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-07-18</field>
   </doc>
   <doc>
     <field name="id">/en/dirty_deeds_2005</field>
+    <field name="initial_release_date">2005-08-26</field>
     <field name="name">Dirty Deeds</field>
     <field name="directed_by">David Kendall</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2005-08-26</field>
   </doc>
   <doc>
     <field name="id">/en/dirty_love</field>
+    <field name="initial_release_date">2005-09-23</field>
     <field name="name">Dirty Love</field>
     <field name="directed_by">John Mallory Asher</field>
     <field name="genre">Indie film</field>
@@ -7573,10 +7567,10 @@
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2005-09-23</field>
   </doc>
   <doc>
     <field name="id">/en/disappearing_acts</field>
+    <field name="initial_release_date">2000-12-09</field>
     <field name="name">Disappearing Acts</field>
     <field name="directed_by">Gina Prince-Bythewood</field>
     <field name="genre">Romance Film</field>
@@ -7584,10 +7578,10 @@
     <field name="genre">Film adaptation</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2000-12-09</field>
   </doc>
   <doc>
     <field name="id">/en/dishyum</field>
+    <field name="initial_release_date">2006-02-02</field>
     <field name="name">Dishyum</field>
     <field name="directed_by">Sasi</field>
     <field name="genre">Romance Film</field>
@@ -7595,17 +7589,17 @@
     <field name="genre">Drama</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2006-02-02</field>
   </doc>
   <doc>
     <field name="id">/en/distant_lights</field>
+    <field name="initial_release_date">2003-02-11</field>
     <field name="name">Distant Lights</field>
     <field name="directed_by">Hans-Christian Schmid</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-02-11</field>
   </doc>
   <doc>
     <field name="id">/en/district_b13</field>
+    <field name="initial_release_date">2004-11-10</field>
     <field name="name">District 13</field>
     <field name="directed_by">Pierre Morel</field>
     <field name="genre">Martial Arts Film</field>
@@ -7613,30 +7607,30 @@
     <field name="genre">Action Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Crime Fiction</field>
-    <field name="initial_release_date">2004-11-10</field>
   </doc>
   <doc>
     <field name="id">/en/disturbia</field>
+    <field name="initial_release_date">2007-04-04</field>
     <field name="name">Disturbia</field>
     <field name="directed_by">D. J. Caruso</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Teen film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2007-04-04</field>
   </doc>
   <doc>
     <field name="id">/en/ditto_2000</field>
+    <field name="initial_release_date">2000-05-27</field>
     <field name="name">Ditto</field>
     <field name="directed_by">Jeong-kwon Kim</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2000-05-27</field>
   </doc>
   <doc>
     <field name="id">/en/divine_intervention_2002</field>
+    <field name="initial_release_date">2002-05-19</field>
     <field name="name">Divine Intervention</field>
     <field name="directed_by">Elia Suleiman</field>
     <field name="genre">Black comedy</field>
@@ -7644,10 +7638,10 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-05-19</field>
   </doc>
   <doc>
     <field name="id">/en/divine_secrets_of_the_ya_ya_sisterhood</field>
+    <field name="initial_release_date">2002-06-03</field>
     <field name="name">Divine Secrets of the Ya-Ya Sisterhood</field>
     <field name="directed_by">Callie Khouri</field>
     <field name="genre">Film adaptation</field>
@@ -7657,80 +7651,80 @@
     <field name="genre">Ensemble Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-06-03</field>
   </doc>
   <doc>
     <field name="id">/en/doa_dead_or_alive</field>
+    <field name="initial_release_date">2006-09-07</field>
     <field name="name">DOA: Dead or Alive</field>
     <field name="directed_by">Corey Yuen</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
-    <field name="initial_release_date">2006-09-07</field>
   </doc>
   <doc>
     <field name="id">/en/dodgeball_a_true_underdog_story</field>
+    <field name="initial_release_date">2004-06-18</field>
     <field name="name">DodgeBall: A True Underdog Story</field>
     <field name="directed_by">Rawson Marshall Thurber</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2004-06-18</field>
   </doc>
   <doc>
     <field name="id">/en/dog_soldiers</field>
+    <field name="initial_release_date">2002-03-22</field>
     <field name="name">Dog Soldiers</field>
     <field name="directed_by">Neil Marshall</field>
     <field name="genre">Horror</field>
     <field name="genre">Action Film</field>
     <field name="genre">Creature Film</field>
-    <field name="initial_release_date">2002-03-22</field>
   </doc>
   <doc>
     <field name="id">/en/dogtown_and_z-boys</field>
+    <field name="initial_release_date">2001-01-19</field>
     <field name="name">Dogtown and Z-Boys</field>
     <field name="directed_by">Stacy Peralta</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Sports</field>
     <field name="genre">Extreme Sports</field>
     <field name="genre">Biographical film</field>
-    <field name="initial_release_date">2001-01-19</field>
   </doc>
   <doc>
     <field name="id">/en/dogville</field>
+    <field name="initial_release_date">2003-05-19</field>
     <field name="name">Dogville</field>
     <field name="directed_by">Lars von Trier</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-05-19</field>
   </doc>
   <doc>
     <field name="id">/en/doll_master</field>
+    <field name="initial_release_date">2004-07-30</field>
     <field name="name">The Doll Master</field>
     <field name="directed_by">Jeong Yong-Gi</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2004-07-30</field>
   </doc>
   <doc>
     <field name="id">/en/dolls</field>
+    <field name="initial_release_date">2002-09-05</field>
     <field name="name">Dolls</field>
     <field name="directed_by">Takeshi Kitano</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-09-05</field>
   </doc>
   <doc>
     <field name="id">/en/dominion_prequel_to_the_exorcist</field>
+    <field name="initial_release_date">2005-05-20</field>
     <field name="name">Dominion: Prequel to the Exorcist</field>
     <field name="directed_by">Paul Schrader</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Cult film</field>
-    <field name="initial_release_date">2005-05-20</field>
   </doc>
   <doc>
     <field name="id">/en/domino_2005</field>
+    <field name="initial_release_date">2005-09-25</field>
     <field name="name">Domino</field>
     <field name="directed_by">Tony Scott</field>
     <field name="genre">Thriller</field>
@@ -7740,10 +7734,10 @@
     <field name="genre">Comedy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-09-25</field>
   </doc>
   <doc>
     <field name="id">/en/don_2006</field>
+    <field name="initial_release_date">2006-10-20</field>
     <field name="name">Don: The Chase Begins Again</field>
     <field name="directed_by">Farhan Akhtar</field>
     <field name="genre">Crime Fiction</field>
@@ -7754,20 +7748,20 @@
     <field name="genre">Comedy</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="initial_release_date">2006-10-20</field>
   </doc>
   <doc>
     <field name="id">/en/dons_plum</field>
+    <field name="initial_release_date">2001-02-10</field>
     <field name="name">Don's Plum</field>
     <field name="directed_by">R.D. Robb</field>
     <field name="genre">Black-and-white</field>
     <field name="genre">Ensemble Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-02-10</field>
   </doc>
   <doc>
     <field name="id">/en/dont_come_knocking</field>
+    <field name="initial_release_date">2005-05-19</field>
     <field name="name">Don't Come Knocking</field>
     <field name="directed_by">Wim Wenders</field>
     <field name="genre">Western</field>
@@ -7776,45 +7770,45 @@
     <field name="genre">Drama</field>
     <field name="genre">Music</field>
     <field name="genre">Musical Drama</field>
-    <field name="initial_release_date">2005-05-19</field>
   </doc>
   <doc>
     <field name="id">/en/dont_move</field>
+    <field name="initial_release_date">2004-03-12</field>
     <field name="name">Don't Move</field>
     <field name="directed_by">Sergio Castellitto</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-03-12</field>
   </doc>
   <doc>
     <field name="id">/en/dont_say_a_word_2001</field>
+    <field name="initial_release_date">2001-09-24</field>
     <field name="name">Don't Say a Word</field>
     <field name="directed_by">Gary Fleder</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Suspense</field>
-    <field name="initial_release_date">2001-09-24</field>
   </doc>
   <doc>
     <field name="id">/en/donnie_darko</field>
+    <field name="initial_release_date">2001-01-19</field>
     <field name="name">Donnie Darko</field>
     <field name="directed_by">Richard Kelly</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-01-19</field>
   </doc>
   <doc>
     <field name="id">/en/doomsday_2008</field>
+    <field name="initial_release_date">2008-03-14</field>
     <field name="name">Doomsday</field>
     <field name="directed_by">Neil Marshall</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action Film</field>
-    <field name="initial_release_date">2008-03-14</field>
   </doc>
   <doc>
     <field name="id">/en/dopamine_2003</field>
+    <field name="initial_release_date">2003-01-23</field>
     <field name="name">Dopamine</field>
     <field name="directed_by">Mark Decena</field>
     <field name="genre">Comedy-drama</field>
@@ -7823,28 +7817,28 @@
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-01-23</field>
   </doc>
   <doc>
     <field name="id">/en/dosti_friends_forever</field>
+    <field name="initial_release_date">2005-12-23</field>
     <field name="name">Dosti: Friends Forever</field>
     <field name="directed_by">Suneel Darshan</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-12-23</field>
   </doc>
   <doc>
     <field name="id">/en/double_take</field>
+    <field name="initial_release_date">2001-01-12</field>
     <field name="name">Double Take</field>
     <field name="directed_by">George Gallo</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2001-01-12</field>
   </doc>
   <doc>
     <field name="id">/en/double_teamed</field>
+    <field name="initial_release_date">2002-01-18</field>
     <field name="name">Double Teamed</field>
     <field name="directed_by">Duwayne Dunham</field>
     <field name="genre">Family</field>
@@ -7852,10 +7846,10 @@
     <field name="genre">Family Drama</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Sports</field>
-    <field name="initial_release_date">2002-01-18</field>
   </doc>
   <doc>
     <field name="id">/en/double_vision_2002</field>
+    <field name="initial_release_date">2002-05-20</field>
     <field name="name">Double Vision</field>
     <field name="directed_by">Chen Kuo-Fu</field>
     <field name="genre">Thriller</field>
@@ -7869,10 +7863,10 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Chinese Movies</field>
-    <field name="initial_release_date">2002-05-20</field>
   </doc>
   <doc>
     <field name="id">/en/double_whammy</field>
+    <field name="initial_release_date">2001-01-20</field>
     <field name="name">Double Whammy</field>
     <field name="directed_by">Tom DiCillo</field>
     <field name="genre">Comedy-drama</field>
@@ -7884,19 +7878,19 @@
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-01-20</field>
   </doc>
   <doc>
     <field name="id">/en/down_and_derby</field>
+    <field name="initial_release_date">2005-04-15</field>
     <field name="name">Down and Derby</field>
     <field name="directed_by">Eric Hendershot</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2005-04-15</field>
   </doc>
   <doc>
     <field name="id">/en/down_in_the_valley</field>
+    <field name="initial_release_date">2005-05-13</field>
     <field name="name">Down in the Valley</field>
     <field name="directed_by">David Jacobson</field>
     <field name="genre">Indie film</field>
@@ -7904,19 +7898,19 @@
     <field name="genre">Family Drama</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-05-13</field>
   </doc>
   <doc>
     <field name="id">/en/down_to_earth</field>
+    <field name="initial_release_date">2001-02-12</field>
     <field name="name">Down to Earth</field>
     <field name="directed_by">Chris Weitz</field>
     <field name="directed_by">Paul Weitz</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2001-02-12</field>
   </doc>
   <doc>
     <field name="id">/en/down_with_love</field>
+    <field name="initial_release_date">2003-05-09</field>
     <field name="name">Down with Love</field>
     <field name="directed_by">Peyton Reed</field>
     <field name="genre">Romantic comedy</field>
@@ -7924,38 +7918,38 @@
     <field name="genre">Screwball comedy</field>
     <field name="genre">Parody</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-05-09</field>
   </doc>
   <doc>
     <field name="id">/en/downfall</field>
+    <field name="initial_release_date">2004-09-08</field>
     <field name="name">Downfall</field>
     <field name="directed_by">Oliver Hirschbiegel</field>
     <field name="genre">Biographical film</field>
     <field name="genre">War film</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-09-08</field>
   </doc>
   <doc>
     <field name="id">/en/dr_dolittle_2</field>
+    <field name="initial_release_date">2001-06-19</field>
     <field name="name">Dr. Dolittle 2</field>
     <field name="directed_by">Steve Carr</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Romance Film</field>
-    <field name="initial_release_date">2001-06-19</field>
   </doc>
   <doc>
     <field name="id">/en/dr_dolittle_3</field>
+    <field name="initial_release_date">2006-04-25</field>
     <field name="name">Dr. Dolittle 3</field>
     <field name="directed_by">Rich Thorne</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2006-04-25</field>
   </doc>
   <doc>
     <field name="id">/en/dracula_pages_from_a_virgins_diary</field>
+    <field name="initial_release_date">2002-02-28</field>
     <field name="name">Dracula: Pages from a Virgin's Diary</field>
     <field name="directed_by">Guy Maddin</field>
     <field name="genre">Silent film</field>
@@ -7967,7 +7961,6 @@
     <field name="genre">Horror comedy</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2002-02-28</field>
   </doc>
   <doc>
     <field name="id">/en/dragon_boys</field>
@@ -7979,6 +7972,7 @@
   </doc>
   <doc>
     <field name="id">/en/dragon_tiger_gate</field>
+    <field name="initial_release_date">2006-07-27</field>
     <field name="name">Dragon Tiger Gate</field>
     <field name="directed_by">Wilson Yip</field>
     <field name="genre">Martial Arts Film</field>
@@ -7990,10 +7984,10 @@
     <field name="genre">World cinema</field>
     <field name="genre">Action Thriller</field>
     <field name="genre">Chinese Movies</field>
-    <field name="initial_release_date">2006-07-27</field>
   </doc>
   <doc>
     <field name="id">/en/dragonfly_2002</field>
+    <field name="initial_release_date">2002-02-18</field>
     <field name="name">Dragonfly</field>
     <field name="directed_by">Tom Shadyac</field>
     <field name="genre">Thriller</field>
@@ -8001,10 +7995,10 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2002-02-18</field>
   </doc>
   <doc>
     <field name="id">/en/dragonlance_dragons_of_autumn_twilight</field>
+    <field name="initial_release_date">2008-01-15</field>
     <field name="name">Dragonlance: Dragons of Autumn Twilight</field>
     <field name="directed_by">Will Meugniot</field>
     <field name="genre">Animation</field>
@@ -8012,20 +8006,20 @@
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Science Fiction</field>
-    <field name="initial_release_date">2008-01-15</field>
   </doc>
   <doc>
     <field name="id">/en/drake_josh_go_hollywood</field>
+    <field name="initial_release_date">2006-01-06</field>
     <field name="name">Drake &amp;amp; Josh Go Hollywood</field>
     <field name="directed_by">Adam Weissman</field>
     <field name="directed_by">Steve Hoefer</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2006-01-06</field>
   </doc>
   <doc>
     <field name="id">/en/drawing_restraint_9</field>
+    <field name="initial_release_date">2005-07-01</field>
     <field name="name">Drawing Restraint 9</field>
     <field name="directed_by">Matthew Barney</field>
     <field name="genre">Cult film</field>
@@ -8034,29 +8028,29 @@
     <field name="genre">Avant-garde</field>
     <field name="genre">Experimental film</field>
     <field name="genre">Japanese Movies</field>
-    <field name="initial_release_date">2005-07-01</field>
   </doc>
   <doc>
     <field name="id">/en/dreamcatcher</field>
+    <field name="initial_release_date">2003-03-06</field>
     <field name="name">Dreamcatcher</field>
     <field name="directed_by">Lawrence Kasdan</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-03-06</field>
   </doc>
   <doc>
     <field name="id">/en/dreamer_2005</field>
+    <field name="initial_release_date">2005-09-10</field>
     <field name="name">Dreamer</field>
     <field name="directed_by">John Gatins</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-09-10</field>
   </doc>
   <doc>
     <field name="id">/en/dreaming_of_julia</field>
+    <field name="initial_release_date">2003-10-24</field>
     <field name="name">Dreaming of Julia</field>
     <field name="directed_by">Juan Gerard</field>
     <field name="genre">Indie film</field>
@@ -8065,10 +8059,10 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-10-24</field>
   </doc>
   <doc>
     <field name="id">/en/driving_miss_wealthy_juet_sai_ho_bun</field>
+    <field name="initial_release_date">2004-05-03</field>
     <field name="name">Driving Miss Wealthy</field>
     <field name="directed_by">James Yuen</field>
     <field name="genre">Romance Film</field>
@@ -8077,10 +8071,10 @@
     <field name="genre">Chinese Movies</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-05-03</field>
   </doc>
   <doc>
     <field name="id">/en/drowning_mona</field>
+    <field name="initial_release_date">2000-01-02</field>
     <field name="name">Drowning Mona</field>
     <field name="directed_by">Nick Gomez</field>
     <field name="genre">Black comedy</field>
@@ -8089,7 +8083,6 @@
     <field name="genre">Crime Comedy</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2000-01-02</field>
   </doc>
   <doc>
     <field name="id">/en/drugstore_girl</field>
@@ -8100,6 +8093,7 @@
   </doc>
   <doc>
     <field name="id">/en/druids</field>
+    <field name="initial_release_date">2001-08-31</field>
     <field name="name">Druids</field>
     <field name="directed_by">Jacques Dorfmann</field>
     <field name="genre">Adventure Film</field>
@@ -8111,10 +8105,10 @@
     <field name="genre">Historical fiction</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-08-31</field>
   </doc>
   <doc>
     <field name="id">/en/duck_the_carbine_high_massacre</field>
+    <field name="initial_release_date">2000-04-20</field>
     <field name="name">Duck! The Carbine High Massacre</field>
     <field name="directed_by">William Hellfire</field>
     <field name="directed_by">Joey Smack</field>
@@ -8124,16 +8118,15 @@
     <field name="genre">Indie film</field>
     <field name="genre">Teen film</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2000-04-20</field>
   </doc>
   <doc>
     <field name="id">/en/dude_wheres_my_car</field>
+    <field name="initial_release_date">2000-12-10</field>
     <field name="name">Dude, Where's My Car?</field>
     <field name="directed_by">Danny Leiner</field>
     <field name="genre">Mystery</field>
     <field name="genre">Comedy</field>
     <field name="genre">Science Fiction</field>
-    <field name="initial_release_date">2000-12-10</field>
   </doc>
   <doc>
     <field name="id">/en/dude_wheres_the_party</field>
@@ -8145,6 +8138,7 @@
   </doc>
   <doc>
     <field name="id">/en/duets</field>
+    <field name="initial_release_date">2000-09-09</field>
     <field name="name">Duets</field>
     <field name="directed_by">Bruce Paltrow</field>
     <field name="genre">Musical</field>
@@ -8152,10 +8146,10 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2000-09-09</field>
   </doc>
   <doc>
     <field name="id">/en/dumb_dumberer</field>
+    <field name="initial_release_date">2003-06-13</field>
     <field name="name">Dumb &amp;amp; Dumberer: When Harry Met Lloyd</field>
     <field name="directed_by">Troy Miller</field>
     <field name="genre">Buddy film</field>
@@ -8163,19 +8157,19 @@
     <field name="genre">Screwball comedy</field>
     <field name="genre">Slapstick</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-06-13</field>
   </doc>
   <doc>
     <field name="id">/en/dumm_dumm_dumm</field>
+    <field name="initial_release_date">2001-04-13</field>
     <field name="name">Dumm Dumm Dumm</field>
     <field name="directed_by">Azhagam Perumal</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-04-13</field>
   </doc>
   <doc>
     <field name="id">/en/dummy_2003</field>
+    <field name="initial_release_date">2003-09-12</field>
     <field name="name">Dummy</field>
     <field name="directed_by">Greg Pritikin</field>
     <field name="genre">Romantic comedy</field>
@@ -8184,50 +8178,49 @@
     <field name="genre">Comedy</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2003-09-12</field>
   </doc>
   <doc>
     <field name="id">/en/dumplings</field>
+    <field name="initial_release_date">2004-08-04</field>
     <field name="name">Dumplings</field>
     <field name="directed_by">Fruit Chan</field>
     <field name="genre">Horror</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2004-08-04</field>
   </doc>
   <doc>
     <field name="id">/en/duplex</field>
+    <field name="initial_release_date">2003-09-26</field>
     <field name="name">Duplex</field>
     <field name="directed_by">Danny DeVito</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Comedy</field>
-    <field name="initial_release_date">2003-09-26</field>
   </doc>
   <doc>
     <field name="id">/en/dus</field>
+    <field name="initial_release_date">2005-07-08</field>
     <field name="name">Dus</field>
     <field name="directed_by">Anubhav Sinha</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Bollywood</field>
-    <field name="initial_release_date">2005-07-08</field>
   </doc>
   <doc>
     <field name="id">/en/dust_2001</field>
+    <field name="initial_release_date">2001-08-29</field>
     <field name="name">Dust</field>
     <field name="directed_by">Milcho Manchevski</field>
     <field name="genre">Western</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2001-08-29</field>
   </doc>
   <doc>
     <field name="id">/wikipedia/en_title/E_$0028film$0029</field>
+    <field name="initial_release_date">2006-10-21</field>
     <field name="name">E</field>
     <field name="directed_by">S. P. Jananathan</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-10-21</field>
   </doc>
   <doc>
     <field name="id">/en/earthlings</field>
@@ -8240,13 +8233,13 @@
   </doc>
   <doc>
     <field name="id">/en/eastern_promises</field>
+    <field name="initial_release_date">2007-09-08</field>
     <field name="name">Eastern Promises</field>
     <field name="directed_by">David Cronenberg</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2007-09-08</field>
   </doc>
   <doc>
     <field name="id">/en/eating_out</field>
@@ -8262,6 +8255,7 @@
   </doc>
   <doc>
     <field name="id">/en/echoes_of_innocence</field>
+    <field name="initial_release_date">2005-09-09</field>
     <field name="name">Echoes of Innocence</field>
     <field name="directed_by">Nathan Todd Sims</field>
     <field name="genre">Thriller</field>
@@ -8270,17 +8264,17 @@
     <field name="genre">Mystery</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-09-09</field>
   </doc>
   <doc>
     <field name="id">/en/eddies_million_dollar_cook_off</field>
+    <field name="initial_release_date">2003-07-18</field>
     <field name="name">Eddie's Million Dollar Cook-Off</field>
     <field name="directed_by">Paul Hoen</field>
     <field name="genre">Teen film</field>
-    <field name="initial_release_date">2003-07-18</field>
   </doc>
   <doc>
     <field name="id">/en/edison_2006</field>
+    <field name="initial_release_date">2005-03-05</field>
     <field name="name">Edison</field>
     <field name="directed_by">David J. Burke</field>
     <field name="genre">Thriller</field>
@@ -8288,10 +8282,10 @@
     <field name="genre">Mystery</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-03-05</field>
   </doc>
   <doc>
     <field name="id">/en/edmond_2006</field>
+    <field name="initial_release_date">2005-09-02</field>
     <field name="name">Edmond</field>
     <field name="directed_by">Stuart Gordon</field>
     <field name="genre">Thriller</field>
@@ -8299,31 +8293,30 @@
     <field name="genre">Indie film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2005-09-02</field>
   </doc>
   <doc>
     <field name="id">/en/eight_below</field>
+    <field name="initial_release_date">2006-02-17</field>
     <field name="name">Eight Below</field>
     <field name="directed_by">Frank Marshall</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Family</field>
     <field name="genre">Drama</field>
-    <field name="initial_release_date">2006-02-17</field>
   </doc>
   <doc>
     <field name="id">/en/eight_crazy_nights</field>
-    <field name="name">Eight Crazy Nights</field>
+    <field name="directed_by">Seth Kearsley</field>
     <field name="initial_release_date">2002-11-27</field>
     <field name="genre">Christmas movie</field>
     <field name="genre">Musical</field>
     <field name="genre">Animation</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Seth Kearsley</field>
+    <field name="name">Eight Crazy Nights</field>
   </doc>
   <doc>
     <field name="id">/en/eight_legged_freaks</field>
-    <field name="name">Eight Legged Freaks</field>
+    <field name="directed_by">Ellory Elkayem</field>
     <field name="initial_release_date">2002-05-30</field>
     <field name="genre">Horror</field>
     <field name="genre">Natural horror film</field>
@@ -8334,11 +8327,11 @@
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror comedy</field>
-    <field name="directed_by">Ellory Elkayem</field>
+    <field name="name">Eight Legged Freaks</field>
   </doc>
   <doc>
     <field name="id">/en/ek_ajnabee</field>
-    <field name="name">Ek Ajnabee</field>
+    <field name="directed_by">Apoorva Lakhia</field>
     <field name="initial_release_date">2005-12-09</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
@@ -8346,11 +8339,11 @@
     <field name="genre">Action Thriller</field>
     <field name="genre">Drama</field>
     <field name="genre">Bollywood</field>
-    <field name="directed_by">Apoorva Lakhia</field>
+    <field name="name">Ek Ajnabee</field>
   </doc>
   <doc>
     <field name="id">/en/eklavya_the_royal_guard</field>
-    <field name="name">Eklavya: The Royal Guard</field>
+    <field name="directed_by">Vidhu Vinod Chopra</field>
     <field name="initial_release_date">2007-02-16</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Romance Film</field>
@@ -8359,93 +8352,94 @@
     <field name="genre">Thriller</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Vidhu Vinod Chopra</field>
+    <field name="name">Eklavya: The Royal Guard</field>
   </doc>
   <doc>
     <field name="id">/en/el_abrazo_partido</field>
-    <field name="name">Lost Embrace</field>
+    <field name="directed_by">Daniel Burman</field>
     <field name="initial_release_date">2004-02-09</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Daniel Burman</field>
+    <field name="name">Lost Embrace</field>
   </doc>
   <doc>
     <field name="id">/en/el_aura</field>
-    <field name="name">El Aura</field>
+    <field name="directed_by">Fabián Bielinsky</field>
     <field name="initial_release_date">2005-09-15</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Fabián Bielinsky</field>
+    <field name="name">El Aura</field>
   </doc>
   <doc>
     <field name="id">/en/el_crimen_del_padre_amaro</field>
-    <field name="name">The Crime of Father Amaro</field>
+    <field name="directed_by">Carlos Carrera</field>
     <field name="initial_release_date">2002-08-16</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Carlos Carrera</field>
+    <field name="name">The Crime of Father Amaro</field>
   </doc>
   <doc>
     <field name="id">/en/el_juego_de_arcibel</field>
-    <field name="name">El juego de Arcibel</field>
+    <field name="directed_by">Alberto Lecchi</field>
     <field name="initial_release_date">2003-05-29</field>
     <field name="genre">Indie film</field>
     <field name="genre">Political drama</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Alberto Lecchi</field>
+    <field name="name">El juego de Arcibel</field>
   </doc>
   <doc>
     <field name="id">/wikipedia/en_title/El_Muerto_$0028film$0029</field>
-    <field name="name">El Muerto</field>
+    <field name="directed_by">Brian Cox</field>
     <field name="genre">Indie film</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Thriller</field>
     <field name="genre">Superhero movie</field>
     <field name="genre">Action/Adventure</field>
-    <field name="directed_by">Brian Cox</field>
+    <field name="name">El Muerto</field>
   </doc>
   <doc>
     <field name="id">/en/el_principio_de_arquimedes</field>
-    <field name="name">The Archimedes Principle</field>
+    <field name="directed_by">Gerardo Herrero</field>
     <field name="initial_release_date">2004-03-26</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Gerardo Herrero</field>
+    <field name="name">The Archimedes Principle</field>
   </doc>
   <doc>
     <field name="id">/en/el_raton_perez</field>
-    <field name="name">The Hairy Tooth Fairy</field>
+    <field name="directed_by">Juan Pablo Buscarini</field>
     <field name="initial_release_date">2006-07-13</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Animation</field>
     <field name="genre">Comedy</field>
     <field name="genre">Family</field>
-    <field name="directed_by">Juan Pablo Buscarini</field>
+    <field name="name">The Hairy Tooth Fairy</field>
   </doc>
   <doc>
     <field name="id">/en/election_2005</field>
-    <field name="name">Election</field>
+    <field name="directed_by">Johnnie To</field>
     <field name="initial_release_date">2005-05-14</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Johnnie To</field>
+    <field name="name">Election</field>
   </doc>
   <doc>
     <field name="id">/en/election_2</field>
-    <field name="name">Election 2</field>
+    <field name="directed_by">Johnnie To</field>
     <field name="initial_release_date">2006-04-04</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Johnnie To</field>
+    <field name="name">Election 2</field>
   </doc>
   <doc>
     <field name="id">/en/daft_punks_electroma</field>
-    <field name="name">Daft Punk's Electroma</field>
+    <field name="directed_by">Thomas Bangalter</field>
+    <field name="directed_by">Guy-Manuel de Homem-Christo</field>
     <field name="initial_release_date">2006-05-21</field>
     <field name="genre">Indie film</field>
     <field name="genre">Silent film</field>
@@ -8455,12 +8449,11 @@
     <field name="genre">Experimental film</field>
     <field name="genre">Road movie</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Thomas Bangalter</field>
-    <field name="directed_by">Guy-Manuel de Homem-Christo</field>
+    <field name="name">Daft Punk's Electroma</field>
   </doc>
   <doc>
     <field name="id">/en/elektra_2005</field>
-    <field name="name">Elektra</field>
+    <field name="directed_by">Rob Bowman</field>
     <field name="initial_release_date">2005-01-08</field>
     <field name="genre">Action Film</field>
     <field name="genre">Action/Adventure</field>
@@ -8469,40 +8462,40 @@
     <field name="genre">Thriller</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Crime Fiction</field>
-    <field name="directed_by">Rob Bowman</field>
+    <field name="name">Elektra</field>
   </doc>
   <doc>
     <field name="id">/en/elephant_2003</field>
-    <field name="name">Elephant</field>
+    <field name="directed_by">Gus Van Sant</field>
     <field name="initial_release_date">2003-05-18</field>
     <field name="genre">Teen film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Gus Van Sant</field>
+    <field name="name">Elephant</field>
   </doc>
   <doc>
     <field name="id">/en/elephants_dream</field>
-    <field name="name">Elephants Dream</field>
+    <field name="directed_by">Bassam Kurdali</field>
     <field name="initial_release_date">2006-03-24</field>
     <field name="genre">Short Film</field>
     <field name="genre">Computer Animation</field>
-    <field name="directed_by">Bassam Kurdali</field>
+    <field name="name">Elephants Dream</field>
   </doc>
   <doc>
     <field name="id">/en/elf_2003</field>
-    <field name="name">Elf</field>
+    <field name="directed_by">Jon Favreau</field>
     <field name="initial_release_date">2003-10-09</field>
     <field name="genre">Family</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Fantasy</field>
-    <field name="directed_by">Jon Favreau</field>
+    <field name="name">Elf</field>
   </doc>
   <doc>
     <field name="id">/en/elizabethtown_2005</field>
-    <field name="name">Elizabethtown</field>
+    <field name="directed_by">Cameron Crowe</field>
     <field name="initial_release_date">2005-09-04</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
@@ -8510,11 +8503,11 @@
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Cameron Crowe</field>
+    <field name="name">Elizabethtown</field>
   </doc>
   <doc>
     <field name="id">/en/elviras_haunted_hills</field>
-    <field name="name">Elvira's Haunted Hills</field>
+    <field name="directed_by">Sam Irvin</field>
     <field name="initial_release_date">2001-06-23</field>
     <field name="genre">Parody</field>
     <field name="genre">Horror</field>
@@ -8522,89 +8515,89 @@
     <field name="genre">Haunted House Film</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Sam Irvin</field>
+    <field name="name">Elvira's Haunted Hills</field>
   </doc>
   <doc>
     <field name="id">/en/elvis_has_left_the_building_2004</field>
-    <field name="name">Elvis Has Left the Building</field>
+    <field name="directed_by">Joel Zwick</field>
     <field name="genre">Action Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Road movie</field>
     <field name="genre">Crime Comedy</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Joel Zwick</field>
+    <field name="name">Elvis Has Left the Building</field>
   </doc>
   <doc>
     <field name="id">/en/empire_2002</field>
-    <field name="name">Empire</field>
+    <field name="directed_by">Franc. Reyes</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Indie film</field>
     <field name="genre">Action</field>
     <field name="genre">Drama</field>
     <field name="genre">Action Thriller</field>
-    <field name="directed_by">Franc. Reyes</field>
+    <field name="name">Empire</field>
   </doc>
   <doc>
     <field name="id">/en/employee_of_the_month_2004</field>
-    <field name="name">Employee of the Month</field>
+    <field name="directed_by">Mitch Rouse</field>
     <field name="initial_release_date">2004-01-17</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Heist film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Mitch Rouse</field>
+    <field name="name">Employee of the Month</field>
   </doc>
   <doc>
     <field name="id">/en/employee_of_the_month</field>
-    <field name="name">Employee of the Month</field>
+    <field name="directed_by">Greg Coolidge</field>
     <field name="initial_release_date">2006-10-06</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Greg Coolidge</field>
+    <field name="name">Employee of the Month</field>
   </doc>
   <doc>
     <field name="id">/en/empress_chung</field>
-    <field name="name">Empress Chung</field>
+    <field name="directed_by">Nelson Shin</field>
     <field name="initial_release_date">2005-08-12</field>
     <field name="genre">Animation</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Nelson Shin</field>
+    <field name="name">Empress Chung</field>
   </doc>
   <doc>
     <field name="id">/en/emr</field>
-    <field name="name">EMR</field>
+    <field name="directed_by">Danny McCullough</field>
+    <field name="directed_by">James Erskine</field>
     <field name="initial_release_date">2004-03-08</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Psychological thriller</field>
-    <field name="directed_by">Danny McCullough</field>
-    <field name="directed_by">James Erskine</field>
+    <field name="name">EMR</field>
   </doc>
   <doc>
     <field name="id">/en/en_route</field>
-    <field name="name">En Route</field>
+    <field name="directed_by">Jan Krüger</field>
     <field name="initial_release_date">2004-06-17</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jan Krüger</field>
+    <field name="name">En Route</field>
   </doc>
   <doc>
     <field name="id">/en/enakku_20_unakku_18</field>
-    <field name="name">Enakku 20 Unakku 18</field>
+    <field name="directed_by">Jyothi Krishna</field>
     <field name="initial_release_date">2003-12-19</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Jyothi Krishna</field>
+    <field name="name">Enakku 20 Unakku 18</field>
   </doc>
   <doc>
     <field name="id">/en/enchanted_2007</field>
-    <field name="name">Enchanted</field>
+    <field name="directed_by">Kevin Lima</field>
     <field name="initial_release_date">2007-10-20</field>
     <field name="genre">Musical</field>
     <field name="genre">Fantasy</field>
@@ -8616,11 +8609,11 @@
     <field name="genre">Drama</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Kevin Lima</field>
+    <field name="name">Enchanted</field>
   </doc>
   <doc>
     <field name="id">/en/end_of_the_spear</field>
-    <field name="name">End of the Spear</field>
+    <field name="directed_by">Jim Hanon</field>
     <field name="genre">Docudrama</field>
     <field name="genre">Christian film</field>
     <field name="genre">Indie film</field>
@@ -8629,11 +8622,11 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Inspirational Drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jim Hanon</field>
+    <field name="name">End of the Spear</field>
   </doc>
   <doc>
     <field name="id">/en/enduring_love</field>
-    <field name="name">Enduring Love</field>
+    <field name="directed_by">Roger Michell</field>
     <field name="initial_release_date">2004-09-04</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
@@ -8642,11 +8635,11 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Roger Michell</field>
+    <field name="name">Enduring Love</field>
   </doc>
   <doc>
     <field name="id">/en/enemy_at_the_gates</field>
-    <field name="name">Enemy at the Gates</field>
+    <field name="directed_by">Jean-Jacques Annaud</field>
     <field name="initial_release_date">2001-02-07</field>
     <field name="genre">War film</field>
     <field name="genre">Romance Film</field>
@@ -8654,11 +8647,11 @@
     <field name="genre">Historical fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Jean-Jacques Annaud</field>
+    <field name="name">Enemy at the Gates</field>
   </doc>
   <doc>
     <field name="id">/en/enigma_2001</field>
-    <field name="name">Enigma</field>
+    <field name="directed_by">Michael Apted</field>
     <field name="initial_release_date">2001-01-22</field>
     <field name="genre">Thriller</field>
     <field name="genre">War film</field>
@@ -8666,19 +8659,19 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Apted</field>
+    <field name="name">Enigma</field>
   </doc>
   <doc>
     <field name="id">/en/enigma_the_best_of_jeff_hardy</field>
-    <field name="name">Enigma: The Best of Jeff Hardy</field>
+    <field name="directed_by">Craig Leathers</field>
     <field name="initial_release_date">2005-10-04</field>
     <field name="genre">Sports</field>
     <field name="genre">Action Film</field>
-    <field name="directed_by">Craig Leathers</field>
+    <field name="name">Enigma: The Best of Jeff Hardy</field>
   </doc>
   <doc>
     <field name="id">/en/enron_the_smartest_guys_in_the_room</field>
-    <field name="name">Enron: The Smartest Guys in the Room</field>
+    <field name="directed_by">Alex Gibney</field>
     <field name="initial_release_date">2005-04-22</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Indie film</field>
@@ -8688,20 +8681,20 @@
     <field name="genre">Finance &amp;amp; Investing</field>
     <field name="genre">Law &amp;amp; Crime</field>
     <field name="genre">Biographical film</field>
-    <field name="directed_by">Alex Gibney</field>
+    <field name="name">Enron: The Smartest Guys in the Room</field>
   </doc>
   <doc>
     <field name="id">/en/envy_2004</field>
-    <field name="name">Envy</field>
+    <field name="directed_by">Barry Levinson</field>
     <field name="initial_release_date">2004-04-30</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Cult film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Barry Levinson</field>
+    <field name="name">Envy</field>
   </doc>
   <doc>
     <field name="id">/en/equilibrium_2002</field>
-    <field name="name">Equilibrium</field>
+    <field name="directed_by">Kurt Wimmer</field>
     <field name="initial_release_date">2002-12-06</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Dystopia</field>
@@ -8709,11 +8702,11 @@
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Kurt Wimmer</field>
+    <field name="name">Equilibrium</field>
   </doc>
   <doc>
     <field name="id">/en/eragon_2006</field>
-    <field name="name">Eragon</field>
+    <field name="directed_by">Stefen Fangmeier</field>
     <field name="initial_release_date">2006-12-13</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
@@ -8721,11 +8714,11 @@
     <field name="genre">Sword and sorcery</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Stefen Fangmeier</field>
+    <field name="name">Eragon</field>
   </doc>
   <doc>
     <field name="id">/en/erin_brockovich_2000</field>
-    <field name="name">Erin Brockovich</field>
+    <field name="directed_by">Steven Soderbergh</field>
     <field name="initial_release_date">2000-03-14</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Legal drama</field>
@@ -8736,22 +8729,22 @@
     <field name="genre">Feminist Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Drama film</field>
-    <field name="directed_by">Steven Soderbergh</field>
+    <field name="name">Erin Brockovich</field>
   </doc>
   <doc>
     <field name="id">/en/eros_2004</field>
-    <field name="name">Eros</field>
+    <field name="directed_by">Michelangelo Antonioni</field>
+    <field name="directed_by">Steven Soderbergh</field>
+    <field name="directed_by">Wong Kar-wai</field>
     <field name="initial_release_date">2004-09-10</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Erotica</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michelangelo Antonioni</field>
-    <field name="directed_by">Steven Soderbergh</field>
-    <field name="directed_by">Wong Kar-wai</field>
+    <field name="name">Eros</field>
   </doc>
   <doc>
     <field name="id">/en/escaflowne</field>
-    <field name="name">Escaflowne</field>
+    <field name="directed_by">Kazuki Akane</field>
     <field name="initial_release_date">2000-06-24</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Science Fiction</field>
@@ -8761,48 +8754,48 @@
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Kazuki Akane</field>
+    <field name="name">Escaflowne</field>
   </doc>
   <doc>
     <field name="id">/en/escape_2006</field>
-    <field name="name">A Few Days Later</field>
-    <field name="genre">Drama</field>
     <field name="directed_by">Niki Karimi</field>
+    <field name="genre">Drama</field>
+    <field name="name">A Few Days Later</field>
   </doc>
   <doc>
     <field name="id">/en/eternal_sunshine_of_the_spotless_mind</field>
-    <field name="name">Eternal Sunshine of the Spotless Mind</field>
+    <field name="directed_by">Michel Gondry</field>
     <field name="initial_release_date">2004-03-19</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michel Gondry</field>
+    <field name="name">Eternal Sunshine of the Spotless Mind</field>
   </doc>
   <doc>
     <field name="id">/en/eulogy_2004</field>
-    <field name="name">Eulogy</field>
+    <field name="directed_by">Michael Clancy</field>
     <field name="initial_release_date">2004-10-15</field>
     <field name="genre">LGBT</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Michael Clancy</field>
+    <field name="name">Eulogy</field>
   </doc>
   <doc>
     <field name="id">/en/eurotrip</field>
-    <field name="name">EuroTrip</field>
+    <field name="directed_by">Jeff Schaffer</field>
+    <field name="directed_by">Alec Berg</field>
+    <field name="directed_by">David Mandel</field>
     <field name="initial_release_date">2004-02-20</field>
     <field name="genre">Sex comedy</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Teen film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Jeff Schaffer</field>
-    <field name="directed_by">Alec Berg</field>
-    <field name="directed_by">David Mandel</field>
+    <field name="name">EuroTrip</field>
   </doc>
   <doc>
     <field name="id">/en/evan_almighty</field>
-    <field name="name">Evan Almighty</field>
+    <field name="directed_by">Tom Shadyac</field>
     <field name="initial_release_date">2007-06-21</field>
     <field name="genre">Religious Film</field>
     <field name="genre">Parody</field>
@@ -8811,36 +8804,38 @@
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Heavenly Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Tom Shadyac</field>
+    <field name="name">Evan Almighty</field>
   </doc>
   <doc>
     <field name="id">/en/everlasting_regret</field>
-    <field name="name">Everlasting Regret</field>
+    <field name="directed_by">Stanley Kwan</field>
     <field name="initial_release_date">2005-09-08</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Chinese Movies</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Stanley Kwan</field>
+    <field name="name">Everlasting Regret</field>
   </doc>
   <doc>
     <field name="id">/en/everybody_famous</field>
-    <field name="name">Everybody's Famous!</field>
+    <field name="directed_by">Dominique Deruddere</field>
     <field name="initial_release_date">2000-04-12</field>
     <field name="genre">World cinema</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Dominique Deruddere</field>
+    <field name="name">Everybody's Famous!</field>
   </doc>
   <doc>
     <field name="id">/en/everymans_feast</field>
-    <field name="name">Everyman's Feast</field>
+    <field name="directed_by">Fritz Lehner</field>
     <field name="initial_release_date">2002-01-25</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Fritz Lehner</field>
+    <field name="name">Everyman's Feast</field>
   </doc>
   <doc>
     <field name="id">/en/everyones_hero</field>
-    <field name="name">Everyone's Hero</field>
+    <field name="directed_by">Christopher Reeve</field>
+    <field name="directed_by">Daniel St. Pierre</field>
+    <field name="directed_by">Colin Brady</field>
     <field name="initial_release_date">2006-09-15</field>
     <field name="genre">Computer Animation</field>
     <field name="genre">Family</field>
@@ -8849,27 +8844,25 @@
     <field name="genre">Sports</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Family-Oriented Adventure</field>
-    <field name="directed_by">Christopher Reeve</field>
-    <field name="directed_by">Daniel St. Pierre</field>
-    <field name="directed_by">Colin Brady</field>
+    <field name="name">Everyone's Hero</field>
   </doc>
   <doc>
     <field name="id">/en/everything_2005</field>
-    <field name="name">Everything</field>
     <field name="initial_release_date">2005-11-22</field>
     <field name="genre">Music video</field>
+    <field name="name">Everything</field>
   </doc>
   <doc>
     <field name="id">/en/everything_goes</field>
-    <field name="name">Everything Goes</field>
+    <field name="directed_by">Andrew Kotatko</field>
     <field name="initial_release_date">2004-06-14</field>
     <field name="genre">Short Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Andrew Kotatko</field>
+    <field name="name">Everything Goes</field>
   </doc>
   <doc>
     <field name="id">/en/everything_is_illuminated_2005</field>
-    <field name="name">Everything Is Illuminated</field>
+    <field name="directed_by">Liev Schreiber</field>
     <field name="initial_release_date">2005-09-16</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Film adaptation</field>
@@ -8878,31 +8871,31 @@
     <field name="genre">Road movie</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Liev Schreiber</field>
+    <field name="name">Everything Is Illuminated</field>
   </doc>
   <doc>
     <field name="id">/en/evilenko</field>
-    <field name="name">Evilenko</field>
+    <field name="directed_by">David Grieco</field>
     <field name="initial_release_date">2004-04-16</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Crime Fiction</field>
-    <field name="directed_by">David Grieco</field>
+    <field name="name">Evilenko</field>
   </doc>
   <doc>
     <field name="id">/en/evolution_2001</field>
-    <field name="name">Evolution</field>
+    <field name="directed_by">Ivan Reitman</field>
     <field name="initial_release_date">2001-06-08</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Parody</field>
     <field name="genre">Action Film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Ivan Reitman</field>
+    <field name="name">Evolution</field>
   </doc>
   <doc>
     <field name="id">/en/exit_wounds</field>
-    <field name="name">Exit Wounds</field>
+    <field name="directed_by">Andrzej Bartkowiak</field>
     <field name="initial_release_date">2001-03-16</field>
     <field name="genre">Action Film</field>
     <field name="genre">Mystery</field>
@@ -8910,22 +8903,22 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
-    <field name="directed_by">Andrzej Bartkowiak</field>
+    <field name="name">Exit Wounds</field>
   </doc>
   <doc>
     <field name="id">/en/exorcist_the_beginning</field>
-    <field name="name">Exorcist: The Beginning</field>
+    <field name="directed_by">Renny Harlin</field>
     <field name="initial_release_date">2004-08-18</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Cult film</field>
     <field name="genre">Historical period drama</field>
-    <field name="directed_by">Renny Harlin</field>
+    <field name="name">Exorcist: The Beginning</field>
   </doc>
   <doc>
     <field name="id">/en/extreme_days</field>
-    <field name="name">Extreme Days</field>
+    <field name="directed_by">Eric Hannah</field>
     <field name="initial_release_date">2001-09-28</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Action Film</field>
@@ -8934,11 +8927,11 @@
     <field name="genre">Road movie</field>
     <field name="genre">Teen film</field>
     <field name="genre">Sports</field>
-    <field name="directed_by">Eric Hannah</field>
+    <field name="name">Extreme Days</field>
   </doc>
   <doc>
     <field name="id">/en/extreme_ops</field>
-    <field name="name">Extreme Ops</field>
+    <field name="directed_by">Christian Duguay</field>
     <field name="initial_release_date">2002-11-27</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
@@ -8947,40 +8940,40 @@
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Thriller</field>
     <field name="genre">Chase Movie</field>
-    <field name="directed_by">Christian Duguay</field>
+    <field name="name">Extreme Ops</field>
   </doc>
   <doc>
     <field name="id">/en/face_2004</field>
-    <field name="name">Face</field>
+    <field name="directed_by">Yoo Sang-gon</field>
     <field name="initial_release_date">2004-06-11</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Yoo Sang-gon</field>
+    <field name="name">Face</field>
   </doc>
   <doc>
     <field name="id">/en/la_finestra_di_fronte</field>
-    <field name="name">Facing Windows</field>
+    <field name="directed_by">Ferzan Özpetek</field>
     <field name="initial_release_date">2003-02-28</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ferzan Özpetek</field>
+    <field name="name">Facing Windows</field>
   </doc>
   <doc>
     <field name="id">/en/factory_girl</field>
-    <field name="name">Factory Girl</field>
+    <field name="directed_by">George Hickenlooper</field>
     <field name="initial_release_date">2006-12-29</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">George Hickenlooper</field>
+    <field name="name">Factory Girl</field>
   </doc>
   <doc>
     <field name="id">/en/fahrenheit_9_11</field>
-    <field name="name">Fahrenheit 9/11</field>
+    <field name="directed_by">Michael Moore</field>
     <field name="initial_release_date">2004-05-17</field>
     <field name="genre">Indie film</field>
     <field name="genre">Documentary film</field>
@@ -8988,17 +8981,17 @@
     <field name="genre">Culture &amp;amp; Society</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Michael Moore</field>
+    <field name="name">Fahrenheit 9/11</field>
   </doc>
   <doc>
     <field name="id">/en/fahrenheit_9_111_2</field>
-    <field name="name">Fahrenheit 9/11½</field>
-    <field name="genre">Documentary film</field>
     <field name="directed_by">Michael Moore</field>
+    <field name="genre">Documentary film</field>
+    <field name="name">Fahrenheit 9/11½</field>
   </doc>
   <doc>
     <field name="id">/en/fail_safe_2000</field>
-    <field name="name">Fail Safe</field>
+    <field name="directed_by">Stephen Frears</field>
     <field name="initial_release_date">2000-04-09</field>
     <field name="genre">Thriller</field>
     <field name="genre">Science Fiction</field>
@@ -9008,52 +9001,52 @@
     <field name="genre">Psychological thriller</field>
     <field name="genre">Political drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Stephen Frears</field>
+    <field name="name">Fail Safe</field>
   </doc>
   <doc>
     <field name="id">/en/failan</field>
-    <field name="name">Failan</field>
+    <field name="directed_by">Song Hae-sung</field>
     <field name="initial_release_date">2001-04-28</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Song Hae-sung</field>
+    <field name="name">Failan</field>
   </doc>
   <doc>
     <field name="id">/en/failure_to_launch</field>
-    <field name="name">Failure to Launch</field>
+    <field name="directed_by">Tom Dey</field>
     <field name="initial_release_date">2006-03-10</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Tom Dey</field>
+    <field name="name">Failure to Launch</field>
   </doc>
   <doc>
     <field name="id">/en/fake_2003</field>
-    <field name="name">Fake</field>
+    <field name="directed_by">Thanakorn Pongsuwan</field>
     <field name="initial_release_date">2003-04-28</field>
     <field name="genre">Romance Film</field>
-    <field name="directed_by">Thanakorn Pongsuwan</field>
+    <field name="name">Fake</field>
   </doc>
   <doc>
     <field name="id">/en/falcons_2002</field>
-    <field name="name">Falcons</field>
-    <field name="genre">Drama</field>
     <field name="directed_by">Friðrik Þór Friðriksson</field>
+    <field name="genre">Drama</field>
+    <field name="name">Falcons</field>
   </doc>
   <doc>
     <field name="id">/en/fallen_2006</field>
-    <field name="name">Fallen</field>
+    <field name="directed_by">Mikael Salomon</field>
+    <field name="directed_by">Kevin Kerslake</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Mikael Salomon</field>
-    <field name="directed_by">Kevin Kerslake</field>
+    <field name="name">Fallen</field>
   </doc>
   <doc>
     <field name="id">/en/family_-_ties_of_blood</field>
-    <field name="name">Family</field>
+    <field name="directed_by">Rajkumar Santoshi</field>
     <field name="initial_release_date">2006-01-11</field>
     <field name="genre">Musical</field>
     <field name="genre">Crime Fiction</field>
@@ -9062,33 +9055,33 @@
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="directed_by">Rajkumar Santoshi</field>
+    <field name="name">Family</field>
   </doc>
   <doc>
     <field name="id">/en/familywala</field>
-    <field name="name">Familywala</field>
+    <field name="directed_by">Neeraj Vora</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="directed_by">Neeraj Vora</field>
+    <field name="name">Familywala</field>
   </doc>
   <doc>
     <field name="id">/en/fan_chan</field>
-    <field name="name">Fan Chan</field>
-    <field name="initial_release_date">2003-10-03</field>
-    <field name="genre">Comedy</field>
-    <field name="genre">Romance Film</field>
     <field name="directed_by">Vitcha Gojiew</field>
     <field name="directed_by">Witthaya Thongyooyong</field>
     <field name="directed_by">Komgrit Triwimol</field>
     <field name="directed_by">Nithiwat Tharathorn</field>
     <field name="directed_by">Songyos Sugmakanan</field>
     <field name="directed_by">Adisorn Tresirikasem</field>
+    <field name="initial_release_date">2003-10-03</field>
+    <field name="genre">Comedy</field>
+    <field name="genre">Romance Film</field>
+    <field name="name">Fan Chan</field>
   </doc>
   <doc>
     <field name="id">/en/fanaa</field>
-    <field name="name">Fanaa</field>
+    <field name="directed_by">Kunal Kohli</field>
     <field name="initial_release_date">2006-05-26</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
@@ -9096,116 +9089,116 @@
     <field name="genre">Bollywood</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Kunal Kohli</field>
+    <field name="name">Fanaa</field>
   </doc>
   <doc>
     <field name="id">/en/fantastic_four_2005</field>
-    <field name="name">Fantastic Four</field>
+    <field name="directed_by">Tim Story</field>
     <field name="initial_release_date">2005-06-29</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Action Film</field>
-    <field name="directed_by">Tim Story</field>
+    <field name="name">Fantastic Four</field>
   </doc>
   <doc>
     <field name="id">/en/fantastic_four_and_the_silver_surfer</field>
-    <field name="name">Fantastic Four: Rise of the Silver Surfer</field>
+    <field name="directed_by">Tim Story</field>
     <field name="initial_release_date">2007-06-12</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
-    <field name="directed_by">Tim Story</field>
+    <field name="name">Fantastic Four: Rise of the Silver Surfer</field>
   </doc>
   <doc>
     <field name="id">/en/fantastic_mr_fox_2007</field>
-    <field name="name">Fantastic Mr. Fox</field>
+    <field name="directed_by">Wes Anderson</field>
     <field name="initial_release_date">2009-10-14</field>
     <field name="genre">Animation</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Family</field>
-    <field name="directed_by">Wes Anderson</field>
+    <field name="name">Fantastic Mr. Fox</field>
   </doc>
   <doc>
     <field name="id">/en/faq_frequently_asked_questions</field>
-    <field name="name">FAQ: Frequently Asked Questions</field>
+    <field name="directed_by">Carlos Atanes</field>
     <field name="initial_release_date">2004-10-12</field>
     <field name="genre">Science Fiction</field>
-    <field name="directed_by">Carlos Atanes</field>
+    <field name="name">FAQ: Frequently Asked Questions</field>
   </doc>
   <doc>
     <field name="id">/en/far_cry_2008</field>
-    <field name="name">Far Cry</field>
+    <field name="directed_by">Uwe Boll</field>
     <field name="initial_release_date">2008-10-02</field>
     <field name="genre">Action Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Adventure Film</field>
-    <field name="directed_by">Uwe Boll</field>
+    <field name="name">Far Cry</field>
   </doc>
   <doc>
     <field name="id">/en/far_from_heaven</field>
-    <field name="name">Far from Heaven</field>
+    <field name="directed_by">Todd Haynes</field>
     <field name="initial_release_date">2002-09-01</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Melodrama</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Todd Haynes</field>
+    <field name="name">Far from Heaven</field>
   </doc>
   <doc>
     <field name="id">/en/farce_of_the_penguins</field>
-    <field name="name">Farce of the Penguins</field>
+    <field name="directed_by">Bob Saget</field>
     <field name="genre">Parody</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Adventure Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Bob Saget</field>
+    <field name="name">Farce of the Penguins</field>
   </doc>
   <doc>
     <field name="id">/en/eagles_farewell_1_tour_live_from_melbourne</field>
-    <field name="name">Eagles: Farewell 1 Tour-Live from Melbourne</field>
+    <field name="directed_by">Carol Dodds</field>
     <field name="initial_release_date">2005-06-14</field>
     <field name="genre">Music video</field>
-    <field name="directed_by">Carol Dodds</field>
+    <field name="name">Eagles: Farewell 1 Tour-Live from Melbourne</field>
   </doc>
   <doc>
     <field name="id">/en/fat_albert</field>
-    <field name="name">Fat Albert</field>
+    <field name="directed_by">Joel Zwick</field>
     <field name="initial_release_date">2004-12-12</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Joel Zwick</field>
+    <field name="name">Fat Albert</field>
   </doc>
   <doc>
     <field name="id">/en/fat_pizza_the_movie</field>
-    <field name="name">Fat Pizza</field>
-    <field name="genre">Comedy</field>
     <field name="directed_by">Paul Fenech</field>
+    <field name="genre">Comedy</field>
+    <field name="name">Fat Pizza</field>
   </doc>
   <doc>
     <field name="id">/en/fatwa_2006</field>
-    <field name="name">Fatwa</field>
+    <field name="directed_by">John Carter</field>
     <field name="initial_release_date">2006-03-24</field>
     <field name="genre">Thriller</field>
     <field name="genre">Political thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">John Carter</field>
+    <field name="name">Fatwa</field>
   </doc>
   <doc>
     <field name="id">/en/faust_love_of_the_damned</field>
-    <field name="name">Faust: Love of the Damned</field>
+    <field name="directed_by">Brian Yuzna</field>
     <field name="initial_release_date">2000-10-12</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
-    <field name="directed_by">Brian Yuzna</field>
+    <field name="name">Faust: Love of the Damned</field>
   </doc>
   <doc>
     <field name="id">/en/fay_grim</field>
-    <field name="name">Fay Grim</field>
+    <field name="directed_by">Hal Hartley</field>
     <field name="initial_release_date">2006-09-11</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
@@ -9215,89 +9208,89 @@
     <field name="genre">Comedy</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Hal Hartley</field>
+    <field name="name">Fay Grim</field>
   </doc>
   <doc>
     <field name="id">/en/fear_and_trembling_2003</field>
-    <field name="name">Fear and Trembling</field>
+    <field name="directed_by">Alain Corneau</field>
     <field name="genre">World cinema</field>
     <field name="genre">Japanese Movies</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Alain Corneau</field>
+    <field name="name">Fear and Trembling</field>
   </doc>
   <doc>
     <field name="id">/en/fear_of_the_dark_2006</field>
-    <field name="name">Fear of the Dark</field>
+    <field name="directed_by">Glen Baisley</field>
     <field name="initial_release_date">2001-10-06</field>
     <field name="genre">Horror</field>
     <field name="genre">Mystery</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Glen Baisley</field>
+    <field name="name">Fear of the Dark</field>
   </doc>
   <doc>
     <field name="id">/en/fear_x</field>
-    <field name="name">Fear X</field>
+    <field name="directed_by">Nicolas Winding Refn</field>
     <field name="initial_release_date">2003-01-19</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Thriller</field>
-    <field name="directed_by">Nicolas Winding Refn</field>
+    <field name="name">Fear X</field>
   </doc>
   <doc>
     <field name="id">/en/feardotcom</field>
-    <field name="name">FeardotCom</field>
+    <field name="directed_by">William Malone</field>
     <field name="initial_release_date">2002-08-09</field>
     <field name="genre">Horror</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
-    <field name="directed_by">William Malone</field>
+    <field name="name">FeardotCom</field>
   </doc>
   <doc>
     <field name="id">/en/fearless</field>
-    <field name="name">Fearless</field>
+    <field name="directed_by">Ronny Yu</field>
     <field name="initial_release_date">2006-01-26</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ronny Yu</field>
+    <field name="name">Fearless</field>
   </doc>
   <doc>
     <field name="id">/en/feast</field>
-    <field name="name">Feast</field>
+    <field name="directed_by">John Gulager</field>
     <field name="initial_release_date">2006-09-22</field>
     <field name="genre">Horror</field>
     <field name="genre">Cult film</field>
     <field name="genre">Monster movie</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">John Gulager</field>
+    <field name="name">Feast</field>
   </doc>
   <doc>
     <field name="id">/en/femme_fatale_2002</field>
-    <field name="name">Femme Fatale</field>
+    <field name="directed_by">Brian De Palma</field>
     <field name="initial_release_date">2002-04-30</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Erotic thriller</field>
-    <field name="directed_by">Brian De Palma</field>
+    <field name="name">Femme Fatale</field>
   </doc>
   <doc>
     <field name="id">/en/festival_2005</field>
-    <field name="name">Festival</field>
+    <field name="directed_by">Annie Griffin</field>
     <field name="initial_release_date">2005-07-15</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Parody</field>
     <field name="genre">Comedy</field>
-    <field name="directed_by">Annie Griffin</field>
+    <field name="name">Festival</field>
   </doc>
   <doc>
     <field name="id">/en/festival_express</field>
-    <field name="name">Festival Express</field>
+    <field name="directed_by">Bob Smeaton</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Concert film</field>
     <field name="genre">History</field>
@@ -9305,11 +9298,11 @@
     <field name="genre">Indie film</field>
     <field name="genre">Rockumentary</field>
     <field name="genre">Music</field>
-    <field name="directed_by">Bob Smeaton</field>
+    <field name="name">Festival Express</field>
   </doc>
   <doc>
     <field name="id">/en/festival_in_cannes</field>
-    <field name="name">Festival in Cannes</field>
+    <field name="directed_by">Henry Jaglom</field>
     <field name="initial_release_date">2001-11-03</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Comedy-drama</field>
@@ -9317,32 +9310,32 @@
     <field name="genre">Ensemble Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Henry Jaglom</field>
+    <field name="name">Festival in Cannes</field>
   </doc>
   <doc>
     <field name="id">/en/fever_pitch_2005</field>
-    <field name="name">Fever Pitch</field>
+    <field name="directed_by">Bobby Farrelly</field>
+    <field name="directed_by">Peter Farrelly</field>
     <field name="initial_release_date">2005-04-06</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Bobby Farrelly</field>
-    <field name="directed_by">Peter Farrelly</field>
+    <field name="name">Fever Pitch</field>
   </doc>
   <doc>
     <field name="id">/en/fida</field>
-    <field name="name">Fida</field>
+    <field name="directed_by">Ken Ghosh</field>
     <field name="initial_release_date">2004-08-20</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Ken Ghosh</field>
+    <field name="name">Fida</field>
   </doc>
   <doc>
     <field name="id">/en/fido_2006</field>
-    <field name="name">Fido</field>
+    <field name="directed_by">Andrew Currie</field>
     <field name="initial_release_date">2006-09-07</field>
     <field name="genre">Horror</field>
     <field name="genre">Parody</field>
@@ -9350,106 +9343,110 @@
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="directed_by">Andrew Currie</field>
+    <field name="name">Fido</field>
   </doc>
   <doc>
+    <field name="id">/en/fighter_in_the_wind</field>
+    <field name="initial_release_date">2004-08-06</field>
+    <field name="name">Fighter in the Wind</field>
+    <field name="directed_by">Yang Yun-ho</field>
+    <field name="directed_by">Yang Yun-ho</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
     <field name="genre">War film</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="name">Fighter in the Wind</field>
-    <field name="initial_release_date">2004-08-06</field>
-    <field name="id">/en/fighter_in_the_wind</field>
-    <field name="directed_by">Yang Yun-ho</field>
-    <field name="directed_by">Yang Yun-ho</field>
   </doc>
   <doc>
+    <field name="id">/en/filantropica</field>
+    <field name="initial_release_date">2002-03-15</field>
+    <field name="name">Filantropica</field>
+    <field name="directed_by">Nae Caranfil</field>
     <field name="genre">Comedy</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Filantropica</field>
-    <field name="initial_release_date">2002-03-15</field>
-    <field name="id">/en/filantropica</field>
-    <field name="directed_by">Nae Caranfil</field>
   </doc>
   <doc>
+    <field name="id">/en/film_geek</field>
+    <field name="initial_release_date">2006-02-10</field>
+    <field name="name">Film Geek</field>
+    <field name="directed_by">James Westby</field>
     <field name="genre">Indie film</field>
     <field name="genre">Workplace Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Film Geek</field>
-    <field name="initial_release_date">2006-02-10</field>
-    <field name="id">/en/film_geek</field>
-    <field name="directed_by">James Westby</field>
   </doc>
   <doc>
-    <field name="genre">Slasher</field>
-    <field name="genre">Teen film</field>
-    <field name="genre">Supernatural</field>
-    <field name="genre">Horror</field>
-    <field name="genre">Cult film</field>
-    <field name="genre">Thriller</field>
-    <field name="name">Final Destination</field>
-    <field name="initial_release_date">2000-03-16</field>
     <field name="id">/en/final_destination</field>
+    <field name="initial_release_date">2000-03-16</field>
+    <field name="name">Final Destination</field>
     <field name="directed_by">James Wong</field>
-  </doc>
-  <doc>
-    <field name="genre">Slasher</field>
-    <field name="genre">Teen film</field>
-    <field name="genre">Horror</field>
-    <field name="genre">Thriller</field>
-    <field name="name">Final Destination 3</field>
-    <field name="initial_release_date">2006-02-09</field>
-    <field name="id">/en/final_destination_3</field>
-    <field name="directed_by">James Wong</field>
-  </doc>
-  <doc>
     <field name="genre">Slasher</field>
     <field name="genre">Teen film</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Horror</field>
     <field name="genre">Cult film</field>
     <field name="genre">Thriller</field>
-    <field name="name">Final Destination 2</field>
-    <field name="initial_release_date">2003-01-30</field>
-    <field name="id">/en/final_destination_2</field>
-    <field name="directed_by">David R. Ellis</field>
   </doc>
   <doc>
+    <field name="id">/en/final_destination_3</field>
+    <field name="initial_release_date">2006-02-09</field>
+    <field name="name">Final Destination 3</field>
+    <field name="directed_by">James Wong</field>
+    <field name="genre">Slasher</field>
+    <field name="genre">Teen film</field>
+    <field name="genre">Horror</field>
+    <field name="genre">Thriller</field>
+  </doc>
+  <doc>
+    <field name="id">/en/final_destination_2</field>
+    <field name="initial_release_date">2003-01-30</field>
+    <field name="name">Final Destination 2</field>
+    <field name="directed_by">David R. Ellis</field>
+    <field name="genre">Slasher</field>
+    <field name="genre">Teen film</field>
+    <field name="genre">Supernatural</field>
+    <field name="genre">Horror</field>
+    <field name="genre">Cult film</field>
+    <field name="genre">Thriller</field>
+  </doc>
+  <doc>
+    <field name="id">/en/final_fantasy_vii_advent_children</field>
+    <field name="initial_release_date">2005-08-31</field>
+    <field name="name">Final Fantasy VII: Advent Children</field>
+    <field name="directed_by">Tetsuya Nomura</field>
+    <field name="directed_by">Takeshi Nozue</field>
     <field name="genre">Anime</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Animation</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
-    <field name="name">Final Fantasy VII: Advent Children</field>
-    <field name="initial_release_date">2005-08-31</field>
-    <field name="id">/en/final_fantasy_vii_advent_children</field>
-    <field name="directed_by">Tetsuya Nomura</field>
-    <field name="directed_by">Takeshi Nozue</field>
   </doc>
   <doc>
+    <field name="id">/en/final_fantasy_the_spirits_within</field>
+    <field name="initial_release_date">2001-07-02</field>
+    <field name="name">Final Fantasy: The Spirits Within</field>
+    <field name="directed_by">Hironobu Sakaguchi</field>
+    <field name="directed_by">Motonori Sakakibara</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Anime</field>
     <field name="genre">Animation</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
-    <field name="name">Final Fantasy: The Spirits Within</field>
-    <field name="initial_release_date">2001-07-02</field>
-    <field name="id">/en/final_fantasy_the_spirits_within</field>
-    <field name="directed_by">Hironobu Sakaguchi</field>
-    <field name="directed_by">Motonori Sakakibara</field>
   </doc>
   <doc>
+    <field name="id">/en/final_stab</field>
+    <field name="name">Final Stab</field>
+    <field name="directed_by">David DeCoteau</field>
     <field name="genre">Horror</field>
     <field name="genre">Slasher</field>
     <field name="genre">Teen film</field>
-    <field name="name">Final Stab</field>
-    <field name="id">/en/final_stab</field>
-    <field name="directed_by">David DeCoteau</field>
   </doc>
   <doc>
+    <field name="id">/en/find_me_guilty</field>
+    <field name="initial_release_date">2006-02-16</field>
+    <field name="name">Find Me Guilty</field>
+    <field name="directed_by">Sidney Lumet</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Trial drama</field>
     <field name="genre">Docudrama</field>
@@ -9459,114 +9456,114 @@
     <field name="genre">Gangster Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Find Me Guilty</field>
-    <field name="initial_release_date">2006-02-16</field>
-    <field name="id">/en/find_me_guilty</field>
-    <field name="directed_by">Sidney Lumet</field>
   </doc>
   <doc>
+    <field name="id">/en/finders_fee</field>
+    <field name="initial_release_date">2001-06-16</field>
+    <field name="name">Finder's Fee</field>
+    <field name="directed_by">Jeff Probst</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Indie film</field>
     <field name="genre">Suspense</field>
     <field name="genre">Drama</field>
-    <field name="name">Finder's Fee</field>
-    <field name="initial_release_date">2001-06-16</field>
-    <field name="id">/en/finders_fee</field>
-    <field name="directed_by">Jeff Probst</field>
   </doc>
   <doc>
+    <field name="id">/en/finding_nemo</field>
+    <field name="initial_release_date">2003-05-30</field>
+    <field name="name">Finding Nemo</field>
+    <field name="directed_by">Andrew Stanton</field>
+    <field name="directed_by">Lee Unkrich</field>
     <field name="genre">Animation</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Family</field>
-    <field name="name">Finding Nemo</field>
-    <field name="initial_release_date">2003-05-30</field>
-    <field name="id">/en/finding_nemo</field>
-    <field name="directed_by">Andrew Stanton</field>
-    <field name="directed_by">Lee Unkrich</field>
   </doc>
   <doc>
+    <field name="id">/en/finding_neverland</field>
+    <field name="initial_release_date">2004-09-04</field>
+    <field name="name">Finding Neverland</field>
+    <field name="directed_by">Marc Forster</field>
     <field name="genre">Costume drama</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Family</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Drama</field>
-    <field name="name">Finding Neverland</field>
-    <field name="initial_release_date">2004-09-04</field>
-    <field name="id">/en/finding_neverland</field>
-    <field name="directed_by">Marc Forster</field>
   </doc>
   <doc>
+    <field name="id">/en/fingerprints</field>
+    <field name="name">Fingerprints</field>
+    <field name="directed_by">Harry Basil</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Mystery</field>
-    <field name="name">Fingerprints</field>
-    <field name="id">/en/fingerprints</field>
-    <field name="directed_by">Harry Basil</field>
   </doc>
   <doc>
+    <field name="id">/en/firewall_2006</field>
+    <field name="initial_release_date">2006-02-02</field>
+    <field name="name">Firewall</field>
+    <field name="directed_by">Richard Loncraine</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Action Thriller</field>
-    <field name="name">Firewall</field>
-    <field name="initial_release_date">2006-02-02</field>
-    <field name="id">/en/firewall_2006</field>
-    <field name="directed_by">Richard Loncraine</field>
   </doc>
   <doc>
+    <field name="id">/en/first_daughter</field>
+    <field name="initial_release_date">2004-09-24</field>
+    <field name="name">First Daughter</field>
+    <field name="directed_by">Forest Whitaker</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Teen film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">First Daughter</field>
-    <field name="initial_release_date">2004-09-24</field>
-    <field name="id">/en/first_daughter</field>
-    <field name="directed_by">Forest Whitaker</field>
   </doc>
   <doc>
+    <field name="id">/en/first_descent</field>
+    <field name="initial_release_date">2005-12-02</field>
+    <field name="name">First Descent</field>
+    <field name="directed_by">Kemp Curly</field>
+    <field name="directed_by">Kevin Harrison</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Sports</field>
     <field name="genre">Extreme Sports</field>
     <field name="genre">Biographical film</field>
-    <field name="name">First Descent</field>
-    <field name="initial_release_date">2005-12-02</field>
-    <field name="id">/en/first_descent</field>
-    <field name="directed_by">Kemp Curly</field>
-    <field name="directed_by">Kevin Harrison</field>
   </doc>
   <doc>
+    <field name="id">/en/fiza</field>
+    <field name="initial_release_date">2000-09-08</field>
+    <field name="name">Fiza</field>
+    <field name="directed_by">Khalid Mohamed</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Fiza</field>
-    <field name="initial_release_date">2000-09-08</field>
-    <field name="id">/en/fiza</field>
-    <field name="directed_by">Khalid Mohamed</field>
   </doc>
   <doc>
+    <field name="id">/en/flags_of_our_fathers_2006</field>
+    <field name="initial_release_date">2006-10-20</field>
+    <field name="name">Flags of Our Fathers</field>
+    <field name="directed_by">Clint Eastwood</field>
     <field name="genre">War film</field>
     <field name="genre">History</field>
     <field name="genre">Action Film</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Flags of Our Fathers</field>
-    <field name="initial_release_date">2006-10-20</field>
-    <field name="id">/en/flags_of_our_fathers_2006</field>
-    <field name="directed_by">Clint Eastwood</field>
   </doc>
   <doc>
-    <field name="genre">Documentary film</field>
-    <field name="name">Flight from Death</field>
-    <field name="initial_release_date">2006-09-06</field>
     <field name="id">/en/flight_from_death</field>
+    <field name="initial_release_date">2006-09-06</field>
+    <field name="name">Flight from Death</field>
     <field name="directed_by">Patrick Shen</field>
+    <field name="genre">Documentary film</field>
   </doc>
   <doc>
+    <field name="id">/en/flight_of_the_phoenix</field>
+    <field name="initial_release_date">2004-12-17</field>
+    <field name="name">Flight of the Phoenix</field>
+    <field name="directed_by">John Moore</field>
     <field name="genre">Airplanes and airports</field>
     <field name="genre">Disaster Film</field>
     <field name="genre">Action Film</field>
@@ -9574,61 +9571,61 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Film adaptation</field>
     <field name="genre">Drama</field>
-    <field name="name">Flight of the Phoenix</field>
-    <field name="initial_release_date">2004-12-17</field>
-    <field name="id">/en/flight_of_the_phoenix</field>
-    <field name="directed_by">John Moore</field>
   </doc>
   <doc>
+    <field name="id">/en/flightplan</field>
+    <field name="initial_release_date">2005-09-22</field>
+    <field name="name">Flightplan</field>
+    <field name="directed_by">Robert Schwentke</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="name">Flightplan</field>
-    <field name="initial_release_date">2005-09-22</field>
-    <field name="id">/en/flightplan</field>
-    <field name="directed_by">Robert Schwentke</field>
   </doc>
   <doc>
+    <field name="id">/en/flock_of_dodos</field>
+    <field name="name">Flock of Dodos</field>
+    <field name="directed_by">Randy Olson</field>
     <field name="genre">Documentary film</field>
     <field name="genre">History</field>
-    <field name="name">Flock of Dodos</field>
-    <field name="id">/en/flock_of_dodos</field>
-    <field name="directed_by">Randy Olson</field>
   </doc>
   <doc>
+    <field name="id">/en/fluffy_the_english_vampire_slayer</field>
+    <field name="name">Fluffy the English Vampire Slayer</field>
+    <field name="directed_by">Henry Burrows</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Short Film</field>
     <field name="genre">Fan film</field>
     <field name="genre">Parody</field>
-    <field name="name">Fluffy the English Vampire Slayer</field>
-    <field name="id">/en/fluffy_the_english_vampire_slayer</field>
-    <field name="directed_by">Henry Burrows</field>
   </doc>
   <doc>
+    <field name="id">/en/flushed_away</field>
+    <field name="initial_release_date">2006-10-22</field>
+    <field name="name">Flushed Away</field>
+    <field name="directed_by">David Bowers</field>
+    <field name="directed_by">Sam Fell</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Family-Oriented Adventure</field>
     <field name="genre">Comedy</field>
-    <field name="name">Flushed Away</field>
-    <field name="initial_release_date">2006-10-22</field>
-    <field name="id">/en/flushed_away</field>
-    <field name="directed_by">David Bowers</field>
-    <field name="directed_by">Sam Fell</field>
   </doc>
   <doc>
+    <field name="id">/en/fool_and_final</field>
+    <field name="initial_release_date">2007-06-01</field>
+    <field name="name">Fool &amp;amp; Final</field>
+    <field name="directed_by">Ahmed Khan</field>
     <field name="genre">Comedy</field>
     <field name="genre">Action Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Fool &amp;amp; Final</field>
-    <field name="initial_release_date">2007-06-01</field>
-    <field name="id">/en/fool_and_final</field>
-    <field name="directed_by">Ahmed Khan</field>
   </doc>
   <doc>
+    <field name="id">/en/foolproof</field>
+    <field name="initial_release_date">2003-10-03</field>
+    <field name="name">Foolproof</field>
+    <field name="directed_by">William Phillips</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Thriller</field>
@@ -9636,56 +9633,56 @@
     <field name="genre">Caper story</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="name">Foolproof</field>
-    <field name="initial_release_date">2003-10-03</field>
-    <field name="id">/en/foolproof</field>
-    <field name="directed_by">William Phillips</field>
   </doc>
   <doc>
+    <field name="id">/en/for_the_birds</field>
+    <field name="initial_release_date">2000-06-05</field>
+    <field name="name">For the Birds</field>
+    <field name="directed_by">Ralph Eggleston</field>
     <field name="genre">Short Film</field>
     <field name="genre">Animation</field>
     <field name="genre">Comedy</field>
     <field name="genre">Family</field>
-    <field name="name">For the Birds</field>
-    <field name="initial_release_date">2000-06-05</field>
-    <field name="id">/en/for_the_birds</field>
-    <field name="directed_by">Ralph Eggleston</field>
   </doc>
   <doc>
+    <field name="id">/en/for_your_consideration_2006</field>
+    <field name="initial_release_date">2006-11-17</field>
+    <field name="name">For Your Consideration</field>
+    <field name="directed_by">Christopher Guest</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Parody</field>
     <field name="genre">Comedy</field>
-    <field name="name">For Your Consideration</field>
-    <field name="initial_release_date">2006-11-17</field>
-    <field name="id">/en/for_your_consideration_2006</field>
-    <field name="directed_by">Christopher Guest</field>
   </doc>
   <doc>
+    <field name="id">/en/diev_mi_kas</field>
+    <field name="initial_release_date">2005-09-23</field>
+    <field name="name">Forest of the Gods</field>
+    <field name="directed_by">Algimantas Puipa</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="name">Forest of the Gods</field>
-    <field name="initial_release_date">2005-09-23</field>
-    <field name="id">/en/diev_mi_kas</field>
-    <field name="directed_by">Algimantas Puipa</field>
   </doc>
   <doc>
+    <field name="id">/en/formula_17</field>
+    <field name="initial_release_date">2004-04-02</field>
+    <field name="name">Formula 17</field>
+    <field name="directed_by">Chen Yin-jung</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Formula 17</field>
-    <field name="initial_release_date">2004-04-02</field>
-    <field name="id">/en/formula_17</field>
-    <field name="directed_by">Chen Yin-jung</field>
   </doc>
   <doc>
+    <field name="id">/en/forty_shades_of_blue</field>
+    <field name="name">Forty Shades of Blue</field>
+    <field name="directed_by">Ira Sachs</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Forty Shades of Blue</field>
-    <field name="id">/en/forty_shades_of_blue</field>
-    <field name="directed_by">Ira Sachs</field>
   </doc>
   <doc>
+    <field name="id">/en/four_brothers_2005</field>
+    <field name="initial_release_date">2005-08-12</field>
+    <field name="name">Four Brothers</field>
+    <field name="directed_by">John Singleton</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Thriller</field>
@@ -9693,97 +9690,97 @@
     <field name="genre">Family Drama</field>
     <field name="genre">Crime Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Four Brothers</field>
-    <field name="initial_release_date">2005-08-12</field>
-    <field name="id">/en/four_brothers_2005</field>
-    <field name="directed_by">John Singleton</field>
   </doc>
   <doc>
+    <field name="id">/en/frailty</field>
+    <field name="initial_release_date">2001-11-17</field>
+    <field name="name">Frailty</field>
+    <field name="directed_by">Bill Paxton</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Frailty</field>
-    <field name="initial_release_date">2001-11-17</field>
-    <field name="id">/en/frailty</field>
-    <field name="directed_by">Bill Paxton</field>
   </doc>
   <doc>
+    <field name="id">/en/frankenfish</field>
+    <field name="initial_release_date">2004-10-09</field>
+    <field name="name">Frankenfish</field>
+    <field name="directed_by">Mark A.Z. Dippé</field>
     <field name="genre">Action Film</field>
     <field name="genre">Horror</field>
     <field name="genre">Natural horror film</field>
     <field name="genre">Monster</field>
     <field name="genre">Science Fiction</field>
-    <field name="name">Frankenfish</field>
-    <field name="initial_release_date">2004-10-09</field>
-    <field name="id">/en/frankenfish</field>
-    <field name="directed_by">Mark A.Z. Dippé</field>
   </doc>
   <doc>
-    <field name="genre">Family</field>
-    <field name="genre">Animation</field>
-    <field name="name">Franklin and the Turtle Lake Treasure</field>
-    <field name="initial_release_date">2006-12-20</field>
     <field name="id">/en/franklin_and_grannys_secret</field>
+    <field name="initial_release_date">2006-12-20</field>
+    <field name="name">Franklin and the Turtle Lake Treasure</field>
     <field name="directed_by">Dominique Monféry</field>
-  </doc>
-  <doc>
     <field name="genre">Family</field>
     <field name="genre">Animation</field>
-    <field name="name">Franklin and the Green Knight</field>
-    <field name="initial_release_date">2000-10-17</field>
+  </doc>
+  <doc>
     <field name="id">/en/franklin_and_the_green_knight</field>
+    <field name="initial_release_date">2000-10-17</field>
+    <field name="name">Franklin and the Green Knight</field>
     <field name="directed_by">John van Bruggen</field>
-  </doc>
-  <doc>
     <field name="genre">Family</field>
     <field name="genre">Animation</field>
-    <field name="name">Franklin's Magic Christmas</field>
-    <field name="initial_release_date">2001-11-06</field>
-    <field name="id">/en/franklins_magic_christmas</field>
-    <field name="directed_by">John van Bruggen</field>
   </doc>
   <doc>
+    <field name="id">/en/franklins_magic_christmas</field>
+    <field name="initial_release_date">2001-11-06</field>
+    <field name="name">Franklin's Magic Christmas</field>
+    <field name="directed_by">John van Bruggen</field>
+    <field name="genre">Family</field>
+    <field name="genre">Animation</field>
+  </doc>
+  <doc>
+    <field name="id">/en/freaky_friday_2003</field>
+    <field name="initial_release_date">2003-08-04</field>
+    <field name="name">Freaky Friday</field>
+    <field name="directed_by">Mark Waters</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Freaky Friday</field>
-    <field name="initial_release_date">2003-08-04</field>
-    <field name="id">/en/freaky_friday_2003</field>
-    <field name="directed_by">Mark Waters</field>
   </doc>
   <doc>
+    <field name="id">/en/freddy_vs_jason</field>
+    <field name="initial_release_date">2003-08-13</field>
+    <field name="name">Freddy vs. Jason</field>
+    <field name="directed_by">Ronny Yu</field>
     <field name="genre">Horror</field>
     <field name="genre">Thriller</field>
     <field name="genre">Slasher</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
-    <field name="name">Freddy vs. Jason</field>
-    <field name="initial_release_date">2003-08-13</field>
-    <field name="id">/en/freddy_vs_jason</field>
-    <field name="directed_by">Ronny Yu</field>
   </doc>
   <doc>
+    <field name="id">/en/free_jimmy</field>
+    <field name="initial_release_date">2006-04-21</field>
+    <field name="name">Free Jimmy</field>
+    <field name="directed_by">Christopher Nielsen</field>
     <field name="genre">Anime</field>
     <field name="genre">Animation</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Satire</field>
     <field name="genre">Stoner film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Free Jimmy</field>
-    <field name="initial_release_date">2006-04-21</field>
-    <field name="id">/en/free_jimmy</field>
-    <field name="directed_by">Christopher Nielsen</field>
   </doc>
   <doc>
+    <field name="id">/en/free_zone</field>
+    <field name="initial_release_date">2005-05-19</field>
+    <field name="name">Free Zone</field>
+    <field name="directed_by">Amos Gitai</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Free Zone</field>
-    <field name="initial_release_date">2005-05-19</field>
-    <field name="id">/en/free_zone</field>
-    <field name="directed_by">Amos Gitai</field>
   </doc>
   <doc>
+    <field name="id">/en/freedomland</field>
+    <field name="initial_release_date">2006-02-17</field>
+    <field name="name">Freedomland</field>
+    <field name="directed_by">Joe Roth</field>
     <field name="genre">Mystery</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
@@ -9791,21 +9788,21 @@
     <field name="genre">Crime Thriller</field>
     <field name="genre">Crime Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Freedomland</field>
-    <field name="initial_release_date">2006-02-17</field>
-    <field name="id">/en/freedomland</field>
-    <field name="directed_by">Joe Roth</field>
   </doc>
   <doc>
+    <field name="id">/en/french_bean</field>
+    <field name="initial_release_date">2007-03-22</field>
+    <field name="name">Mr. Bean's Holiday</field>
+    <field name="directed_by">Steve Bendelack</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="genre">Road movie</field>
-    <field name="name">Mr. Bean's Holiday</field>
-    <field name="initial_release_date">2007-03-22</field>
-    <field name="id">/en/french_bean</field>
-    <field name="directed_by">Steve Bendelack</field>
   </doc>
   <doc>
+    <field name="id">/en/frequency_2000</field>
+    <field name="initial_release_date">2000-04-28</field>
+    <field name="name">Frequency</field>
+    <field name="directed_by">Gregory Hoblit</field>
     <field name="genre">Thriller</field>
     <field name="genre">Time travel</field>
     <field name="genre">Science Fiction</field>
@@ -9814,50 +9811,50 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">Family Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Frequency</field>
-    <field name="initial_release_date">2000-04-28</field>
-    <field name="id">/en/frequency_2000</field>
-    <field name="directed_by">Gregory Hoblit</field>
   </doc>
   <doc>
+    <field name="id">/en/frida</field>
+    <field name="initial_release_date">2002-08-29</field>
+    <field name="name">Frida</field>
+    <field name="directed_by">Julie Taymor</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Political drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Frida</field>
-    <field name="initial_release_date">2002-08-29</field>
-    <field name="id">/en/frida</field>
-    <field name="directed_by">Julie Taymor</field>
   </doc>
   <doc>
+    <field name="id">/en/friday_after_next</field>
+    <field name="initial_release_date">2002-11-22</field>
+    <field name="name">Friday After Next</field>
+    <field name="directed_by">Marcus Raboy</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Friday After Next</field>
-    <field name="initial_release_date">2002-11-22</field>
-    <field name="id">/en/friday_after_next</field>
-    <field name="directed_by">Marcus Raboy</field>
   </doc>
   <doc>
+    <field name="id">/en/friday_night_lights</field>
+    <field name="initial_release_date">2004-10-06</field>
+    <field name="name">Friday Night Lights</field>
+    <field name="directed_by">Peter Berg</field>
     <field name="genre">Action Film</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="name">Friday Night Lights</field>
-    <field name="initial_release_date">2004-10-06</field>
-    <field name="id">/en/friday_night_lights</field>
-    <field name="directed_by">Peter Berg</field>
   </doc>
   <doc>
+    <field name="id">/en/friends_2001</field>
+    <field name="initial_release_date">2001-01-14</field>
+    <field name="name">Friends</field>
+    <field name="directed_by">Siddique</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">World cinema</field>
-    <field name="name">Friends</field>
-    <field name="initial_release_date">2001-01-14</field>
-    <field name="id">/en/friends_2001</field>
-    <field name="directed_by">Siddique</field>
   </doc>
   <doc>
+    <field name="id">/en/friends_with_money</field>
+    <field name="initial_release_date">2006-04-07</field>
+    <field name="name">Friends with Money</field>
+    <field name="directed_by">Nicole Holofcener</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy-drama</field>
@@ -9865,19 +9862,20 @@
     <field name="genre">Ensemble Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Friends with Money</field>
-    <field name="initial_release_date">2006-04-07</field>
-    <field name="id">/en/friends_with_money</field>
-    <field name="directed_by">Nicole Holofcener</field>
   </doc>
   <doc>
-    <field name="genre">Comedy-drama</field>
-    <field name="name">FRO - The Movie</field>
     <field name="id">/en/fro_the_movie</field>
+    <field name="name">FRO - The Movie</field>
     <field name="directed_by">Brad Gashler</field>
     <field name="directed_by">Michael J. Brooks</field>
+    <field name="genre">Comedy-drama</field>
   </doc>
   <doc>
+    <field name="id">/en/from_hell_2001</field>
+    <field name="initial_release_date">2001-09-08</field>
+    <field name="name">From Hell</field>
+    <field name="directed_by">Allen Hughes</field>
+    <field name="directed_by">Albert Hughes</field>
     <field name="genre">Thriller</field>
     <field name="genre">Mystery</field>
     <field name="genre">Biographical film</field>
@@ -9886,22 +9884,21 @@
     <field name="genre">Film adaptation</field>
     <field name="genre">Horror</field>
     <field name="genre">Drama</field>
-    <field name="name">From Hell</field>
-    <field name="initial_release_date">2001-09-08</field>
-    <field name="id">/en/from_hell_2001</field>
-    <field name="directed_by">Allen Hughes</field>
-    <field name="directed_by">Albert Hughes</field>
   </doc>
   <doc>
-    <field name="genre">Music video</field>
-    <field name="name">From Janet to Damita Jo: The Videos</field>
-    <field name="initial_release_date">2004-09-07</field>
     <field name="id">/en/from_janet_to_damita_jo_the_videos</field>
+    <field name="initial_release_date">2004-09-07</field>
+    <field name="name">From Janet to Damita Jo: The Videos</field>
     <field name="directed_by">Jonathan Dayton</field>
     <field name="directed_by">Mark Romanek</field>
     <field name="directed_by">Paul Hunter</field>
+    <field name="genre">Music video</field>
   </doc>
   <doc>
+    <field name="id">/en/from_justin_to_kelly</field>
+    <field name="initial_release_date">2003-06-20</field>
+    <field name="name">From Justin to Kelly</field>
+    <field name="directed_by">Robert Iscove</field>
     <field name="genre">Musical</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Teen film</field>
@@ -9909,39 +9906,39 @@
     <field name="genre">Beach Film</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">From Justin to Kelly</field>
-    <field name="initial_release_date">2003-06-20</field>
-    <field name="id">/en/from_justin_to_kelly</field>
-    <field name="directed_by">Robert Iscove</field>
   </doc>
   <doc>
+    <field name="id">/en/frostbite_2005</field>
+    <field name="name">Frostbite</field>
+    <field name="directed_by">Jonathan Schwartz</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy</field>
-    <field name="name">Frostbite</field>
-    <field name="id">/en/frostbite_2005</field>
-    <field name="directed_by">Jonathan Schwartz</field>
   </doc>
   <doc>
+    <field name="id">/en/fubar_2002</field>
+    <field name="initial_release_date">2002-01-01</field>
+    <field name="name">FUBAR</field>
+    <field name="directed_by">Michael Dowse</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Indie film</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">FUBAR</field>
-    <field name="initial_release_date">2002-01-01</field>
-    <field name="id">/en/fubar_2002</field>
-    <field name="directed_by">Michael Dowse</field>
   </doc>
   <doc>
+    <field name="id">/en/fuck_2005</field>
+    <field name="initial_release_date">2005-11-07</field>
+    <field name="name">Fuck</field>
+    <field name="directed_by">Steve Anderson</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Political cinema</field>
-    <field name="name">Fuck</field>
-    <field name="initial_release_date">2005-11-07</field>
-    <field name="id">/en/fuck_2005</field>
-    <field name="directed_by">Steve Anderson</field>
   </doc>
   <doc>
+    <field name="id">/en/fuckland</field>
+    <field name="initial_release_date">2000-09-21</field>
+    <field name="name">Fuckland</field>
+    <field name="directed_by">José Luis Márques</field>
     <field name="genre">Indie film</field>
     <field name="genre">Dogme 95</field>
     <field name="genre">Comedy-drama</field>
@@ -9949,30 +9946,30 @@
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Fuckland</field>
-    <field name="initial_release_date">2000-09-21</field>
-    <field name="id">/en/fuckland</field>
-    <field name="directed_by">José Luis Márques</field>
   </doc>
   <doc>
+    <field name="id">/en/full_court_miracle</field>
+    <field name="initial_release_date">2003-11-21</field>
+    <field name="name">Full-Court Miracle</field>
+    <field name="directed_by">Stuart Gillard</field>
     <field name="genre">Family</field>
     <field name="genre">Drama</field>
-    <field name="name">Full-Court Miracle</field>
-    <field name="initial_release_date">2003-11-21</field>
-    <field name="id">/en/full_court_miracle</field>
-    <field name="directed_by">Stuart Gillard</field>
   </doc>
   <doc>
+    <field name="id">/en/full_disclosure_2001</field>
+    <field name="initial_release_date">2001-05-15</field>
+    <field name="name">Full Disclosure</field>
+    <field name="directed_by">John Bradshaw</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
     <field name="genre">Political thriller</field>
-    <field name="name">Full Disclosure</field>
-    <field name="initial_release_date">2001-05-15</field>
-    <field name="id">/en/full_disclosure_2001</field>
-    <field name="directed_by">John Bradshaw</field>
   </doc>
   <doc>
+    <field name="id">/en/full_frontal</field>
+    <field name="initial_release_date">2002-08-02</field>
+    <field name="name">Full Frontal</field>
+    <field name="directed_by">Steven Soderbergh</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romance Film</field>
@@ -9980,45 +9977,44 @@
     <field name="genre">Ensemble Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Full Frontal</field>
-    <field name="initial_release_date">2002-08-02</field>
-    <field name="id">/en/full_frontal</field>
-    <field name="directed_by">Steven Soderbergh</field>
   </doc>
   <doc>
+    <field name="id">/wikipedia/ja/$5287$5834$7248_$92FC$306E$932C$91D1$8853$5E2B_$30B7$30E3$30F3$30D0$30E9$3092$5F81$304F$8005</field>
+    <field name="initial_release_date">2005-07-23</field>
+    <field name="name">Fullmetal Alchemist the Movie: Conqueror of Shamballa</field>
+    <field name="directed_by">Seiji Mizushima</field>
     <field name="genre">Anime</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Action Film</field>
     <field name="genre">Animation</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Fullmetal Alchemist the Movie: Conqueror of Shamballa</field>
-    <field name="initial_release_date">2005-07-23</field>
-    <field name="id">/wikipedia/ja/$5287$5834$7248_$92FC$306E$932C$91D1$8853$5E2B_$30B7$30E3$30F3$30D0$30E9$3092$5F81$304F$8005</field>
-    <field name="directed_by">Seiji Mizushima</field>
   </doc>
   <doc>
+    <field name="id">/en/fulltime_killer</field>
+    <field name="initial_release_date">2001-08-03</field>
+    <field name="name">Fulltime Killer</field>
+    <field name="directed_by">Johnnie To</field>
+    <field name="directed_by">Wai Ka-fai</field>
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Martial Arts Film</field>
     <field name="genre">Action Thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Fulltime Killer</field>
-    <field name="initial_release_date">2001-08-03</field>
-    <field name="id">/en/fulltime_killer</field>
-    <field name="directed_by">Johnnie To</field>
-    <field name="directed_by">Wai Ka-fai</field>
   </doc>
   <doc>
+    <field name="id">/en/fun_with_dick_and_jane_2005</field>
+    <field name="initial_release_date">2005-12-21</field>
+    <field name="name">Fun with Dick and Jane</field>
+    <field name="directed_by">Dean Parisot</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="name">Fun with Dick and Jane</field>
-    <field name="initial_release_date">2005-12-21</field>
-    <field name="id">/en/fun_with_dick_and_jane_2005</field>
-    <field name="directed_by">Dean Parisot</field>
   </doc>
   <doc>
+    <field name="id">/en/funny_ha_ha</field>
+    <field name="name">Funny Ha Ha</field>
+    <field name="directed_by">Andrew Bujalski</field>
     <field name="genre">Indie film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
@@ -10026,92 +10022,93 @@
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
-    <field name="name">Funny Ha Ha</field>
-    <field name="id">/en/funny_ha_ha</field>
-    <field name="directed_by">Andrew Bujalski</field>
   </doc>
   <doc>
+    <field name="id">/en/g-sale</field>
+    <field name="initial_release_date">2005-11-15</field>
+    <field name="name">G-Sale</field>
+    <field name="directed_by">Randy Nargi</field>
     <field name="genre">Mockumentary</field>
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
-    <field name="name">G-Sale</field>
-    <field name="initial_release_date">2005-11-15</field>
-    <field name="id">/en/g-sale</field>
-    <field name="directed_by">Randy Nargi</field>
   </doc>
   <doc>
+    <field name="id">/en/gabrielle_2006</field>
+    <field name="initial_release_date">2005-09-05</field>
+    <field name="name">Gabrielle</field>
+    <field name="directed_by">Patrice Chéreau</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Gabrielle</field>
-    <field name="initial_release_date">2005-09-05</field>
-    <field name="id">/en/gabrielle_2006</field>
-    <field name="directed_by">Patrice Chéreau</field>
   </doc>
   <doc>
+    <field name="id">/en/gagamboy</field>
+    <field name="initial_release_date">2004-01-01</field>
+    <field name="name">Gagamboy</field>
+    <field name="directed_by">Erik Matti</field>
     <field name="genre">Action Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Comedy</field>
     <field name="genre">Fantasy</field>
-    <field name="name">Gagamboy</field>
-    <field name="initial_release_date">2004-01-01</field>
-    <field name="id">/en/gagamboy</field>
-    <field name="directed_by">Erik Matti</field>
   </doc>
   <doc>
+    <field name="id">/en/gallipoli_2005</field>
+    <field name="initial_release_date">2005-03-18</field>
+    <field name="name">Gallipoli</field>
+    <field name="directed_by">Tolga Örnek</field>
     <field name="genre">Documentary film</field>
     <field name="genre">War film</field>
-    <field name="name">Gallipoli</field>
-    <field name="initial_release_date">2005-03-18</field>
-    <field name="id">/en/gallipoli_2005</field>
-    <field name="directed_by">Tolga Örnek</field>
   </doc>
   <doc>
+    <field name="id">/en/game_6_2006</field>
+    <field name="initial_release_date">2006-03-10</field>
+    <field name="name">Game 6</field>
+    <field name="directed_by">Michael Hoffman</field>
     <field name="genre">Indie film</field>
     <field name="genre">Sports</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Game 6</field>
-    <field name="initial_release_date">2006-03-10</field>
-    <field name="id">/en/game_6_2006</field>
-    <field name="directed_by">Michael Hoffman</field>
   </doc>
   <doc>
-    <field name="genre">Science Fiction</field>
-    <field name="name">Maximum Surge</field>
-    <field name="initial_release_date">2003-06-23</field>
     <field name="id">/en/game_over_2003</field>
+    <field name="initial_release_date">2003-06-23</field>
+    <field name="name">Maximum Surge</field>
     <field name="directed_by">Jason Bourque</field>
+    <field name="genre">Science Fiction</field>
   </doc>
   <doc>
+    <field name="id">/en/gamma_squad</field>
+    <field name="initial_release_date">2004-06-14</field>
+    <field name="name">Expendable</field>
+    <field name="directed_by">Nathaniel Barker</field>
+    <field name="directed_by">Eliot Lash</field>
     <field name="genre">Indie film</field>
     <field name="genre">Short Film</field>
     <field name="genre">War film</field>
-    <field name="name">Expendable</field>
-    <field name="initial_release_date">2004-06-14</field>
-    <field name="id">/en/gamma_squad</field>
-    <field name="directed_by">Nathaniel Barker</field>
-    <field name="directed_by">Eliot Lash</field>
   </doc>
   <doc>
+    <field name="id">/en/gangotri_2003</field>
+    <field name="initial_release_date">2003-03-28</field>
+    <field name="name">Gangotri</field>
+    <field name="directed_by">Kovelamudi Raghavendra Rao</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Gangotri</field>
-    <field name="initial_release_date">2003-03-28</field>
-    <field name="id">/en/gangotri_2003</field>
-    <field name="directed_by">Kovelamudi Raghavendra Rao</field>
   </doc>
   <doc>
+    <field name="id">/en/gangs_of_new_york</field>
+    <field name="initial_release_date">2002-12-09</field>
+    <field name="name">Gangs of New York</field>
+    <field name="directed_by">Martin Scorsese</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Gangs of New York</field>
-    <field name="initial_release_date">2002-12-09</field>
-    <field name="id">/en/gangs_of_new_york</field>
-    <field name="directed_by">Martin Scorsese</field>
   </doc>
   <doc>
+    <field name="id">/en/gangster_2006</field>
+    <field name="initial_release_date">2006-04-28</field>
+    <field name="name">Gangster</field>
+    <field name="directed_by">Anurag Basu</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Mystery</field>
@@ -10119,12 +10116,12 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">Bollywood</field>
     <field name="genre">Drama</field>
-    <field name="name">Gangster</field>
-    <field name="initial_release_date">2006-04-28</field>
-    <field name="id">/en/gangster_2006</field>
-    <field name="directed_by">Anurag Basu</field>
   </doc>
   <doc>
+    <field name="id">/en/gangster_no_1</field>
+    <field name="initial_release_date">2000-06-09</field>
+    <field name="name">Gangster No. 1</field>
+    <field name="directed_by">Paul McGuigan</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Historical period drama</field>
@@ -10133,19 +10130,19 @@
     <field name="genre">Action/Adventure</field>
     <field name="genre">Gangster Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Gangster No. 1</field>
-    <field name="initial_release_date">2000-06-09</field>
-    <field name="id">/en/gangster_no_1</field>
-    <field name="directed_by">Paul McGuigan</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Garam Masala</field>
-    <field name="initial_release_date">2005-11-02</field>
     <field name="id">/en/garam_masala_2005</field>
+    <field name="initial_release_date">2005-11-02</field>
+    <field name="name">Garam Masala</field>
     <field name="directed_by">Priyadarshan</field>
+    <field name="genre">Comedy</field>
   </doc>
   <doc>
+    <field name="id">/en/garcon_stupide</field>
+    <field name="initial_release_date">2004-03-10</field>
+    <field name="name">Garçon stupide</field>
+    <field name="directed_by">Lionel Baier</field>
     <field name="genre">LGBT</field>
     <field name="genre">World cinema</field>
     <field name="genre">Gay</field>
@@ -10154,83 +10151,83 @@
     <field name="genre">Coming of age</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Garçon stupide</field>
-    <field name="initial_release_date">2004-03-10</field>
-    <field name="id">/en/garcon_stupide</field>
-    <field name="directed_by">Lionel Baier</field>
   </doc>
   <doc>
+    <field name="id">/en/garden_state</field>
+    <field name="initial_release_date">2004-01-16</field>
+    <field name="name">Garden State</field>
+    <field name="directed_by">Zach Braff</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Garden State</field>
-    <field name="initial_release_date">2004-01-16</field>
-    <field name="id">/en/garden_state</field>
-    <field name="directed_by">Zach Braff</field>
   </doc>
   <doc>
+    <field name="id">/en/garfield_2004</field>
+    <field name="initial_release_date">2004-06-06</field>
+    <field name="name">Garfield: The Movie</field>
+    <field name="directed_by">Peter Hewitt</field>
     <field name="genre">Slapstick</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
-    <field name="name">Garfield: The Movie</field>
-    <field name="initial_release_date">2004-06-06</field>
-    <field name="id">/en/garfield_2004</field>
-    <field name="directed_by">Peter Hewitt</field>
   </doc>
   <doc>
+    <field name="id">/en/garfield_a_tail_of_two_kitties</field>
+    <field name="initial_release_date">2006-06-15</field>
+    <field name="name">Garfield: A Tail of Two Kitties</field>
+    <field name="directed_by">Tim Hill</field>
     <field name="genre">Family</field>
     <field name="genre">Animal Picture</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Family-Oriented Adventure</field>
     <field name="genre">Comedy</field>
-    <field name="name">Garfield: A Tail of Two Kitties</field>
-    <field name="initial_release_date">2006-06-15</field>
-    <field name="id">/en/garfield_a_tail_of_two_kitties</field>
-    <field name="directed_by">Tim Hill</field>
   </doc>
   <doc>
+    <field name="id">/en/gene-x</field>
+    <field name="name">Gene-X</field>
+    <field name="directed_by">Martin Simpson</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
-    <field name="name">Gene-X</field>
-    <field name="id">/en/gene-x</field>
-    <field name="directed_by">Martin Simpson</field>
   </doc>
   <doc>
+    <field name="id">/en/george_of_the_jungle_2</field>
+    <field name="initial_release_date">2003-08-18</field>
+    <field name="name">George of the Jungle 2</field>
+    <field name="directed_by">David Grossman</field>
     <field name="genre">Parody</field>
     <field name="genre">Slapstick</field>
     <field name="genre">Family</field>
     <field name="genre">Jungle Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">George of the Jungle 2</field>
-    <field name="initial_release_date">2003-08-18</field>
-    <field name="id">/en/george_of_the_jungle_2</field>
-    <field name="directed_by">David Grossman</field>
   </doc>
   <doc>
+    <field name="id">/en/george_washington_2000</field>
+    <field name="initial_release_date">2000-09-29</field>
+    <field name="name">George Washington</field>
+    <field name="directed_by">David Gordon Green</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Indie film</field>
     <field name="genre">Drama</field>
-    <field name="name">George Washington</field>
-    <field name="initial_release_date">2000-09-29</field>
-    <field name="id">/en/george_washington_2000</field>
-    <field name="directed_by">David Gordon Green</field>
   </doc>
   <doc>
+    <field name="id">/en/georgia_rule</field>
+    <field name="initial_release_date">2007-05-10</field>
+    <field name="name">Georgia Rule</field>
+    <field name="directed_by">Garry Marshall</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Melodrama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Georgia Rule</field>
-    <field name="initial_release_date">2007-05-10</field>
-    <field name="id">/en/georgia_rule</field>
-    <field name="directed_by">Garry Marshall</field>
   </doc>
   <doc>
+    <field name="id">/en/gerry</field>
+    <field name="initial_release_date">2003-02-14</field>
+    <field name="name">Gerry</field>
+    <field name="directed_by">Gus Van Sant</field>
     <field name="genre">Indie film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Mystery</field>
@@ -10238,20 +10235,20 @@
     <field name="genre">Experimental film</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Drama</field>
-    <field name="name">Gerry</field>
-    <field name="initial_release_date">2003-02-14</field>
-    <field name="id">/en/gerry</field>
-    <field name="directed_by">Gus Van Sant</field>
   </doc>
   <doc>
+    <field name="id">/en/get_a_clue</field>
+    <field name="initial_release_date">2002-06-28</field>
+    <field name="name">Get a Clue</field>
+    <field name="directed_by">Maggie Greenwald Mansfield</field>
     <field name="genre">Mystery</field>
     <field name="genre">Comedy</field>
-    <field name="name">Get a Clue</field>
-    <field name="initial_release_date">2002-06-28</field>
-    <field name="id">/en/get_a_clue</field>
-    <field name="directed_by">Maggie Greenwald Mansfield</field>
   </doc>
   <doc>
+    <field name="id">/en/get_over_it</field>
+    <field name="initial_release_date">2001-03-09</field>
+    <field name="name">Get Over It</field>
+    <field name="directed_by">Tommy O'Haver</field>
     <field name="genre">Musical</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Teen film</field>
@@ -10264,12 +10261,12 @@
     <field name="genre">Sex comedy</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Get Over It</field>
-    <field name="initial_release_date">2001-03-09</field>
-    <field name="id">/en/get_over_it</field>
-    <field name="directed_by">Tommy O'Haver</field>
   </doc>
   <doc>
+    <field name="id">/en/get_rich_or_die_tryin</field>
+    <field name="initial_release_date">2005-11-09</field>
+    <field name="name">Get Rich or Die Tryin'</field>
+    <field name="directed_by">Jim Sheridan</field>
     <field name="genre">Coming of age</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Hip hop film</field>
@@ -10277,12 +10274,11 @@
     <field name="genre">Biographical film</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Get Rich or Die Tryin'</field>
-    <field name="initial_release_date">2005-11-09</field>
-    <field name="id">/en/get_rich_or_die_tryin</field>
-    <field name="directed_by">Jim Sheridan</field>
   </doc>
   <doc>
+    <field name="id">/en/get_up</field>
+    <field name="name">Get Up!</field>
+    <field name="directed_by">Kazuyuki Izutsu</field>
     <field name="genre">Musical</field>
     <field name="genre">Action Film</field>
     <field name="genre">Japanese Movies</field>
@@ -10290,40 +10286,41 @@
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Get Up!</field>
-    <field name="id">/en/get_up</field>
-    <field name="directed_by">Kazuyuki Izutsu</field>
   </doc>
   <doc>
+    <field name="id">/en/getting_my_brother_laid</field>
+    <field name="name">Getting My Brother Laid</field>
+    <field name="directed_by">Sven Taddicken</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Getting My Brother Laid</field>
-    <field name="id">/en/getting_my_brother_laid</field>
-    <field name="directed_by">Sven Taddicken</field>
   </doc>
   <doc>
+    <field name="id">/en/getting_there</field>
+    <field name="initial_release_date">2002-06-11</field>
+    <field name="name">Getting There: Sweet 16 and Licensed to Drive</field>
+    <field name="directed_by">Steve Purcell</field>
     <field name="genre">Family</field>
     <field name="genre">Teen film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Getting There: Sweet 16 and Licensed to Drive</field>
-    <field name="initial_release_date">2002-06-11</field>
-    <field name="id">/en/getting_there</field>
-    <field name="directed_by">Steve Purcell</field>
   </doc>
   <doc>
+    <field name="id">/en/ghajini</field>
+    <field name="initial_release_date">2005-09-29</field>
+    <field name="name">Ghajini</field>
+    <field name="directed_by">A.R. Murugadoss</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Mystery</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Ghajini</field>
-    <field name="initial_release_date">2005-09-29</field>
-    <field name="id">/en/ghajini</field>
-    <field name="directed_by">A.R. Murugadoss</field>
   </doc>
   <doc>
+    <field name="id">/en/gharshana</field>
+    <field name="initial_release_date">2004-07-30</field>
+    <field name="name">Gharshana</field>
+    <field name="directed_by">Gautham Menon</field>
     <field name="genre">Mystery</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Romance Film</field>
@@ -10331,52 +10328,52 @@
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
     <field name="genre">Drama</field>
-    <field name="name">Gharshana</field>
-    <field name="initial_release_date">2004-07-30</field>
-    <field name="id">/en/gharshana</field>
-    <field name="directed_by">Gautham Menon</field>
   </doc>
   <doc>
+    <field name="id">/en/ghilli</field>
+    <field name="initial_release_date">2004-04-17</field>
+    <field name="name">Ghilli</field>
+    <field name="directed_by">Dharani</field>
     <field name="genre">Sports</field>
     <field name="genre">Action Film</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Ghilli</field>
-    <field name="initial_release_date">2004-04-17</field>
-    <field name="id">/en/ghilli</field>
-    <field name="directed_by">Dharani</field>
   </doc>
   <doc>
-    <field name="genre">Horror comedy</field>
-    <field name="name">Ghost Game</field>
-    <field name="initial_release_date">2005-09-01</field>
     <field name="id">/en/ghost_game_2006</field>
+    <field name="initial_release_date">2005-09-01</field>
+    <field name="name">Ghost Game</field>
     <field name="directed_by">Joe Knee</field>
+    <field name="genre">Horror comedy</field>
   </doc>
   <doc>
+    <field name="id">/en/ghost_house</field>
+    <field name="initial_release_date">2004-09-17</field>
+    <field name="name">Ghost House</field>
+    <field name="directed_by">Kim Sang-jin</field>
     <field name="genre">Horror</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="name">Ghost House</field>
-    <field name="initial_release_date">2004-09-17</field>
-    <field name="id">/en/ghost_house</field>
-    <field name="directed_by">Kim Sang-jin</field>
   </doc>
   <doc>
+    <field name="id">/en/ghost_in_the_shell_2_innocence</field>
+    <field name="initial_release_date">2004-03-06</field>
+    <field name="name">Ghost in the Shell 2: Innocence</field>
+    <field name="directed_by">Mamoru Oshii</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Anime</field>
     <field name="genre">Action Film</field>
     <field name="genre">Animation</field>
     <field name="genre">Thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Ghost in the Shell 2: Innocence</field>
-    <field name="initial_release_date">2004-03-06</field>
-    <field name="id">/en/ghost_in_the_shell_2_innocence</field>
-    <field name="directed_by">Mamoru Oshii</field>
   </doc>
   <doc>
+    <field name="id">/en/s_a_c_solid_state_society</field>
+    <field name="initial_release_date">2006-09-01</field>
+    <field name="name">Ghost in the Shell: Solid State Society</field>
+    <field name="directed_by">Kenji Kamiyama</field>
     <field name="genre">Anime</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action Film</field>
@@ -10384,49 +10381,48 @@
     <field name="genre">Thriller</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Fantasy</field>
-    <field name="name">Ghost in the Shell: Solid State Society</field>
-    <field name="initial_release_date">2006-09-01</field>
-    <field name="id">/en/s_a_c_solid_state_society</field>
-    <field name="directed_by">Kenji Kamiyama</field>
   </doc>
   <doc>
+    <field name="id">/en/ghost_lake</field>
+    <field name="initial_release_date">2005-05-17</field>
+    <field name="name">Ghost Lake</field>
+    <field name="directed_by">Jay Woelfel</field>
     <field name="genre">Horror</field>
     <field name="genre">Zombie Film</field>
-    <field name="name">Ghost Lake</field>
-    <field name="initial_release_date">2005-05-17</field>
-    <field name="id">/en/ghost_lake</field>
-    <field name="directed_by">Jay Woelfel</field>
   </doc>
   <doc>
+    <field name="id">/en/ghost_rider_2007</field>
+    <field name="initial_release_date">2007-01-15</field>
+    <field name="name">Ghost Rider</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Superhero movie</field>
     <field name="genre">Horror</field>
     <field name="genre">Drama</field>
-    <field name="name">Ghost Rider</field>
-    <field name="initial_release_date">2007-01-15</field>
-    <field name="id">/en/ghost_rider_2007</field>
     <field name="directed_by">Mark Steven Johnson</field>
   </doc>
   <doc>
+    <field name="id">/en/ghost_ship_2002</field>
+    <field name="initial_release_date">2002-10-22</field>
+    <field name="name">Ghost Ship</field>
     <field name="genre">Horror</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Slasher</field>
-    <field name="name">Ghost Ship</field>
-    <field name="initial_release_date">2002-10-22</field>
-    <field name="id">/en/ghost_ship_2002</field>
     <field name="directed_by">Steve Beck</field>
   </doc>
   <doc>
+    <field name="id">/en/ghost_world_2001</field>
+    <field name="initial_release_date">2001-06-16</field>
+    <field name="name">Ghost World</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy-drama</field>
-    <field name="name">Ghost World</field>
-    <field name="initial_release_date">2001-06-16</field>
-    <field name="id">/en/ghost_world_2001</field>
     <field name="directed_by">Terry Zwigoff</field>
   </doc>
   <doc>
+    <field name="id">/en/ghosts_of_mars</field>
+    <field name="initial_release_date">2001-08-24</field>
+    <field name="name">Ghosts of Mars</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
@@ -10434,57 +10430,57 @@
     <field name="genre">Action Film</field>
     <field name="genre">Thriller</field>
     <field name="genre">Space Western</field>
-    <field name="name">Ghosts of Mars</field>
-    <field name="initial_release_date">2001-08-24</field>
-    <field name="id">/en/ghosts_of_mars</field>
     <field name="directed_by">John Carpenter</field>
   </doc>
   <doc>
+    <field name="id">/m/06ry42</field>
+    <field name="initial_release_date">2004-10-28</field>
+    <field name="name">The International Playboys' First Movie: Ghouls Gone Wild!</field>
     <field name="genre">Short Film</field>
     <field name="genre">Musical</field>
-    <field name="name">The International Playboys' First Movie: Ghouls Gone Wild!</field>
-    <field name="initial_release_date">2004-10-28</field>
-    <field name="id">/m/06ry42</field>
     <field name="directed_by">Ted Geoghegan</field>
   </doc>
   <doc>
+    <field name="id">/en/gie</field>
+    <field name="initial_release_date">2005-07-14</field>
+    <field name="name">Gie</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Political drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Gie</field>
-    <field name="initial_release_date">2005-07-14</field>
-    <field name="id">/en/gie</field>
     <field name="directed_by">Riri Riza</field>
   </doc>
   <doc>
+    <field name="id">/en/gigantic_2003</field>
+    <field name="initial_release_date">2003-03-10</field>
+    <field name="name">Gigantic (A Tale of Two Johns)</field>
     <field name="genre">Indie film</field>
     <field name="genre">Documentary film</field>
-    <field name="name">Gigantic (A Tale of Two Johns)</field>
-    <field name="initial_release_date">2003-03-10</field>
-    <field name="id">/en/gigantic_2003</field>
     <field name="directed_by">A. J. Schnack</field>
   </doc>
   <doc>
+    <field name="id">/en/gigli</field>
+    <field name="initial_release_date">2003-07-27</field>
+    <field name="name">Gigli</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Comedy</field>
-    <field name="name">Gigli</field>
-    <field name="initial_release_date">2003-07-27</field>
-    <field name="id">/en/gigli</field>
     <field name="directed_by">Martin Brest</field>
   </doc>
   <doc>
+    <field name="id">/en/ginger_snaps</field>
+    <field name="initial_release_date">2000-09-10</field>
+    <field name="name">Ginger Snaps</field>
     <field name="genre">Teen film</field>
     <field name="genre">Horror</field>
     <field name="genre">Cult film</field>
-    <field name="name">Ginger Snaps</field>
-    <field name="initial_release_date">2000-09-10</field>
-    <field name="id">/en/ginger_snaps</field>
     <field name="directed_by">John Fawcett</field>
   </doc>
   <doc>
+    <field name="id">/en/ginger_snaps_2_unleashed</field>
+    <field name="initial_release_date">2004-01-30</field>
+    <field name="name">Ginger Snaps 2: Unleashed</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Teen film</field>
@@ -10492,183 +10488,183 @@
     <field name="genre">Feminist Film</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Ginger Snaps 2: Unleashed</field>
-    <field name="initial_release_date">2004-01-30</field>
-    <field name="id">/en/ginger_snaps_2_unleashed</field>
     <field name="directed_by">Brett Sullivan</field>
   </doc>
   <doc>
+    <field name="id">/en/girlfight</field>
+    <field name="initial_release_date">2000-01-22</field>
+    <field name="name">Girlfight</field>
     <field name="genre">Teen film</field>
     <field name="genre">Sports</field>
     <field name="genre">Coming-of-age story</field>
     <field name="genre">Drama</field>
-    <field name="name">Girlfight</field>
-    <field name="initial_release_date">2000-01-22</field>
-    <field name="id">/en/girlfight</field>
     <field name="directed_by">Karyn Kusama</field>
   </doc>
   <doc>
+    <field name="id">/en/gladiator_2000</field>
+    <field name="initial_release_date">2000-05-01</field>
+    <field name="name">Gladiator</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Epic film</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Gladiator</field>
-    <field name="initial_release_date">2000-05-01</field>
-    <field name="id">/en/gladiator_2000</field>
     <field name="directed_by">Ridley Scott</field>
   </doc>
   <doc>
+    <field name="id">/en/glastonbury_2006</field>
+    <field name="initial_release_date">2006-04-14</field>
+    <field name="name">Glastonbury</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Music</field>
     <field name="genre">Concert film</field>
     <field name="genre">Biographical film</field>
-    <field name="name">Glastonbury</field>
-    <field name="initial_release_date">2006-04-14</field>
-    <field name="id">/en/glastonbury_2006</field>
     <field name="directed_by">Julien Temple</field>
   </doc>
   <doc>
+    <field name="id">/en/glastonbury_anthems</field>
+    <field name="name">Glastonbury Anthems</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Music</field>
     <field name="genre">Concert film</field>
-    <field name="name">Glastonbury Anthems</field>
-    <field name="id">/en/glastonbury_anthems</field>
     <field name="directed_by">Gavin Taylor</field>
     <field name="directed_by">Declan Lowney</field>
     <field name="directed_by">Janet Fraser-Crook</field>
     <field name="directed_by">Phil Heyes</field>
   </doc>
   <doc>
+    <field name="id">/en/glitter_2001</field>
+    <field name="initial_release_date">2001-09-21</field>
+    <field name="name">Glitter</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Glitter</field>
-    <field name="initial_release_date">2001-09-21</field>
-    <field name="id">/en/glitter_2001</field>
     <field name="directed_by">Vondie Curtis-Hall</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Global Heresy</field>
-    <field name="initial_release_date">2002-09-03</field>
     <field name="id">/en/global_heresy</field>
+    <field name="initial_release_date">2002-09-03</field>
+    <field name="name">Global Heresy</field>
+    <field name="genre">Comedy</field>
     <field name="directed_by">Sidney J. Furie</field>
   </doc>
   <doc>
+    <field name="id">/en/glory_road_2006</field>
+    <field name="initial_release_date">2006-01-13</field>
+    <field name="name">Glory Road</field>
     <field name="genre">Sports</field>
     <field name="genre">Historical period drama</field>
     <field name="genre">Docudrama</field>
     <field name="genre">Drama</field>
-    <field name="name">Glory Road</field>
-    <field name="initial_release_date">2006-01-13</field>
-    <field name="id">/en/glory_road_2006</field>
     <field name="directed_by">James Gartner</field>
   </doc>
   <doc>
+    <field name="id">/en/go_figure_2005</field>
+    <field name="initial_release_date">2005-06-10</field>
+    <field name="name">Go Figure</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Go Figure</field>
-    <field name="initial_release_date">2005-06-10</field>
-    <field name="id">/en/go_figure_2005</field>
     <field name="directed_by">Francine McDougall</field>
   </doc>
   <doc>
+    <field name="id">/en/goal__2005</field>
+    <field name="initial_release_date">2005-09-08</field>
+    <field name="name">Goal!</field>
     <field name="genre">Sports</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Goal!</field>
-    <field name="initial_release_date">2005-09-08</field>
-    <field name="id">/en/goal__2005</field>
     <field name="directed_by">Danny Cannon</field>
   </doc>
   <doc>
+    <field name="id">/en/goal_2_living_the_dream</field>
+    <field name="initial_release_date">2007-02-09</field>
+    <field name="name">Goal II: Living the Dream</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="name">Goal II: Living the Dream</field>
-    <field name="initial_release_date">2007-02-09</field>
-    <field name="id">/en/goal_2_living_the_dream</field>
     <field name="directed_by">Jaume Collet-Serra</field>
   </doc>
   <doc>
+    <field name="id">/en/god_grew_tired_of_us</field>
+    <field name="initial_release_date">2006-09-04</field>
+    <field name="name">God Grew Tired of Us</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Indie film</field>
     <field name="genre">Historical fiction</field>
-    <field name="name">God Grew Tired of Us</field>
-    <field name="initial_release_date">2006-09-04</field>
-    <field name="id">/en/god_grew_tired_of_us</field>
     <field name="directed_by">Christopher Dillon Quinn</field>
     <field name="directed_by">Tommy Walker</field>
   </doc>
   <doc>
+    <field name="id">/en/god_on_my_side</field>
+    <field name="initial_release_date">2006-11-02</field>
+    <field name="name">God on My Side</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Christian film</field>
-    <field name="name">God on My Side</field>
-    <field name="initial_release_date">2006-11-02</field>
-    <field name="id">/en/god_on_my_side</field>
     <field name="directed_by">Andrew Denton</field>
   </doc>
   <doc>
+    <field name="id">/en/godavari</field>
+    <field name="initial_release_date">2006-05-19</field>
+    <field name="name">Godavari</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Godavari</field>
-    <field name="initial_release_date">2006-05-19</field>
-    <field name="id">/en/godavari</field>
     <field name="directed_by">Sekhar Kammula</field>
   </doc>
   <doc>
+    <field name="id">/en/godfather</field>
+    <field name="initial_release_date">2006-02-24</field>
+    <field name="name">Varalaru</field>
     <field name="genre">Action Film</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Tamil cinema</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="name">Varalaru</field>
-    <field name="initial_release_date">2006-02-24</field>
-    <field name="id">/en/godfather</field>
     <field name="directed_by">K. S. Ravikumar</field>
   </doc>
   <doc>
+    <field name="id">/en/godsend</field>
+    <field name="initial_release_date">2004-04-30</field>
+    <field name="name">Godsend</field>
     <field name="genre">Thriller</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Horror</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Sci-Fi Horror</field>
     <field name="genre">Drama</field>
-    <field name="name">Godsend</field>
-    <field name="initial_release_date">2004-04-30</field>
-    <field name="id">/en/godsend</field>
     <field name="directed_by">Nick Hamm</field>
   </doc>
   <doc>
+    <field name="id">/en/godzilla_3d_to_the_max</field>
+    <field name="initial_release_date">2007-09-12</field>
+    <field name="name">Godzilla 3D to the MAX</field>
     <field name="genre">Horror</field>
     <field name="genre">Action Film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Short Film</field>
-    <field name="name">Godzilla 3D to the MAX</field>
-    <field name="initial_release_date">2007-09-12</field>
-    <field name="id">/en/godzilla_3d_to_the_max</field>
     <field name="directed_by">Keith Melton</field>
     <field name="directed_by">Yoshimitsu Banno</field>
   </doc>
   <doc>
-    <field name="genre">Monster</field>
-    <field name="genre">Science Fiction</field>
-    <field name="genre">Cult film</field>
-    <field name="genre">World cinema</field>
-    <field name="genre">Action Film</field>
-    <field name="genre">Creature Film</field>
-    <field name="genre">Japanese Movies</field>
-    <field name="name">Godzilla Against Mechagodzilla</field>
-    <field name="initial_release_date">2002-12-15</field>
     <field name="id">/en/godzilla_against_mechagodzilla</field>
+    <field name="initial_release_date">2002-12-15</field>
+    <field name="name">Godzilla Against Mechagodzilla</field>
+    <field name="genre">Monster</field>
+    <field name="genre">Science Fiction</field>
+    <field name="genre">Cult film</field>
+    <field name="genre">World cinema</field>
+    <field name="genre">Action Film</field>
+    <field name="genre">Creature Film</field>
+    <field name="genre">Japanese Movies</field>
     <field name="directed_by">Masaaki Tezuka</field>
   </doc>
   <doc>
+    <field name="id">/en/godzilla_vs_megaguirus</field>
+    <field name="initial_release_date">2000-11-03</field>
+    <field name="name">Godzilla vs. Megaguirus</field>
     <field name="genre">Monster</field>
     <field name="genre">World cinema</field>
     <field name="genre">Science Fiction</field>
@@ -10676,12 +10672,12 @@
     <field name="genre">Action Film</field>
     <field name="genre">Creature Film</field>
     <field name="genre">Japanese Movies</field>
-    <field name="name">Godzilla vs. Megaguirus</field>
-    <field name="initial_release_date">2000-11-03</field>
-    <field name="id">/en/godzilla_vs_megaguirus</field>
     <field name="directed_by">Masaaki Tezuka</field>
   </doc>
   <doc>
+    <field name="id">/en/godzilla_tokyo_sos</field>
+    <field name="initial_release_date">2003-11-03</field>
+    <field name="name">Godzilla: Tokyo SOS</field>
     <field name="genre">Monster</field>
     <field name="genre">Fantasy</field>
     <field name="genre">World cinema</field>
@@ -10689,169 +10685,169 @@
     <field name="genre">Science Fiction</field>
     <field name="genre">Cult film</field>
     <field name="genre">Japanese Movies</field>
-    <field name="name">Godzilla: Tokyo SOS</field>
-    <field name="initial_release_date">2003-11-03</field>
-    <field name="id">/en/godzilla_tokyo_sos</field>
     <field name="directed_by">Masaaki Tezuka</field>
   </doc>
   <doc>
+    <field name="id">/wikipedia/fr/Godzilla$002C_Mothra_and_King_Ghidorah$003A_Giant_Monsters_All-Out_Attack</field>
+    <field name="initial_release_date">2001-11-03</field>
+    <field name="name">Godzilla, Mothra and King Ghidorah: Giant Monsters All-Out Attack</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Action Film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Godzilla, Mothra and King Ghidorah: Giant Monsters All-Out Attack</field>
-    <field name="initial_release_date">2001-11-03</field>
-    <field name="id">/wikipedia/fr/Godzilla$002C_Mothra_and_King_Ghidorah$003A_Giant_Monsters_All-Out_Attack</field>
     <field name="directed_by">Shusuke Kaneko</field>
   </doc>
   <doc>
+    <field name="id">/en/godzilla_final_wars</field>
+    <field name="initial_release_date">2004-11-29</field>
+    <field name="name">Godzilla: Final Wars</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Monster movie</field>
-    <field name="name">Godzilla: Final Wars</field>
-    <field name="initial_release_date">2004-11-29</field>
-    <field name="id">/en/godzilla_final_wars</field>
     <field name="directed_by">Ryuhei Kitamura</field>
   </doc>
   <doc>
-    <field name="genre">Comedy</field>
-    <field name="name">Going the Distance</field>
-    <field name="initial_release_date">2004-08-20</field>
     <field name="id">/en/going_the_distance</field>
+    <field name="initial_release_date">2004-08-20</field>
+    <field name="name">Going the Distance</field>
+    <field name="genre">Comedy</field>
     <field name="directed_by">Mark Griffiths</field>
   </doc>
   <doc>
+    <field name="id">/en/going_to_the_mat</field>
+    <field name="initial_release_date">2004-03-19</field>
+    <field name="name">Going to the Mat</field>
     <field name="genre">Family</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="name">Going to the Mat</field>
-    <field name="initial_release_date">2004-03-19</field>
-    <field name="id">/en/going_to_the_mat</field>
     <field name="directed_by">Stuart Gillard</field>
   </doc>
   <doc>
+    <field name="id">/en/going_upriver</field>
+    <field name="initial_release_date">2004-09-14</field>
+    <field name="name">Going Upriver</field>
     <field name="genre">Documentary film</field>
     <field name="genre">War film</field>
     <field name="genre">Political cinema</field>
-    <field name="name">Going Upriver</field>
-    <field name="initial_release_date">2004-09-14</field>
-    <field name="id">/en/going_upriver</field>
     <field name="directed_by">George Butler</field>
   </doc>
   <doc>
+    <field name="id">/en/golmaal</field>
+    <field name="initial_release_date">2006-07-14</field>
+    <field name="name">Golmaal: Fun Unlimited</field>
     <field name="genre">Musical</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Golmaal: Fun Unlimited</field>
-    <field name="initial_release_date">2006-07-14</field>
-    <field name="id">/en/golmaal</field>
     <field name="directed_by">Rohit Shetty</field>
   </doc>
   <doc>
+    <field name="id">/en/gone_in_sixty_seconds</field>
+    <field name="initial_release_date">2000-06-05</field>
+    <field name="name">Gone in 60 Seconds</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Heist film</field>
     <field name="genre">Action/Adventure</field>
-    <field name="name">Gone in 60 Seconds</field>
-    <field name="initial_release_date">2000-06-05</field>
-    <field name="id">/en/gone_in_sixty_seconds</field>
     <field name="directed_by">Dominic Sena</field>
   </doc>
   <doc>
+    <field name="id">/en/good_bye_lenin</field>
+    <field name="initial_release_date">2003-02-09</field>
+    <field name="name">Good bye, Lenin!</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="genre">Tragicomedy</field>
-    <field name="name">Good bye, Lenin!</field>
-    <field name="initial_release_date">2003-02-09</field>
-    <field name="id">/en/good_bye_lenin</field>
     <field name="directed_by">Wolfgang Becker</field>
   </doc>
   <doc>
+    <field name="id">/en/good_luck_chuck</field>
+    <field name="initial_release_date">2007-06-13</field>
+    <field name="name">Good Luck Chuck</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Good Luck Chuck</field>
-    <field name="initial_release_date">2007-06-13</field>
-    <field name="id">/en/good_luck_chuck</field>
     <field name="directed_by">Mark Helfrich</field>
   </doc>
   <doc>
+    <field name="id">/en/good_night_and_good_luck</field>
+    <field name="initial_release_date">2005-09-01</field>
+    <field name="name">Good Night, and Good Luck</field>
     <field name="genre">Political drama</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Docudrama</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Historical fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Good Night, and Good Luck</field>
-    <field name="initial_release_date">2005-09-01</field>
-    <field name="id">/en/good_night_and_good_luck</field>
     <field name="directed_by">George Clooney</field>
   </doc>
   <doc>
+    <field name="id">/en/goodbye_dragon_inn</field>
+    <field name="initial_release_date">2003-12-12</field>
+    <field name="name">Goodbye, Dragon Inn</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Comedy of manners</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Goodbye, Dragon Inn</field>
-    <field name="initial_release_date">2003-12-12</field>
-    <field name="id">/en/goodbye_dragon_inn</field>
     <field name="directed_by">Tsai Ming-liang</field>
   </doc>
   <doc>
+    <field name="id">/en/gosford_park</field>
+    <field name="initial_release_date">2001-11-07</field>
+    <field name="name">Gosford Park</field>
     <field name="genre">Mystery</field>
     <field name="genre">Drama</field>
-    <field name="name">Gosford Park</field>
-    <field name="initial_release_date">2001-11-07</field>
-    <field name="id">/en/gosford_park</field>
     <field name="directed_by">Robert Altman</field>
   </doc>
   <doc>
+    <field name="id">/en/gothika</field>
+    <field name="initial_release_date">2003-11-13</field>
+    <field name="name">Gothika</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Supernatural</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Mystery</field>
-    <field name="name">Gothika</field>
-    <field name="initial_release_date">2003-11-13</field>
-    <field name="id">/en/gothika</field>
     <field name="directed_by">Mathieu Kassovitz</field>
   </doc>
   <doc>
+    <field name="id">/en/gotta_kick_it_up</field>
+    <field name="name">Gotta Kick It Up!</field>
     <field name="genre">Teen film</field>
     <field name="genre">Television film</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Family</field>
-    <field name="name">Gotta Kick It Up!</field>
-    <field name="id">/en/gotta_kick_it_up</field>
     <field name="directed_by">Ramón Menéndez</field>
   </doc>
   <doc>
+    <field name="id">/en/goyas_ghosts</field>
+    <field name="initial_release_date">2006-11-08</field>
+    <field name="name">Goya's Ghosts</field>
     <field name="genre">Biographical film</field>
     <field name="genre">War film</field>
     <field name="genre">Drama</field>
-    <field name="name">Goya's Ghosts</field>
-    <field name="initial_release_date">2006-11-08</field>
-    <field name="id">/en/goyas_ghosts</field>
     <field name="directed_by">Miloš Forman</field>
   </doc>
   <doc>
+    <field name="id">/en/gozu</field>
+    <field name="initial_release_date">2003-07-12</field>
+    <field name="name">Gozu</field>
     <field name="genre">Horror</field>
     <field name="genre">Surrealism</field>
     <field name="genre">World cinema</field>
     <field name="genre">Japanese Movies</field>
     <field name="genre">Horror comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Gozu</field>
-    <field name="initial_release_date">2003-07-12</field>
-    <field name="id">/en/gozu</field>
     <field name="directed_by">Takashi Miike</field>
   </doc>
   <doc>
+    <field name="id">/en/grande_ecole</field>
+    <field name="initial_release_date">2004-02-04</field>
+    <field name="name">Grande École</field>
     <field name="genre">World cinema</field>
     <field name="genre">LGBT</field>
     <field name="genre">Romance Film</field>
@@ -10861,102 +10857,99 @@
     <field name="genre">Ensemble Film</field>
     <field name="genre">Erotic Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Grande École</field>
-    <field name="initial_release_date">2004-02-04</field>
-    <field name="id">/en/grande_ecole</field>
     <field name="directed_by">Robert Salis</field>
   </doc>
   <doc>
+    <field name="id">/en/grandmas_boy</field>
+    <field name="initial_release_date">2006-01-06</field>
+    <field name="name">Grandma's Boy</field>
     <field name="genre">Stoner film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Grandma's Boy</field>
-    <field name="initial_release_date">2006-01-06</field>
-    <field name="id">/en/grandmas_boy</field>
     <field name="directed_by">Nicholaus Goossen</field>
   </doc>
   <doc>
+    <field name="id">/en/grayson_2004</field>
+    <field name="initial_release_date">2004-07-20</field>
+    <field name="name">Grayson</field>
     <field name="genre">Indie film</field>
     <field name="genre">Fan film</field>
     <field name="genre">Short Film</field>
-    <field name="name">Grayson</field>
-    <field name="initial_release_date">2004-07-20</field>
-    <field name="id">/en/grayson_2004</field>
     <field name="directed_by">John Fiorella</field>
   </doc>
   <doc>
+    <field name="id">/en/grbavica_2006</field>
+    <field name="initial_release_date">2006-02-12</field>
+    <field name="name">Grbavica: The Land of My Dreams</field>
     <field name="genre">War film</field>
     <field name="genre">Art film</field>
     <field name="genre">Drama</field>
-    <field name="name">Grbavica: The Land of My Dreams</field>
-    <field name="initial_release_date">2006-02-12</field>
-    <field name="id">/en/grbavica_2006</field>
     <field name="directed_by">Jasmila Žbanić</field>
   </doc>
   <doc>
+    <field name="id">/en/green_street</field>
+    <field name="initial_release_date">2005-03-12</field>
+    <field name="name">Green Street</field>
     <field name="genre">Sports</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Green Street</field>
-    <field name="initial_release_date">2005-03-12</field>
-    <field name="id">/en/green_street</field>
     <field name="directed_by">Lexi Alexander</field>
   </doc>
   <doc>
+    <field name="id">/en/green_tea_2003</field>
+    <field name="initial_release_date">2003-08-18</field>
+    <field name="name">Green Tea</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Green Tea</field>
-    <field name="initial_release_date">2003-08-18</field>
-    <field name="id">/en/green_tea_2003</field>
     <field name="directed_by">Zhang Yuan</field>
   </doc>
   <doc>
+    <field name="id">/en/greenfingers</field>
+    <field name="initial_release_date">2001-09-14</field>
+    <field name="name">Greenfingers</field>
     <field name="genre">Comedy-drama</field>
     <field name="genre">Prison film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Greenfingers</field>
-    <field name="initial_release_date">2001-09-14</field>
-    <field name="id">/en/greenfingers</field>
     <field name="directed_by">Joel Hershman</field>
   </doc>
   <doc>
+    <field name="id">/en/gridiron_gang</field>
+    <field name="initial_release_date">2006-09-15</field>
+    <field name="name">Gridiron Gang</field>
     <field name="genre">Sports</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Drama</field>
-    <field name="name">Gridiron Gang</field>
-    <field name="initial_release_date">2006-09-15</field>
-    <field name="id">/en/gridiron_gang</field>
     <field name="directed_by">Phil Joanou</field>
   </doc>
   <doc>
+    <field name="id">/en/grill_point</field>
+    <field name="initial_release_date">2002-02-12</field>
+    <field name="name">Grill Point</field>
     <field name="genre">Drama</field>
     <field name="genre">Comedy</field>
     <field name="genre">Tragicomedy</field>
     <field name="genre">Comedy-drama</field>
-    <field name="name">Grill Point</field>
-    <field name="initial_release_date">2002-02-12</field>
-    <field name="id">/en/grill_point</field>
     <field name="directed_by">Andreas Dresen</field>
   </doc>
   <doc>
+    <field name="id">/en/grilled</field>
+    <field name="initial_release_date">2006-07-11</field>
+    <field name="name">Grilled</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Workplace Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Grilled</field>
-    <field name="initial_release_date">2006-07-11</field>
-    <field name="id">/en/grilled</field>
     <field name="directed_by">Jason Ensler</field>
   </doc>
   <doc>
+    <field name="id">/en/grind_house</field>
+    <field name="initial_release_date">2007-04-06</field>
+    <field name="name">Grindhouse</field>
     <field name="genre">Slasher</field>
     <field name="genre">Thriller</field>
     <field name="genre">Action Film</field>
     <field name="genre">Horror</field>
     <field name="genre">Zombie Film</field>
-    <field name="name">Grindhouse</field>
-    <field name="initial_release_date">2007-04-06</field>
-    <field name="id">/en/grind_house</field>
     <field name="directed_by">Robert Rodriguez</field>
     <field name="directed_by">Quentin Tarantino</field>
     <field name="directed_by">Eli Roth</field>
@@ -10965,96 +10958,98 @@
     <field name="directed_by">Jason Eisener</field>
   </doc>
   <doc>
+    <field name="id">/en/grizzly_falls</field>
+    <field name="initial_release_date">2004-06-28</field>
+    <field name="name">Grizzly Falls</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Animal Picture</field>
     <field name="genre">Family-Oriented Adventure</field>
     <field name="genre">Family</field>
     <field name="genre">Drama</field>
-    <field name="name">Grizzly Falls</field>
-    <field name="initial_release_date">2004-06-28</field>
-    <field name="id">/en/grizzly_falls</field>
     <field name="directed_by">Stewart Raffill</field>
   </doc>
   <doc>
+    <field name="id">/en/grizzly_man</field>
+    <field name="initial_release_date">2005-01-24</field>
+    <field name="name">Grizzly Man</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Biographical film</field>
-    <field name="name">Grizzly Man</field>
-    <field name="initial_release_date">2005-01-24</field>
-    <field name="id">/en/grizzly_man</field>
     <field name="directed_by">Werner Herzog</field>
   </doc>
   <doc>
+    <field name="id">/en/grodmin</field>
+    <field name="name">GRODMIN</field>
     <field name="genre">Avant-garde</field>
     <field name="genre">Experimental film</field>
     <field name="genre">Drama</field>
-    <field name="name">GRODMIN</field>
-    <field name="id">/en/grodmin</field>
     <field name="directed_by">Jim Horwitz</field>
   </doc>
   <doc>
+    <field name="id">/en/gudumba_shankar</field>
+    <field name="initial_release_date">2004-09-09</field>
+    <field name="name">Gudumba Shankar</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Gudumba Shankar</field>
-    <field name="initial_release_date">2004-09-09</field>
-    <field name="id">/en/gudumba_shankar</field>
     <field name="directed_by">Veera Shankar</field>
   </doc>
   <doc>
+    <field name="id">/en/che_part_two</field>
+    <field name="initial_release_date">2008-05-21</field>
+    <field name="name">Che: Part Two</field>
     <field name="genre">Biographical film</field>
     <field name="genre">War film</field>
     <field name="genre">Historical drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Che: Part Two</field>
-    <field name="initial_release_date">2008-05-21</field>
-    <field name="id">/en/che_part_two</field>
     <field name="directed_by">Steven Soderbergh</field>
   </doc>
   <doc>
+    <field name="id">/en/guess_who_2005</field>
+    <field name="initial_release_date">2005-03-25</field>
+    <field name="name">Guess Who</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy of manners</field>
     <field name="genre">Domestic Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Guess Who</field>
-    <field name="initial_release_date">2005-03-25</field>
-    <field name="id">/en/guess_who_2005</field>
     <field name="directed_by">Kevin Rodney Sullivan</field>
   </doc>
   <doc>
+    <field name="id">/en/gunner_palace</field>
+    <field name="initial_release_date">2005-03-04</field>
+    <field name="name">Gunner Palace</field>
     <field name="genre">Documentary film</field>
     <field name="genre">Indie film</field>
     <field name="genre">War film</field>
-    <field name="name">Gunner Palace</field>
-    <field name="initial_release_date">2005-03-04</field>
-    <field name="id">/en/gunner_palace</field>
     <field name="directed_by">Michael Tucker</field>
     <field name="directed_by">Petra Epperlein</field>
   </doc>
   <doc>
+    <field name="id">/en/guru_2007</field>
+    <field name="initial_release_date">2007-01-12</field>
+    <field name="name">Guru</field>
     <field name="genre">Biographical film</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical Drama</field>
-    <field name="name">Guru</field>
-    <field name="initial_release_date">2007-01-12</field>
-    <field name="id">/en/guru_2007</field>
     <field name="directed_by">Mani Ratnam</field>
   </doc>
   <doc>
+    <field name="id">/en/primeval_2007</field>
+    <field name="initial_release_date">2007-01-12</field>
+    <field name="name">Primeval</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Natural horror film</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
-    <field name="name">Primeval</field>
-    <field name="initial_release_date">2007-01-12</field>
-    <field name="id">/en/primeval_2007</field>
     <field name="directed_by">Michael Katleman</field>
   </doc>
   <doc>
+    <field name="id">/en/gypsy_83</field>
+    <field name="name">Gypsy 83</field>
     <field name="genre">Coming of age</field>
     <field name="genre">LGBT</field>
     <field name="genre">Black comedy</field>
@@ -11063,11 +11058,12 @@
     <field name="genre">Road movie</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Gypsy 83</field>
-    <field name="id">/en/gypsy_83</field>
     <field name="directed_by">Todd Stephens</field>
   </doc>
   <doc>
+    <field name="id">/en/h_2002</field>
+    <field name="initial_release_date">2002-12-27</field>
+    <field name="name">H</field>
     <field name="genre">Thriller</field>
     <field name="genre">Horror</field>
     <field name="genre">Drama</field>
@@ -11075,22 +11071,22 @@
     <field name="genre">Crime Fiction</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="name">H</field>
-    <field name="initial_release_date">2002-12-27</field>
-    <field name="id">/en/h_2002</field>
     <field name="directed_by">Jong-hyuk Lee</field>
   </doc>
   <doc>
+    <field name="id">/en/h_g_wells_the_war_of_the_worlds</field>
+    <field name="initial_release_date">2005-06-14</field>
+    <field name="name">H. G. Wells' The War of the Worlds</field>
     <field name="genre">Indie film</field>
     <field name="genre">Steampunk</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Thriller</field>
-    <field name="name">H. G. Wells' The War of the Worlds</field>
-    <field name="initial_release_date">2005-06-14</field>
-    <field name="id">/en/h_g_wells_the_war_of_the_worlds</field>
     <field name="directed_by">Timothy Hines</field>
   </doc>
   <doc>
+    <field name="id">/en/h_g_wells_war_of_the_worlds</field>
+    <field name="initial_release_date">2005-06-28</field>
+    <field name="name">H. G. Wells' War of the Worlds</field>
     <field name="genre">Indie film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Thriller</field>
@@ -11100,127 +11096,127 @@
     <field name="genre">Horror</field>
     <field name="genre">Mockbuster</field>
     <field name="genre">Drama</field>
-    <field name="name">H. G. Wells' War of the Worlds</field>
-    <field name="initial_release_date">2005-06-28</field>
-    <field name="id">/en/h_g_wells_war_of_the_worlds</field>
     <field name="directed_by">David Michael Latt</field>
   </doc>
   <doc>
+    <field name="id">/en/hadh_kar_di_aapne</field>
+    <field name="initial_release_date">2000-04-14</field>
+    <field name="name">Hadh Kar Di Aapne</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Bollywood</field>
-    <field name="name">Hadh Kar Di Aapne</field>
-    <field name="initial_release_date">2000-04-14</field>
-    <field name="id">/en/hadh_kar_di_aapne</field>
     <field name="directed_by">Manoj Agrawal</field>
   </doc>
   <doc>
+    <field name="id">/en/haggard_the_movie</field>
+    <field name="initial_release_date">2003-06-24</field>
+    <field name="name">Haggard: The Movie</field>
     <field name="genre">Indie film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Haggard: The Movie</field>
-    <field name="initial_release_date">2003-06-24</field>
-    <field name="id">/en/haggard_the_movie</field>
     <field name="directed_by">Bam Margera</field>
   </doc>
   <doc>
+    <field name="id">/en/haiku_tunnel</field>
+    <field name="name">Haiku Tunnel</field>
     <field name="genre">Black comedy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Satire</field>
     <field name="genre">Workplace Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Haiku Tunnel</field>
-    <field name="id">/en/haiku_tunnel</field>
     <field name="directed_by">Jacob Kornbluth</field>
     <field name="directed_by">Josh Kornbluth</field>
   </doc>
   <doc>
+    <field name="id">/en/hairspray</field>
+    <field name="initial_release_date">2007-07-13</field>
+    <field name="name">Hairspray</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Musical comedy</field>
-    <field name="name">Hairspray</field>
-    <field name="initial_release_date">2007-07-13</field>
-    <field name="id">/en/hairspray</field>
     <field name="directed_by">Adam Shankman</field>
   </doc>
   <doc>
+    <field name="id">/en/half_nelson</field>
+    <field name="initial_release_date">2006-01-23</field>
+    <field name="name">Half Nelson</field>
     <field name="genre">Social problem film</field>
     <field name="genre">Drama</field>
-    <field name="name">Half Nelson</field>
-    <field name="initial_release_date">2006-01-23</field>
-    <field name="id">/en/half_nelson</field>
     <field name="directed_by">Ryan Fleck</field>
   </doc>
   <doc>
+    <field name="id">/en/half_life_2006</field>
+    <field name="name">Half-Life</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Indie film</field>
     <field name="genre">Science Fiction</field>
     <field name="genre">Fantasy Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Half-Life</field>
-    <field name="id">/en/half_life_2006</field>
     <field name="directed_by">Jennifer Phang</field>
   </doc>
   <doc>
+    <field name="id">/en/halloween_resurrection</field>
+    <field name="initial_release_date">2002-07-12</field>
+    <field name="name">Halloween Resurrection</field>
     <field name="genre">Slasher</field>
     <field name="genre">Horror</field>
     <field name="genre">Cult film</field>
     <field name="genre">Teen film</field>
-    <field name="name">Halloween Resurrection</field>
-    <field name="initial_release_date">2002-07-12</field>
-    <field name="id">/en/halloween_resurrection</field>
     <field name="directed_by">Rick Rosenthal</field>
   </doc>
   <doc>
+    <field name="id">/en/halloweentown_high</field>
+    <field name="initial_release_date">2004-10-08</field>
+    <field name="name">Halloweentown High</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Teen film</field>
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Family</field>
-    <field name="name">Halloweentown High</field>
-    <field name="initial_release_date">2004-10-08</field>
-    <field name="id">/en/halloweentown_high</field>
     <field name="directed_by">Mark A.Z. Dippé</field>
   </doc>
   <doc>
+    <field name="id">/en/halloweentown_ii_kalabars_revenge</field>
+    <field name="initial_release_date">2001-10-12</field>
+    <field name="name">Halloweentown II: Kalabar's Revenge</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Children's Fantasy</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Family</field>
-    <field name="name">Halloweentown II: Kalabar's Revenge</field>
-    <field name="initial_release_date">2001-10-12</field>
-    <field name="id">/en/halloweentown_ii_kalabars_revenge</field>
     <field name="directed_by">Mary Lambert</field>
   </doc>
   <doc>
+    <field name="id">/en/halloweentown_witch_u</field>
+    <field name="initial_release_date">2006-10-20</field>
+    <field name="name">Return to Halloweentown</field>
     <field name="genre">Family</field>
     <field name="genre">Children's/Family</field>
     <field name="genre">Fantasy Comedy</field>
     <field name="genre">Comedy</field>
-    <field name="name">Return to Halloweentown</field>
-    <field name="initial_release_date">2006-10-20</field>
-    <field name="id">/en/halloweentown_witch_u</field>
     <field name="directed_by">David Jackson</field>
   </doc>
   <doc>
+    <field name="id">/en/hamlet_2000</field>
+    <field name="initial_release_date">2000-05-12</field>
+    <field name="name">Hamlet</field>
     <field name="genre">Thriller</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Hamlet</field>
-    <field name="initial_release_date">2000-05-12</field>
-    <field name="id">/en/hamlet_2000</field>
     <field name="directed_by">Michael Almereyda</field>
   </doc>
   <doc>
+    <field name="id">/en/hana_alice</field>
+    <field name="initial_release_date">2004-03-13</field>
+    <field name="name">Hana and Alice</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Hana and Alice</field>
-    <field name="initial_release_date">2004-03-13</field>
-    <field name="id">/en/hana_alice</field>
     <field name="directed_by">Shunji Iwai</field>
   </doc>
   <doc>
+    <field name="id">/en/hannibal</field>
+    <field name="initial_release_date">2001-02-09</field>
+    <field name="name">Hannibal</field>
     <field name="genre">Thriller</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Horror</field>
@@ -11228,64 +11224,64 @@
     <field name="genre">Mystery</field>
     <field name="genre">Crime Thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Hannibal</field>
-    <field name="initial_release_date">2001-02-09</field>
-    <field name="id">/en/hannibal</field>
     <field name="directed_by">Ridley Scott</field>
   </doc>
   <doc>
-    <field name="genre">Drama</field>
-    <field name="name">Making Babies</field>
-    <field name="initial_release_date">2001-01-29</field>
     <field name="id">/en/hans_och_hennes</field>
+    <field name="initial_release_date">2001-01-29</field>
+    <field name="name">Making Babies</field>
+    <field name="genre">Drama</field>
     <field name="directed_by">Daniel Lind Lagerlöf</field>
   </doc>
   <doc>
+    <field name="id">/en/hanuman_2005</field>
+    <field name="initial_release_date">2005-10-21</field>
+    <field name="name">Hanuman</field>
     <field name="genre">Animation</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Hanuman</field>
-    <field name="initial_release_date">2005-10-21</field>
-    <field name="id">/en/hanuman_2005</field>
     <field name="directed_by">V.G. Samant</field>
     <field name="directed_by">Milind Ukey</field>
   </doc>
   <doc>
+    <field name="id">/en/hanuman_junction</field>
+    <field name="initial_release_date">2001-12-21</field>
+    <field name="name">Hanuman Junction</field>
     <field name="genre">Action Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="genre">Tollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">Hanuman Junction</field>
-    <field name="initial_release_date">2001-12-21</field>
-    <field name="id">/en/hanuman_junction</field>
     <field name="directed_by">M.Raja</field>
   </doc>
   <doc>
+    <field name="id">/en/happily_never_after</field>
+    <field name="initial_release_date">2006-12-16</field>
+    <field name="name">Happily N'Ever After</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Animation</field>
     <field name="genre">Family</field>
     <field name="genre">Comedy</field>
     <field name="genre">Adventure Film</field>
-    <field name="name">Happily N'Ever After</field>
-    <field name="initial_release_date">2006-12-16</field>
-    <field name="id">/en/happily_never_after</field>
     <field name="directed_by">Paul J. Bolger</field>
     <field name="directed_by">Yvette Kaplan</field>
   </doc>
   <doc>
+    <field name="id">/en/happy_2006</field>
+    <field name="initial_release_date">2006-01-27</field>
+    <field name="name">Happy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Musical</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
     <field name="genre">Musical comedy</field>
     <field name="genre">Musical Drama</field>
-    <field name="name">Happy</field>
-    <field name="initial_release_date">2006-01-27</field>
-    <field name="id">/en/happy_2006</field>
     <field name="directed_by">A. Karunakaran</field>
   </doc>
   <doc>
+    <field name="id">/en/happy_endings</field>
+    <field name="initial_release_date">2005-01-20</field>
+    <field name="name">Happy Endings</field>
     <field name="genre">LGBT</field>
     <field name="genre">Music</field>
     <field name="genre">Thriller</field>
@@ -11294,110 +11290,110 @@
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">Drama</field>
-    <field name="name">Happy Endings</field>
-    <field name="initial_release_date">2005-01-20</field>
-    <field name="id">/en/happy_endings</field>
     <field name="directed_by">Don Roos</field>
   </doc>
   <doc>
+    <field name="id">/en/happy_ero_christmas</field>
+    <field name="initial_release_date">2003-12-17</field>
+    <field name="name">Happy Ero Christmas</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Comedy</field>
     <field name="genre">East Asian cinema</field>
     <field name="genre">World cinema</field>
-    <field name="name">Happy Ero Christmas</field>
-    <field name="initial_release_date">2003-12-17</field>
-    <field name="id">/en/happy_ero_christmas</field>
     <field name="directed_by">Lee Geon-dong</field>
   </doc>
   <doc>
+    <field name="id">/en/happy_feet</field>
+    <field name="initial_release_date">2006-11-16</field>
+    <field name="name">Happy Feet</field>
     <field name="genre">Family</field>
     <field name="genre">Animation</field>
     <field name="genre">Comedy</field>
     <field name="genre">Music</field>
     <field name="genre">Musical</field>
     <field name="genre">Musical comedy</field>
-    <field name="name">Happy Feet</field>
-    <field name="initial_release_date">2006-11-16</field>
-    <field name="id">/en/happy_feet</field>
     <field name="directed_by">George Miller</field>
     <field name="directed_by">Warren Coleman</field>
     <field name="directed_by">Judy Morris</field>
   </doc>
   <doc>
+    <field name="id">/wikipedia/en_title/I_Love_New_Year</field>
+    <field name="initial_release_date">2013-12-30</field>
+    <field name="name">I Love New Year</field>
     <field name="genre">Caper story</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Romantic comedy</field>
     <field name="genre">Romance Film</field>
     <field name="genre">Bollywood</field>
     <field name="genre">World cinema</field>
-    <field name="name">I Love New Year</field>
-    <field name="initial_release_date">2013-12-30</field>
-    <field name="id">/wikipedia/en_title/I_Love_New_Year</field>
     <field name="directed_by">Radhika Rao</field>
     <field name="directed_by">Vinay Sapru</field>
   </doc>
   <doc>
+    <field name="id">/en/har_dil_jo_pyar_karega</field>
+    <field name="initial_release_date">2000-07-24</field>
+    <field name="name">Har Dil Jo Pyar Karega</field>
     <field name="genre">Musical</field>
     <field name="genre">Romance Film</field>
     <field name="genre">World cinema</field>
     <field name="genre">Musical Drama</field>
     <field name="genre">Drama</field>
-    <field name="name">Har Dil Jo Pyar Karega</field>
-    <field name="initial_release_date">2000-07-24</field>
-    <field name="id">/en/har_dil_jo_pyar_karega</field>
     <field name="directed_by">Raj Kanwar</field>
   </doc>
   <doc>
+    <field name="id">/en/hard_candy</field>
+    <field name="name">Hard Candy</field>
     <field name="genre">Psychological thriller</field>
     <field name="genre">Thriller</field>
     <field name="genre">Suspense</field>
     <field name="genre">Indie film</field>
     <field name="genre">Erotic thriller</field>
     <field name="genre">Drama</field>
-    <field name="name">Hard Candy</field>
-    <field name="id">/en/hard_candy</field>
     <field name="directed_by">David Slade</field>
   </doc>
   <doc>
+    <field name="id">/en/hard_luck</field>
+    <field name="initial_release_date">2006-10-17</field>
+    <field name="name">Hard Luck</field>
     <field name="genre">Thriller</field>
     <field name="genre">Crime Fiction</field>
     <field name="genre">Action/Adventure</field>
     <field name="genre">Action Film</field>
     <field name="genre">Drama</field>
-    <field name="name">Hard Luck</field>
-    <field name="initial_release_date">2006-10-17</field>
-    <field name="id">/en/hard_luck</field>
     <field name="directed_by">Mario Van Peebles</field>
   </doc>
   <doc>
+    <field name="id">/en/hardball</field>
+    <field name="initial_release_date">2001-09-14</field>
+    <field name="name">Hardball</field>
     <field name="genre">Sports</field>
     <field name="genre">Drama</field>
-    <field name="name">Hardball</field>
-    <field name="initial_release_date">2001-09-14</field>
-    <field name="id">/en/hardball</field>
     <field name="directed_by">Brian Robbins</field>
   </doc>
   <doc>
+    <field name="id">/en/harold_kumar_go_to_white_castle</field>
+    <field name="initial_release_date">2004-05-20</field>
+    <field name="name">Harold &amp;amp; Kumar Go to White Castle</field>
     <field name="genre">Stoner film</field>
     <field name="genre">Buddy film</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Comedy</field>
-    <field name="name">Harold &amp;amp; Kumar Go to White Castle</field>
-    <field name="initial_release_date">2004-05-20</field>
-    <field name="id">/en/harold_kumar_go_to_white_castle</field>
     <field name="directed_by">Danny Leiner</field>
   </doc>
   <doc>
+    <field name="id">/en/harry_potter_and_the_chamber_of_secrets_2002</field>
+    <field name="initial_release_date">2002-11-03</field>
+    <field name="name">Harry Potter and the Chamber of Secrets</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Mystery</field>
-    <field name="name">Harry Potter and the Chamber of Secrets</field>
-    <field name="initial_release_date">2002-11-03</field>
-    <field name="id">/en/harry_potter_and_the_chamber_of_secrets_2002</field>
     <field name="directed_by">Chris Columbus</field>
   </doc>
   <doc>
+    <field name="id">/en/harry_potter_and_the_goblet_of_fire_2005</field>
+    <field name="initial_release_date">2005-11-06</field>
+    <field name="name">Harry Potter and the Goblet of Fire</field>
     <field name="genre">Family</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Adventure Film</field>
@@ -11409,12 +11405,12 @@
     <field name="genre">Children's/Family</field>
     <field name="genre">Fantasy Adventure</field>
     <field name="genre">Fiction</field>
-    <field name="name">Harry Potter and the Goblet of Fire</field>
-    <field name="initial_release_date">2005-11-06</field>
-    <field name="id">/en/harry_potter_and_the_goblet_of_fire_2005</field>
     <field name="directed_by">Mike Newell</field>
   </doc>
   <doc>
+    <field name="id">/en/harry_potter_and_the_half_blood_prince_2008</field>
+    <field name="initial_release_date">2009-07-06</field>
+    <field name="name">Harry Potter and the Half-Blood Prince</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Mystery</field>
@@ -11425,21 +11421,18 @@
     <field name="genre">Children's/Family</field>
     <field name="genre">Fantasy Adventure</field>
     <field name="genre">Fiction</field>
-    <field name="name">Harry Potter and the Half-Blood Prince</field>
-    <field name="initial_release_date">2009-07-06</field>
-    <field name="id">/en/harry_potter_and_the_half_blood_prince_2008</field>
     <field name="directed_by">David Yates</field>
   </doc>
   <doc>
+    <field name="id">/en/harry_potter_and_the_order_of_the_phoenix_2007</field>
+    <field name="initial_release_date">2007-06-28</field>
+    <field name="name">Harry Potter and the Order of the Phoenix</field>
     <field name="genre">Family</field>
     <field name="genre">Mystery</field>
     <field name="genre">Adventure Film</field>
     <field name="genre">Fantasy</field>
     <field name="genre">Fantasy Adventure</field>
     <field name="genre">Fiction</field>
-    <field name="name">Harry Potter and the Order of the Phoenix</field>
-    <field name="initial_release_date">2007-06-28</field>
-    <field name="id">/en/harry_potter_and_the_order_of_the_phoenix_2007</field>
     <field name="directed_by">David Yates</field>
   </doc>
 </add>
diff --git a/solr/licenses/apacheds-all-2.0.0-M15.jar.sha1 b/solr/licenses/apacheds-all-2.0.0-M15.jar.sha1
new file mode 100644
index 0000000..73021a3
--- /dev/null
+++ b/solr/licenses/apacheds-all-2.0.0-M15.jar.sha1
@@ -0,0 +1 @@
+14285c8b4399d18097140b74fb5b4783807730e2
diff --git a/solr/licenses/apacheds-all-LICENSE-ASL.txt b/solr/licenses/apacheds-all-LICENSE-ASL.txt
new file mode 100644
index 0000000..3615005
--- /dev/null
+++ b/solr/licenses/apacheds-all-LICENSE-ASL.txt
@@ -0,0 +1,341 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+--------------------------------------------------------------------------------------------------
+ANTLR 2 License
+
+We reserve no legal rights to the ANTLR--it is fully in the public domain. An individual or company
+may do whatever they wish with source code distributed with ANTLR or the code generated by ANTLR,
+including the incorporation of ANTLR, or its output, into commercial software.
+
+We encourage users to develop software with ANTLR. However, we do ask that credit is given to us
+for developing ANTLR. By "credit", we mean that if you use ANTLR or incorporate any source code
+into one of your programs (commercial product, research project, or otherwise) that you acknowledge
+this fact somewhere in the documentation, research report, etc... If you like ANTLR and have developed
+a nice tool with the output, please mention that you developed it using ANTLR. In addition, we ask that
+the headers remain intact in our source code. As long as these guidelines are kept, we expect to
+continue enhancing this system and expect to make other tools available as they are completed.
+
+--------------------------------------------------------------------------------------------------
+/**
+ * JDBM LICENSE v1.00
+ *
+ * Redistribution and use of this software and associated documentation
+ * ("Software"), with or without modification, are permitted provided
+ * that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain copyright
+ *    statements and notices.  Redistributions must also contain a
+ *    copy of this document.
+ *
+ * 2. Redistributions in binary form must reproduce the
+ *    above copyright notice, this list of conditions and the
+ *    following disclaimer in the documentation and/or other
+ *    materials provided with the distribution.
+ *
+ * 3. The name "JDBM" must not be used to endorse or promote
+ *    products derived from this Software without prior written
+ *    permission of Cees de Groot.  For written permission,
+ *    please contact cg@cdegroot.com.
+ *
+ * 4. Products derived from this Software may not be called "JDBM"
+ *    nor may "JDBM" appear in their names without prior written
+ *    permission of Cees de Groot.
+ *
+ * 5. Due credit should be given to the JDBM Project
+ *    (http://jdbm.sourceforge.net/).
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE JDBM PROJECT AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT
+ * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL
+ * CEES DE GROOT OR ANY CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+ * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Copyright 2000 (C) Cees de Groot. All Rights Reserved.
+ * Contributions are Copyright (C) 2000 by their associated contributors.
+ *
+ * $Id: LICENSE.txt,v 1.1 2000/05/05 23:59:52 boisvert Exp $
+ */
+--------------------------------------------------------------------------------------------------
+Copyright (c) 2000-2006 The Legion Of The Bouncy Castle (http://www.bouncycastle.org)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial
+portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
+--------------------------------------------------------------------------------------------------
+JUG (package org/safehaus/uuid) is licensed under the AL v2.
+--------------------------------------------------------------------------------------------------
+Spring is licensed under the AL v2.
+--------------------------------------------------------------------------------------------------
+slf4j license:
+Copyright (c) 2004-2007 QOS.ch All rights reserved. Permission is hereby granted, free of charge,
+to any person obtaining a copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
+persons to whom the Software is furnished to do so, subject to the following conditions: The above
+copyright notice and this permission notice shall be included in all copies or substantial portions
+of the Software.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
+NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+--------------------------------------------------------------------------------------------------
+Copyright (c) 1999, 2004 Tanuki Software
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of the Java Service Wrapper and associated
+documentation files (the "Software"), to deal in the Software
+without  restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sub-license,
+and/or sell copies of the Software, and to permit persons to
+whom the Software is furnished to do so, subject to the
+following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NON-INFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+
+Portions of the Software have been derived from source code
+developed by Silver Egg Technology under the following license:
+
+Copyright (c) 2001 Silver Egg Technology
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sub-license, and/or
+sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
diff --git a/solr/licenses/apacheds-all-NOTICE.txt b/solr/licenses/apacheds-all-NOTICE.txt
new file mode 100644
index 0000000..2e454e2
--- /dev/null
+++ b/solr/licenses/apacheds-all-NOTICE.txt
@@ -0,0 +1,23 @@
+
+Apache Directory Server
+Copyright 2003-2013 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+Safehaus JUG
+Copyright 2005 Safehaus
+
+This product includes software developed at
+Safehaus (http://docs.safehaus.org/display/HAUS/Home).
+
+    ======================================================================
+    == NOTICE file corresponding to section 4 d of the Apache License,  ==
+    == Version 2.0, in this case for the Spring Framework distribution. ==
+    ======================================================================
+
+    This product includes software developed by
+    the Apache Software Foundation (http://www.apache.org).
+
+    This product also includes software developed by
+    Clinton Begin (http://www.ibatis.com).
diff --git a/solr/licenses/fontbox-1.8.6.jar.sha1 b/solr/licenses/fontbox-1.8.6.jar.sha1
deleted file mode 100644
index 42700bd..0000000
--- a/solr/licenses/fontbox-1.8.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-024be806ffd12fc8ab4d69e8992377a877de36b8
diff --git a/solr/licenses/fontbox-1.8.8.jar.sha1 b/solr/licenses/fontbox-1.8.8.jar.sha1
new file mode 100644
index 0000000..ed67d1e
--- /dev/null
+++ b/solr/licenses/fontbox-1.8.8.jar.sha1
@@ -0,0 +1 @@
+d65d291533631de29b0ee61a9527d08e24ca579a
diff --git a/solr/licenses/hadoop-annotations-2.2.0.jar.sha1 b/solr/licenses/hadoop-annotations-2.2.0.jar.sha1
deleted file mode 100644
index bfd4e4c..0000000
--- a/solr/licenses/hadoop-annotations-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ebd95f271e3bce5c0dca5926bb3eb36530cd9ab6
diff --git a/solr/licenses/hadoop-annotations-2.3.0.jar.sha1 b/solr/licenses/hadoop-annotations-2.3.0.jar.sha1
new file mode 100644
index 0000000..b2022f1
--- /dev/null
+++ b/solr/licenses/hadoop-annotations-2.3.0.jar.sha1
@@ -0,0 +1 @@
+7f7e399a640450d3d14d65a3d386f48bc49df400
diff --git a/solr/licenses/hadoop-auth-2.2.0.jar.sha1 b/solr/licenses/hadoop-auth-2.2.0.jar.sha1
deleted file mode 100644
index a545fb6..0000000
--- a/solr/licenses/hadoop-auth-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-74e5f8b2134be51312c004d29e33a7bf4377ce20
diff --git a/solr/licenses/hadoop-auth-2.3.0.jar.sha1 b/solr/licenses/hadoop-auth-2.3.0.jar.sha1
new file mode 100644
index 0000000..a4c7dc9
--- /dev/null
+++ b/solr/licenses/hadoop-auth-2.3.0.jar.sha1
@@ -0,0 +1 @@
+3b8bef9a70c015eb7ed1dbe5204e8697469293cb
diff --git a/solr/licenses/hadoop-common-2.2.0-tests.jar.sha1 b/solr/licenses/hadoop-common-2.2.0-tests.jar.sha1
deleted file mode 100644
index b7cf151..0000000
--- a/solr/licenses/hadoop-common-2.2.0-tests.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a9994d261d00295040a402cd2f611a2bac23972a
diff --git a/solr/licenses/hadoop-common-2.2.0.jar.sha1 b/solr/licenses/hadoop-common-2.2.0.jar.sha1
deleted file mode 100644
index 94c6e27..0000000
--- a/solr/licenses/hadoop-common-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a840350e4a1f5483f949ec5e72cfa2c6b2b3cf94
diff --git a/solr/licenses/hadoop-common-2.3.0-tests.jar.sha1 b/solr/licenses/hadoop-common-2.3.0-tests.jar.sha1
new file mode 100644
index 0000000..bb1eb0c
--- /dev/null
+++ b/solr/licenses/hadoop-common-2.3.0-tests.jar.sha1
@@ -0,0 +1 @@
+d181043223ea8ab07943be9a05d8a7be5b888221
diff --git a/solr/licenses/hadoop-common-2.3.0.jar.sha1 b/solr/licenses/hadoop-common-2.3.0.jar.sha1
new file mode 100644
index 0000000..b5a04bb
--- /dev/null
+++ b/solr/licenses/hadoop-common-2.3.0.jar.sha1
@@ -0,0 +1 @@
+f972dbdbdceb1c233ecf3e8731932ed83b75f472
diff --git a/solr/licenses/hadoop-hdfs-2.2.0-tests.jar.sha1 b/solr/licenses/hadoop-hdfs-2.2.0-tests.jar.sha1
deleted file mode 100644
index c541526..0000000
--- a/solr/licenses/hadoop-hdfs-2.2.0-tests.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9f20694a2b243f5511f1da3c2812bbb27cef24aa
diff --git a/solr/licenses/hadoop-hdfs-2.2.0.jar.sha1 b/solr/licenses/hadoop-hdfs-2.2.0.jar.sha1
deleted file mode 100644
index 37fbd9f..0000000
--- a/solr/licenses/hadoop-hdfs-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f2686b55818b9bae3e16d33a3f205a388920aa34
diff --git a/solr/licenses/hadoop-hdfs-2.3.0-tests.jar.sha1 b/solr/licenses/hadoop-hdfs-2.3.0-tests.jar.sha1
new file mode 100644
index 0000000..ea1dcec
--- /dev/null
+++ b/solr/licenses/hadoop-hdfs-2.3.0-tests.jar.sha1
@@ -0,0 +1 @@
+c94e4f9baff2f5a23ddb5ccc1196f65c0619ea32
diff --git a/solr/licenses/hadoop-hdfs-2.3.0.jar.sha1 b/solr/licenses/hadoop-hdfs-2.3.0.jar.sha1
new file mode 100644
index 0000000..934021e
--- /dev/null
+++ b/solr/licenses/hadoop-hdfs-2.3.0.jar.sha1
@@ -0,0 +1 @@
+93ce1263a75d4f976e5d331bf30b461979776a03
diff --git a/solr/licenses/hadoop-mapreduce-client-app-2.2.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-app-2.2.0.jar.sha1
deleted file mode 100644
index 32c3a59..0000000
--- a/solr/licenses/hadoop-mapreduce-client-app-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9e5bdd970000b330382128350a957609cbcfe348
diff --git a/solr/licenses/hadoop-mapreduce-client-app-2.3.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-app-2.3.0.jar.sha1
new file mode 100644
index 0000000..3f2abcc
--- /dev/null
+++ b/solr/licenses/hadoop-mapreduce-client-app-2.3.0.jar.sha1
@@ -0,0 +1 @@
+9fd98df0635b1af4fa6f77af4c6f192f7e62aad7
diff --git a/solr/licenses/hadoop-mapreduce-client-common-2.2.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-common-2.2.0.jar.sha1
deleted file mode 100644
index 87cb25e..0000000
--- a/solr/licenses/hadoop-mapreduce-client-common-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5600fdda58499e3901bf179f1614a8ca38090871
diff --git a/solr/licenses/hadoop-mapreduce-client-common-2.3.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-common-2.3.0.jar.sha1
new file mode 100644
index 0000000..49dd5e2
--- /dev/null
+++ b/solr/licenses/hadoop-mapreduce-client-common-2.3.0.jar.sha1
@@ -0,0 +1 @@
+4e9aafc76586d2fa713f15e6c6bd7c88c6a3da74
diff --git a/solr/licenses/hadoop-mapreduce-client-core-2.2.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-core-2.2.0.jar.sha1
deleted file mode 100644
index ead6387..0000000
--- a/solr/licenses/hadoop-mapreduce-client-core-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4be274d45f35543d3c4dd8e2bfed2cebc56696c7
diff --git a/solr/licenses/hadoop-mapreduce-client-core-2.3.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-core-2.3.0.jar.sha1
new file mode 100644
index 0000000..d65d277
--- /dev/null
+++ b/solr/licenses/hadoop-mapreduce-client-core-2.3.0.jar.sha1
@@ -0,0 +1 @@
+118cace157c354131382c5250426082516b954ec
diff --git a/solr/licenses/hadoop-mapreduce-client-hs-2.2.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-hs-2.2.0.jar.sha1
deleted file mode 100644
index 455d9cd..0000000
--- a/solr/licenses/hadoop-mapreduce-client-hs-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7c3b62138f881f1a98f02347b1002b9bde052b81
diff --git a/solr/licenses/hadoop-mapreduce-client-hs-2.3.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-hs-2.3.0.jar.sha1
new file mode 100644
index 0000000..abbfe76
--- /dev/null
+++ b/solr/licenses/hadoop-mapreduce-client-hs-2.3.0.jar.sha1
@@ -0,0 +1 @@
+5940dea3f8163c0b3938ebf06d187c59a59bc081
diff --git a/solr/licenses/hadoop-mapreduce-client-jobclient-2.2.0-tests.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-jobclient-2.2.0-tests.jar.sha1
deleted file mode 100644
index 6737620..0000000
--- a/solr/licenses/hadoop-mapreduce-client-jobclient-2.2.0-tests.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4c75b683a7d96a48172535c115b2067faf211cfc
diff --git a/solr/licenses/hadoop-mapreduce-client-jobclient-2.2.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-jobclient-2.2.0.jar.sha1
deleted file mode 100644
index 8f63967..0000000
--- a/solr/licenses/hadoop-mapreduce-client-jobclient-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-842d0c9d8793fd21bfbb1c6b1fa9fbc05698f76c
diff --git a/solr/licenses/hadoop-mapreduce-client-jobclient-2.3.0-tests.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-jobclient-2.3.0-tests.jar.sha1
new file mode 100644
index 0000000..34ca23e
--- /dev/null
+++ b/solr/licenses/hadoop-mapreduce-client-jobclient-2.3.0-tests.jar.sha1
@@ -0,0 +1 @@
+38e08485b844d50a0e7172f2ada494230f3df8da
diff --git a/solr/licenses/hadoop-mapreduce-client-jobclient-2.3.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-jobclient-2.3.0.jar.sha1
new file mode 100644
index 0000000..bb9d5c1
--- /dev/null
+++ b/solr/licenses/hadoop-mapreduce-client-jobclient-2.3.0.jar.sha1
@@ -0,0 +1 @@
+8699a554bc5b537c8f1451dede4cec682b1bcbe4
diff --git a/solr/licenses/hadoop-mapreduce-client-shuffle-2.2.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-shuffle-2.2.0.jar.sha1
deleted file mode 100644
index 1845c54..0000000
--- a/solr/licenses/hadoop-mapreduce-client-shuffle-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c4c9da8f8f6ab1e3ba68798f30360eff4ba52187
diff --git a/solr/licenses/hadoop-mapreduce-client-shuffle-2.3.0.jar.sha1 b/solr/licenses/hadoop-mapreduce-client-shuffle-2.3.0.jar.sha1
new file mode 100644
index 0000000..c1199ae
--- /dev/null
+++ b/solr/licenses/hadoop-mapreduce-client-shuffle-2.3.0.jar.sha1
@@ -0,0 +1 @@
+1439483aaf06c9afc9a8c9f6abc102f913af1987
diff --git a/solr/licenses/hadoop-minikdc-2.3.0.jar.sha1 b/solr/licenses/hadoop-minikdc-2.3.0.jar.sha1
new file mode 100644
index 0000000..8243e92
--- /dev/null
+++ b/solr/licenses/hadoop-minikdc-2.3.0.jar.sha1
@@ -0,0 +1 @@
+e55e4a3be5ab5c1b26706c81ca287ac573054153
diff --git a/solr/licenses/hadoop-minikdc-LICENSE-ASL.txt b/solr/licenses/hadoop-minikdc-LICENSE-ASL.txt
new file mode 100644
index 0000000..9a8e847
--- /dev/null
+++ b/solr/licenses/hadoop-minikdc-LICENSE-ASL.txt
@@ -0,0 +1,244 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+APACHE HADOOP SUBCOMPONENTS:
+
+The Apache Hadoop project contains subcomponents with separate copyright
+notices and license terms. Your use of the source code for these
+subcomponents is subject to the terms and conditions of the following
+licenses. 
+
+For the org.apache.hadoop.util.bloom.* classes:
+
+/**
+ *
+ * Copyright (c) 2005, European Commission project OneLab under contract
+ * 034819 (http://www.one-lab.org)
+ * All rights reserved.
+ * Redistribution and use in source and binary forms, with or 
+ * without modification, are permitted provided that the following 
+ * conditions are met:
+ *  - Redistributions of source code must retain the above copyright 
+ *    notice, this list of conditions and the following disclaimer.
+ *  - Redistributions in binary form must reproduce the above copyright 
+ *    notice, this list of conditions and the following disclaimer in 
+ *    the documentation and/or other materials provided with the distribution.
+ *  - Neither the name of the University Catholique de Louvain - UCL
+ *    nor the names of its contributors may be used to endorse or 
+ *    promote products derived from this software without specific prior 
+ *    written permission.
+ *    
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS 
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, 
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT 
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN 
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
\ No newline at end of file
diff --git a/solr/licenses/hadoop-minikdc-NOTICE.txt b/solr/licenses/hadoop-minikdc-NOTICE.txt
new file mode 100644
index 0000000..c56a5e4
--- /dev/null
+++ b/solr/licenses/hadoop-minikdc-NOTICE.txt
@@ -0,0 +1,2 @@
+This product includes software developed by The Apache Software
+Foundation (http://www.apache.org/).
\ No newline at end of file
diff --git a/solr/licenses/hadoop-yarn-api-2.2.0.jar.sha1 b/solr/licenses/hadoop-yarn-api-2.2.0.jar.sha1
deleted file mode 100644
index c81f37c..0000000
--- a/solr/licenses/hadoop-yarn-api-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-655910becbe9c5c60033e9e64e95aab0ec4ce94a
diff --git a/solr/licenses/hadoop-yarn-api-2.3.0.jar.sha1 b/solr/licenses/hadoop-yarn-api-2.3.0.jar.sha1
new file mode 100644
index 0000000..35f8a7f
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-api-2.3.0.jar.sha1
@@ -0,0 +1 @@
+1e9a28a4b3455f654f8d128b303817c09542f8d7
diff --git a/solr/licenses/hadoop-yarn-client-2.2.0.jar.sha1 b/solr/licenses/hadoop-yarn-client-2.2.0.jar.sha1
deleted file mode 100644
index ed76937..0000000
--- a/solr/licenses/hadoop-yarn-client-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f299044dd9e546ca30a30014ef30699306e9ef3e
diff --git a/solr/licenses/hadoop-yarn-client-2.3.0.jar.sha1 b/solr/licenses/hadoop-yarn-client-2.3.0.jar.sha1
new file mode 100644
index 0000000..12d2ef4
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-client-2.3.0.jar.sha1
@@ -0,0 +1 @@
+bd3a8ce1c3580cd7321f3fac98c11165b2a35b07
diff --git a/solr/licenses/hadoop-yarn-common-2.2.0.jar.sha1 b/solr/licenses/hadoop-yarn-common-2.2.0.jar.sha1
deleted file mode 100644
index 05d1a40..0000000
--- a/solr/licenses/hadoop-yarn-common-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-77f18c3d40dcb45b0be2602cfa5115a5edb40db1
diff --git a/solr/licenses/hadoop-yarn-common-2.3.0.jar.sha1 b/solr/licenses/hadoop-yarn-common-2.3.0.jar.sha1
new file mode 100644
index 0000000..a7813e9
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-common-2.3.0.jar.sha1
@@ -0,0 +1 @@
+2a926205d8437fe846b902e0a9e8df96924f72bf
diff --git a/solr/licenses/hadoop-yarn-server-common-2.2.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-common-2.2.0.jar.sha1
deleted file mode 100644
index ad9a65e..0000000
--- a/solr/licenses/hadoop-yarn-server-common-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ce13e5699bbe644da95bfd7e01549c6a389fec7f
diff --git a/solr/licenses/hadoop-yarn-server-common-2.3.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-common-2.3.0.jar.sha1
new file mode 100644
index 0000000..df12307
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-server-common-2.3.0.jar.sha1
@@ -0,0 +1 @@
+fb681f62c79bedb8886e5c54bbc8b18be4d8afb1
diff --git a/solr/licenses/hadoop-yarn-server-nodemanager-2.2.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-nodemanager-2.2.0.jar.sha1
deleted file mode 100644
index 5255101..0000000
--- a/solr/licenses/hadoop-yarn-server-nodemanager-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e7f0f16676afffff62919578bcb5606e3548f36
diff --git a/solr/licenses/hadoop-yarn-server-nodemanager-2.3.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-nodemanager-2.3.0.jar.sha1
new file mode 100644
index 0000000..8e96397
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-server-nodemanager-2.3.0.jar.sha1
@@ -0,0 +1 @@
+7f9e411eabc9e5a6fd2f38ffefbad40986a2d690
diff --git a/solr/licenses/hadoop-yarn-server-resourcemanager-2.2.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-resourcemanager-2.2.0.jar.sha1
deleted file mode 100644
index 57843e0..0000000
--- a/solr/licenses/hadoop-yarn-server-resourcemanager-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-867da9c1c98a2c8c9b6cf7f3f9354818cd8831cf
diff --git a/solr/licenses/hadoop-yarn-server-resourcemanager-2.3.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-resourcemanager-2.3.0.jar.sha1
new file mode 100644
index 0000000..30be94d
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-server-resourcemanager-2.3.0.jar.sha1
@@ -0,0 +1 @@
+56db53c09bb6844bd0bb0a6d6efc56874f63d024
diff --git a/solr/licenses/hadoop-yarn-server-tests-2.2.0-tests.jar.sha1 b/solr/licenses/hadoop-yarn-server-tests-2.2.0-tests.jar.sha1
deleted file mode 100644
index dafd029..0000000
--- a/solr/licenses/hadoop-yarn-server-tests-2.2.0-tests.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d6bf9776d45f3812a9011d768d571bc554706f05
diff --git a/solr/licenses/hadoop-yarn-server-tests-2.3.0-tests.jar.sha1 b/solr/licenses/hadoop-yarn-server-tests-2.3.0-tests.jar.sha1
new file mode 100644
index 0000000..6434146
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-server-tests-2.3.0-tests.jar.sha1
@@ -0,0 +1 @@
+e63163a106934e5dd702bb68ad3a1d397c5af516
diff --git a/solr/licenses/hadoop-yarn-server-web-proxy-2.2.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-web-proxy-2.2.0.jar.sha1
deleted file mode 100644
index 2349416..0000000
--- a/solr/licenses/hadoop-yarn-server-web-proxy-2.2.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ab2404e576910f14cbcd185f81776ff806571b37
diff --git a/solr/licenses/hadoop-yarn-server-web-proxy-2.3.0.jar.sha1 b/solr/licenses/hadoop-yarn-server-web-proxy-2.3.0.jar.sha1
new file mode 100644
index 0000000..fadad32
--- /dev/null
+++ b/solr/licenses/hadoop-yarn-server-web-proxy-2.3.0.jar.sha1
@@ -0,0 +1 @@
+4fad3f4e47394efb5ba745b6d2f73330dbc4f41c
diff --git a/solr/licenses/javax.servlet-api-3.0.1.jar.sha1 b/solr/licenses/javax.servlet-api-3.0.1.jar.sha1
deleted file mode 100644
index a660a30..0000000
--- a/solr/licenses/javax.servlet-api-3.0.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6bf0ebb7efd993e222fc1112377b5e92a13b38dd
diff --git a/solr/licenses/javax.servlet-api-3.1.0.jar.sha1 b/solr/licenses/javax.servlet-api-3.1.0.jar.sha1
new file mode 100644
index 0000000..a269ca0
--- /dev/null
+++ b/solr/licenses/javax.servlet-api-3.1.0.jar.sha1
@@ -0,0 +1 @@
+3cd63d075497751784b2fa84be59432f4905bf7c
diff --git a/solr/licenses/jcl-over-slf4j-1.7.6.jar.sha1 b/solr/licenses/jcl-over-slf4j-1.7.6.jar.sha1
deleted file mode 100644
index ecf02c8..0000000
--- a/solr/licenses/jcl-over-slf4j-1.7.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ab1648fe1dd6f1e5c2ec6d12f394672bb8c1036a
diff --git a/solr/licenses/jcl-over-slf4j-1.7.7.jar.sha1 b/solr/licenses/jcl-over-slf4j-1.7.7.jar.sha1
new file mode 100644
index 0000000..6309ee8
--- /dev/null
+++ b/solr/licenses/jcl-over-slf4j-1.7.7.jar.sha1
@@ -0,0 +1 @@
+56003dcd0a31deea6391b9e2ef2f2dc90b205a92
diff --git a/solr/licenses/jempbox-1.8.6.jar.sha1 b/solr/licenses/jempbox-1.8.6.jar.sha1
deleted file mode 100644
index 5da0e13..0000000
--- a/solr/licenses/jempbox-1.8.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6a2727fe097641d0a65338068aa5d9ffbc0178cc
diff --git a/solr/licenses/jempbox-1.8.8.jar.sha1 b/solr/licenses/jempbox-1.8.8.jar.sha1
new file mode 100644
index 0000000..c118934
--- /dev/null
+++ b/solr/licenses/jempbox-1.8.8.jar.sha1
@@ -0,0 +1 @@
+5a6cd3d3bf9464a8d7f06127a4847d6b0e840fe6
diff --git a/solr/licenses/jetty-continuation-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-continuation-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 40d32c2..0000000
--- a/solr/licenses/jetty-continuation-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c0e26574ddcac7a86486f19a8b3782657acfd961
diff --git a/solr/licenses/jetty-continuation-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-continuation-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..6566b24
--- /dev/null
+++ b/solr/licenses/jetty-continuation-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+74ca2679e461e7e9b5fdffcf9685575a7d5f5c8e
diff --git a/solr/licenses/jetty-deploy-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-deploy-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 7262965..0000000
--- a/solr/licenses/jetty-deploy-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-875b96ef84e3b83a40b3aac9e473434d6414a389
diff --git a/solr/licenses/jetty-deploy-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-deploy-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..c606e90
--- /dev/null
+++ b/solr/licenses/jetty-deploy-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+7ad677c1442dc965b5815f4452848b8eecbc5082
diff --git a/solr/licenses/jetty-http-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-http-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 7cc9164..0000000
--- a/solr/licenses/jetty-http-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d9eb53007e04d6338f12f3ded60fad1f7bfcb40e
diff --git a/solr/licenses/jetty-http-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-http-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..eee8f10
--- /dev/null
+++ b/solr/licenses/jetty-http-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+5484771191612c1f5a57466865b7014ff56886ce
diff --git a/solr/licenses/jetty-io-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-io-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 2ba8e66..0000000
--- a/solr/licenses/jetty-io-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e829c768f2b9de5d9fae3bc0aba3996bd0344f56
diff --git a/solr/licenses/jetty-io-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-io-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..bc69203
--- /dev/null
+++ b/solr/licenses/jetty-io-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+11b84cc7078745fca844bd2fb95c2b4f818eafc2
diff --git a/solr/licenses/jetty-jmx-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-jmx-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index e400550..0000000
--- a/solr/licenses/jetty-jmx-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6e48870e2af2caf2a77751eae3a79bfcf6b90a78
diff --git a/solr/licenses/jetty-jmx-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-jmx-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..d62a76e
--- /dev/null
+++ b/solr/licenses/jetty-jmx-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+53ebf8bd8d151c3c7883e3b23eee9f67ecd716e9
diff --git a/solr/licenses/jetty-security-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-security-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 2de8273..0000000
--- a/solr/licenses/jetty-security-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a386e7f0a8b9157b99a2f29a6deac61e007a2b9b
diff --git a/solr/licenses/jetty-security-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-security-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..495af06
--- /dev/null
+++ b/solr/licenses/jetty-security-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+513537c2a4f7d2a44120f4ff5fdc7fc2c8bc9357
diff --git a/solr/licenses/jetty-server-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-server-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index 2674565..0000000
--- a/solr/licenses/jetty-server-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-13ca9587bc1645f8fac89454b15252a2ad5bdcf5
diff --git a/solr/licenses/jetty-server-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-server-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..bba3d06
--- /dev/null
+++ b/solr/licenses/jetty-server-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+5960eb385ded42360045447185a0510365f811dc
diff --git a/solr/licenses/jetty-servlet-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-servlet-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index ac4faaa..0000000
--- a/solr/licenses/jetty-servlet-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-98f8029fe7236e9c66381c04f292b5319f47ca84
diff --git a/solr/licenses/jetty-servlet-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-servlet-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..c29ef43
--- /dev/null
+++ b/solr/licenses/jetty-servlet-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+026aa018ef20780e8a900ae7fc95f59884d3095b
diff --git a/solr/licenses/jetty-servlets-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-servlets-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..8396ef2
--- /dev/null
+++ b/solr/licenses/jetty-servlets-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+0e36f5f1d19c48ad1ae6e65172ee472d0ba8b571
diff --git a/solr/licenses/jetty-util-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-util-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index cff356d..0000000
--- a/solr/licenses/jetty-util-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d198a8ad8ea20b4fb74c781175c48500ec2b8b7a
diff --git a/solr/licenses/jetty-util-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-util-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..6da0dcc
--- /dev/null
+++ b/solr/licenses/jetty-util-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+fdfa0b969d99a2dfb2a46c0ff00251d7e6c7b2bb
diff --git a/solr/licenses/jetty-webapp-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-webapp-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index fded150..0000000
--- a/solr/licenses/jetty-webapp-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a3353244b93e895f1ca8f607077f247063470233
diff --git a/solr/licenses/jetty-webapp-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-webapp-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..20c144e
--- /dev/null
+++ b/solr/licenses/jetty-webapp-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+68da2c29c6bedac8d502839fea66b91e05077a68
diff --git a/solr/licenses/jetty-xml-8.1.10.v20130312.jar.sha1 b/solr/licenses/jetty-xml-8.1.10.v20130312.jar.sha1
deleted file mode 100644
index db8de90..0000000
--- a/solr/licenses/jetty-xml-8.1.10.v20130312.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-96eb17dd4265732b0cb9cd15e36f83633832dd65
diff --git a/solr/licenses/jetty-xml-9.2.6.v20141205.jar.sha1 b/solr/licenses/jetty-xml-9.2.6.v20141205.jar.sha1
new file mode 100644
index 0000000..dfd78fb
--- /dev/null
+++ b/solr/licenses/jetty-xml-9.2.6.v20141205.jar.sha1
@@ -0,0 +1 @@
+2f51a87d2c962257304391aa29e1c0030774c4ac
diff --git a/solr/licenses/jul-to-slf4j-1.7.6.jar.sha1 b/solr/licenses/jul-to-slf4j-1.7.6.jar.sha1
deleted file mode 100644
index 778692f..0000000
--- a/solr/licenses/jul-to-slf4j-1.7.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-322e2af1694ccc75d33f4d11216c852121d8fefd
diff --git a/solr/licenses/jul-to-slf4j-1.7.7.jar.sha1 b/solr/licenses/jul-to-slf4j-1.7.7.jar.sha1
new file mode 100644
index 0000000..98b2acc
--- /dev/null
+++ b/solr/licenses/jul-to-slf4j-1.7.7.jar.sha1
@@ -0,0 +1 @@
+def21bc1a6e648ee40b41a84f1db443132913105
diff --git a/solr/licenses/org.restlet-2.1.1.jar.sha1 b/solr/licenses/org.restlet-2.1.1.jar.sha1
deleted file mode 100644
index 4b0aa1f..0000000
--- a/solr/licenses/org.restlet-2.1.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e12c23b962c925f2681729afa1e40066a350ad27
diff --git a/solr/licenses/org.restlet-2.3.0.jar.sha1 b/solr/licenses/org.restlet-2.3.0.jar.sha1
new file mode 100644
index 0000000..77e949d
--- /dev/null
+++ b/solr/licenses/org.restlet-2.3.0.jar.sha1
@@ -0,0 +1 @@
+4c5d184e23fa729726668a90dc7338d80c4e7e6f
diff --git a/solr/licenses/org.restlet.ext.servlet-2.1.1.jar.sha1 b/solr/licenses/org.restlet.ext.servlet-2.1.1.jar.sha1
deleted file mode 100644
index a51aa82..0000000
--- a/solr/licenses/org.restlet.ext.servlet-2.1.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-72baf27dc19d98f43c362ded582db408433373ee
diff --git a/solr/licenses/org.restlet.ext.servlet-2.3.0.jar.sha1 b/solr/licenses/org.restlet.ext.servlet-2.3.0.jar.sha1
new file mode 100644
index 0000000..32c31ad
--- /dev/null
+++ b/solr/licenses/org.restlet.ext.servlet-2.3.0.jar.sha1
@@ -0,0 +1 @@
+9303e20d0397c0304342943560c3a1693fd7ce7d
diff --git a/solr/licenses/pdfbox-1.8.6.jar.sha1 b/solr/licenses/pdfbox-1.8.6.jar.sha1
deleted file mode 100644
index 2048e40..0000000
--- a/solr/licenses/pdfbox-1.8.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b5233e1070e7a5d1b041e549b9f93712485deb50
diff --git a/solr/licenses/pdfbox-1.8.8.jar.sha1 b/solr/licenses/pdfbox-1.8.8.jar.sha1
new file mode 100644
index 0000000..2c01ece
--- /dev/null
+++ b/solr/licenses/pdfbox-1.8.8.jar.sha1
@@ -0,0 +1 @@
+520d4255b392f56124d693d72f2df61ba1172061
diff --git a/solr/licenses/poi-3.11-beta2.jar.sha1 b/solr/licenses/poi-3.11-beta2.jar.sha1
deleted file mode 100644
index 091d3d3..0000000
--- a/solr/licenses/poi-3.11-beta2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5b89faba0fd879a6a7eca16e81a47a2fd008738a
diff --git a/solr/licenses/poi-3.11.jar.sha1 b/solr/licenses/poi-3.11.jar.sha1
new file mode 100644
index 0000000..8a24258
--- /dev/null
+++ b/solr/licenses/poi-3.11.jar.sha1
@@ -0,0 +1 @@
+51058d9db469437a5ed0aa508e7de8937019e1d9
diff --git a/solr/licenses/poi-ooxml-3.11-beta2.jar.sha1 b/solr/licenses/poi-ooxml-3.11-beta2.jar.sha1
deleted file mode 100644
index 658903e..0000000
--- a/solr/licenses/poi-ooxml-3.11-beta2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6ea3924178a334b5298aede99bca3feaae8765bf
diff --git a/solr/licenses/poi-ooxml-3.11.jar.sha1 b/solr/licenses/poi-ooxml-3.11.jar.sha1
new file mode 100644
index 0000000..9109cd7
--- /dev/null
+++ b/solr/licenses/poi-ooxml-3.11.jar.sha1
@@ -0,0 +1 @@
+e87975291fbb65888468b09fda2cf00e2996c2a0
diff --git a/solr/licenses/poi-ooxml-schemas-3.11-beta2.jar.sha1 b/solr/licenses/poi-ooxml-schemas-3.11-beta2.jar.sha1
deleted file mode 100644
index 8f0a422..0000000
--- a/solr/licenses/poi-ooxml-schemas-3.11-beta2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-76e88385937608ef805a040b35185220e5d95ea1
diff --git a/solr/licenses/poi-ooxml-schemas-3.11.jar.sha1 b/solr/licenses/poi-ooxml-schemas-3.11.jar.sha1
new file mode 100644
index 0000000..a9bd3a7
--- /dev/null
+++ b/solr/licenses/poi-ooxml-schemas-3.11.jar.sha1
@@ -0,0 +1 @@
+4f015e9125a65351a2e1d27c1f8c6af0ff4a34c8
diff --git a/solr/licenses/poi-scratchpad-3.11-beta2.jar.sha1 b/solr/licenses/poi-scratchpad-3.11-beta2.jar.sha1
deleted file mode 100644
index 137022b..0000000
--- a/solr/licenses/poi-scratchpad-3.11-beta2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-77a69b5418faeeb22ad5645708711ebc59c13254
diff --git a/solr/licenses/poi-scratchpad-3.11.jar.sha1 b/solr/licenses/poi-scratchpad-3.11.jar.sha1
new file mode 100644
index 0000000..5a891fc
--- /dev/null
+++ b/solr/licenses/poi-scratchpad-3.11.jar.sha1
@@ -0,0 +1 @@
+c5f7987f03d92bb6d01895f043915e897f7bb50a
diff --git a/solr/licenses/servlet-api-3.0.jar.sha1 b/solr/licenses/servlet-api-3.0.jar.sha1
deleted file mode 100644
index 749a2c2..0000000
--- a/solr/licenses/servlet-api-3.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0aaaa85845fb5c59da00193f06b8e5278d8bf3f8
diff --git a/solr/licenses/slf4j-api-1.7.6.jar.sha1 b/solr/licenses/slf4j-api-1.7.6.jar.sha1
deleted file mode 100644
index 6745a4c..0000000
--- a/solr/licenses/slf4j-api-1.7.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-562424e36df3d2327e8e9301a76027fca17d54ea
diff --git a/solr/licenses/slf4j-api-1.7.7.jar.sha1 b/solr/licenses/slf4j-api-1.7.7.jar.sha1
new file mode 100644
index 0000000..1a82218
--- /dev/null
+++ b/solr/licenses/slf4j-api-1.7.7.jar.sha1
@@ -0,0 +1 @@
+2b8019b6249bb05d81d3a3094e468753e2b21311
diff --git a/solr/licenses/slf4j-log4j12-1.7.6.jar.sha1 b/solr/licenses/slf4j-log4j12-1.7.6.jar.sha1
deleted file mode 100644
index d8dc4fa..0000000
--- a/solr/licenses/slf4j-log4j12-1.7.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6953717b9850aeb26d1b8375ca07dbd9c50eca4e
diff --git a/solr/licenses/slf4j-log4j12-1.7.7.jar.sha1 b/solr/licenses/slf4j-log4j12-1.7.7.jar.sha1
new file mode 100644
index 0000000..c328cb3
--- /dev/null
+++ b/solr/licenses/slf4j-log4j12-1.7.7.jar.sha1
@@ -0,0 +1 @@
+58f588119ffd1702c77ccab6acb54bfb41bed8bd
diff --git a/solr/licenses/start.jar.sha1 b/solr/licenses/start.jar.sha1
index a375ce1..5ef2c26 100644
--- a/solr/licenses/start.jar.sha1
+++ b/solr/licenses/start.jar.sha1
@@ -1 +1 @@
-8e1a37a8daae3f222d957012f826047497466a56
+6380e0fe5a738cced80ef705dbac3320a8f25edf
diff --git a/solr/licenses/stax2-api-3.1.4.jar.sha1 b/solr/licenses/stax2-api-3.1.4.jar.sha1
new file mode 100644
index 0000000..d5f4020
--- /dev/null
+++ b/solr/licenses/stax2-api-3.1.4.jar.sha1
@@ -0,0 +1 @@
+ac19014b1e6a7c08aad07fe114af792676b685b7
diff --git a/solr/licenses/stax2-api-LICENSE-BSD.txt b/solr/licenses/stax2-api-LICENSE-BSD.txt
new file mode 100644
index 0000000..49e7019
--- /dev/null
+++ b/solr/licenses/stax2-api-LICENSE-BSD.txt
@@ -0,0 +1,10 @@
+Copyright (c) <YEAR>, <OWNER>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/solr/licenses/stax2-api-NOTICE.txt b/solr/licenses/stax2-api-NOTICE.txt
new file mode 100644
index 0000000..a5f70fc
--- /dev/null
+++ b/solr/licenses/stax2-api-NOTICE.txt
@@ -0,0 +1,8 @@
+Stax2 API is an extension to basic Stax 1.0 API that adds significant
+new functionality, such as full-featured bi-directional validation
+interface and high-performance Typed Access API.
+
+(From http://repo1.maven.org/maven2/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.pom)
+Developer: Tatu Saloranta <tatu@fasterxml.com>
+License: The BSD License (http://www.opensource.org/licenses/bsd-license.php)
+Organization: fasterxml.com (http://fasterxml.com)
diff --git a/solr/licenses/tika-core-1.6.jar.sha1 b/solr/licenses/tika-core-1.6.jar.sha1
deleted file mode 100644
index c603ce3..0000000
--- a/solr/licenses/tika-core-1.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fbccbe81436fc61dbe81d2a8d83c8e50b49d5771
diff --git a/solr/licenses/tika-core-1.7.jar.sha1 b/solr/licenses/tika-core-1.7.jar.sha1
new file mode 100644
index 0000000..081a792
--- /dev/null
+++ b/solr/licenses/tika-core-1.7.jar.sha1
@@ -0,0 +1 @@
+ee92c1d9deb07e85198fe2ff69926a7a1ce068d6
diff --git a/solr/licenses/tika-core-NOTICE.txt b/solr/licenses/tika-core-NOTICE.txt
index a1bf620..3b73637 100644
--- a/solr/licenses/tika-core-NOTICE.txt
+++ b/solr/licenses/tika-core-NOTICE.txt
@@ -1,12 +1,9 @@
 Apache Tika
-Copyright 2011 The Apache Software Foundation
+Copyright 2015 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).
 
-Copyright 1993-2010 University Corporation for Atmospheric Research/Unidata
-This software contains code derived from UCAR/Unidata's NetCDF library.
-
 Tika-server component uses CDDL-licensed dependencies: jersey (http://jersey.java.net/) and 
 Grizzly (http://grizzly.java.net/)
 
diff --git a/solr/licenses/tika-java7-1.7.jar.sha1 b/solr/licenses/tika-java7-1.7.jar.sha1
new file mode 100644
index 0000000..b2930d7
--- /dev/null
+++ b/solr/licenses/tika-java7-1.7.jar.sha1
@@ -0,0 +1 @@
+f3f7f9c82fbf5ce1c3948e1f77f1469393d5eb38
diff --git a/solr/licenses/tika-java7-LICENSE-ASL.txt b/solr/licenses/tika-java7-LICENSE-ASL.txt
new file mode 100644
index 0000000..8ba51ef
--- /dev/null
+++ b/solr/licenses/tika-java7-LICENSE-ASL.txt
@@ -0,0 +1,239 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+APACHE TIKA SUBCOMPONENTS
+
+Apache Tika includes a number of subcomponents with separate copyright notices
+and license terms. Your use of these subcomponents is subject to the terms and
+conditions of the following licenses.
+
+Charset detection code from ICU4J (http://site.icu-project.org/)
+
+    Copyright (c) 1995-2009 International Business Machines Corporation
+    and others
+
+    All rights reserved.
+
+    Permission is hereby granted, free of charge, to any person obtaining
+    a copy of this software and associated documentation files (the
+    "Software"), to deal in the Software without restriction, including
+    without limitation the rights to use, copy, modify, merge, publish,
+    distribute, and/or sell copies of the Software, and to permit persons
+    to whom the Software is furnished to do so, provided that the above
+    copyright notice(s) and this permission notice appear in all copies
+    of the Software and that both the above copyright notice(s) and this
+    permission notice appear in supporting documentation.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+    OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
+    IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE
+    BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES,
+    OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+    WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
+    ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+    SOFTWARE.
+
+    Except as contained in this notice, the name of a copyright holder shall
+    not be used in advertising or otherwise to promote the sale, use or other
+    dealings in this Software without prior written authorization of the
+    copyright holder.
diff --git a/solr/licenses/tika-java7-NOTICE.txt b/solr/licenses/tika-java7-NOTICE.txt
new file mode 100644
index 0000000..7c1722d
--- /dev/null
+++ b/solr/licenses/tika-java7-NOTICE.txt
@@ -0,0 +1,12 @@
+Apache Tika Java 7 detectors
+Copyright 2015 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+Tika-server component uses CDDL-licensed dependencies: jersey (http://jersey.java.net/) and 
+Grizzly (http://grizzly.java.net/)
+
+OpenCSV: Copyright 2005 Bytecode Pty Ltd. Licensed under the Apache License, Version 2.0
+
+IPTC Photo Metadata descriptions Copyright 2010 International Press Telecommunications Council.
diff --git a/solr/licenses/tika-parsers-1.6.jar.sha1 b/solr/licenses/tika-parsers-1.6.jar.sha1
deleted file mode 100644
index 1f47e77..0000000
--- a/solr/licenses/tika-parsers-1.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-13e2a8ca4bb471155663ccb72b92e5f8cc70a02c
diff --git a/solr/licenses/tika-parsers-1.7.jar.sha1 b/solr/licenses/tika-parsers-1.7.jar.sha1
new file mode 100644
index 0000000..8b9886d
--- /dev/null
+++ b/solr/licenses/tika-parsers-1.7.jar.sha1
@@ -0,0 +1 @@
+ae2a0cdac649cf9baab01156c75a655cf50ca936
diff --git a/solr/licenses/tika-parsers-NOTICE.txt b/solr/licenses/tika-parsers-NOTICE.txt
index 4a1d9e0..e490b9e 100644
--- a/solr/licenses/tika-parsers-NOTICE.txt
+++ b/solr/licenses/tika-parsers-NOTICE.txt
@@ -1,12 +1,9 @@
 Apache Tika parsers
-Copyright 2011 The Apache Software Foundation
+Copyright 2015 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).
 
-Copyright 1993-2010 University Corporation for Atmospheric Research/Unidata
-This software contains code derived from UCAR/Unidata's NetCDF library.
-
 Tika-server component uses CDDL-licensed dependencies: jersey (http://jersey.java.net/) and 
 Grizzly (http://grizzly.java.net/)
 
diff --git a/solr/licenses/tika-xmp-1.6.jar.sha1 b/solr/licenses/tika-xmp-1.6.jar.sha1
deleted file mode 100644
index a05773b..0000000
--- a/solr/licenses/tika-xmp-1.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2668676f66597033d276301cc4fab3bf4b965a2c
diff --git a/solr/licenses/tika-xmp-1.7.jar.sha1 b/solr/licenses/tika-xmp-1.7.jar.sha1
new file mode 100644
index 0000000..01b88b6
--- /dev/null
+++ b/solr/licenses/tika-xmp-1.7.jar.sha1
@@ -0,0 +1 @@
+ac7b48998ffbafa98467ed075ab5f8efb688bebc
diff --git a/solr/licenses/tika-xmp-NOTICE.txt b/solr/licenses/tika-xmp-NOTICE.txt
index 156a582..52230b5 100644
--- a/solr/licenses/tika-xmp-NOTICE.txt
+++ b/solr/licenses/tika-xmp-NOTICE.txt
@@ -1,12 +1,9 @@
 Apache Tika xmp
-Copyright 2011 The Apache Software Foundation
+Copyright 2015 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).
 
-Copyright 1993-2010 University Corporation for Atmospheric Research/Unidata
-This software contains code derived from UCAR/Unidata's NetCDF library.
-
 Tika-server component uses CDDL-licensed dependencies: jersey (http://jersey.java.net/) and 
 Grizzly (http://grizzly.java.net/)
 
diff --git a/solr/licenses/woodstox-core-asl-4.4.1.jar.sha1 b/solr/licenses/woodstox-core-asl-4.4.1.jar.sha1
new file mode 100644
index 0000000..4432f29
--- /dev/null
+++ b/solr/licenses/woodstox-core-asl-4.4.1.jar.sha1
@@ -0,0 +1 @@
+84fee5eb1a4a1cefe65b6883c73b3fa83be3c1a1
diff --git a/solr/licenses/wstx-asl-LICENSE-ASL.txt b/solr/licenses/woodstox-core-asl-LICENSE-ASL.txt
similarity index 100%
rename from solr/licenses/wstx-asl-LICENSE-ASL.txt
rename to solr/licenses/woodstox-core-asl-LICENSE-ASL.txt
diff --git a/solr/licenses/woodstox-core-asl-NOTICE.txt b/solr/licenses/woodstox-core-asl-NOTICE.txt
new file mode 100644
index 0000000..b7ba12b
--- /dev/null
+++ b/solr/licenses/woodstox-core-asl-NOTICE.txt
@@ -0,0 +1,37 @@
+(From http://woodstox.codehaus.org/4.2.0/release-notes/README)
+--------------
+
+Woodstox is an XML-parser that allows parsing of XML documents in so-called
+pull mode (aka "pull parsing").
+It specifically implements StAX 1.0 API:
+
+http://www.jcp.org/en/jsr/detail?id=173
+
+which defines what is closest to being the J2xE standard for XML pull parsers.
+
+Woodstox was originally written by Tatu Saloranta (<tatu.saloranta@iki.fi>).
+
+Woodstox licensing is explained in file LICENSE; be sure to read it
+to understand licensing.
+
+Contributions to the source code need to be made as specified by
+the License; so that they can be distributed according to the
+License terms.
+--------------
+
+(From http://svn.codehaus.org/woodstox/wstx/trunk/release-notes/asl/LICENSE)
+--------------
+This copy of Woodstox XML processor is licensed under the
+Apache (Software) License, version 2.0 ("the License").
+See the License for details about distribution rights, and the
+specific rights regarding derivative works.
+
+You may obtain a copy of the License at:
+
+http://www.apache.org/licenses/
+
+A copy is also included with both the downloadable source code package
+and jar that contains class bytecodes, as file "ASL 2.0". In both cases,
+that file should be located next to this file: in source distribution
+the location should be "release-notes/asl"; and in jar "META-INF/"
+--------------
diff --git a/solr/licenses/wstx-asl-3.2.7.jar.sha1 b/solr/licenses/wstx-asl-3.2.7.jar.sha1
deleted file mode 100644
index 867ea25..0000000
--- a/solr/licenses/wstx-asl-3.2.7.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-252c7faae9ce98cb9c9d29f02db88f7373e7f407
diff --git a/solr/licenses/wstx-asl-NOTICE.txt b/solr/licenses/wstx-asl-NOTICE.txt
deleted file mode 100644
index 5389012..0000000
--- a/solr/licenses/wstx-asl-NOTICE.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-This product currently only contains code developed by authors
-of specific components, as identified by the source code files.
-
-Since product implements StAX API, it has dependencies to StAX API
-classes.
-
-For additional credits (generally to people who reported problems)
-see CREDITS file.
diff --git a/solr/server/contexts/solr-jetty-context.xml b/solr/server/contexts/solr-jetty-context.xml
index 50978a3..2383639 100644
--- a/solr/server/contexts/solr-jetty-context.xml
+++ b/solr/server/contexts/solr-jetty-context.xml
@@ -1,8 +1,9 @@
 <?xml version="1.0"?>
-<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure.dtd">
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
 <Configure class="org.eclipse.jetty.webapp.WebAppContext">
-  <Set name="contextPath"><SystemProperty name="hostContext" default="/solr"/></Set>
-  <Set name="war"><SystemProperty name="jetty.home"/>/webapps/solr.war</Set>
-  <Set name="defaultsDescriptor"><SystemProperty name="jetty.home"/>/etc/webdefault.xml</Set>
-  <Set name="tempDirectory"><Property name="jetty.home" default="."/>/solr-webapp</Set>
+  <Set name="contextPath"><Property name="hostContext" default="/solr"/></Set>
+  <Set name="war"><Property name="jetty.base"/>/webapps/solr.war</Set>
+  <Set name="defaultsDescriptor"><Property name="jetty.base"/>/etc/webdefault.xml</Set>
+  <Set name="tempDirectory"><Property name="jetty.base" default="."/>/solr-webapp</Set>
+  <Set name="persistTempDirectory">true</Set>
 </Configure>
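
Note on the context-file change above: Jetty 9 drops <SystemProperty> in favor of <Property>, paths now resolve against jetty.base rather than jetty.home, and the unpacked webapp directory is persisted across restarts. A minimal embedded-API sketch of what this XML assembles, assuming a jettyBase path and host context (the shipped server remains XML-driven):

```java
// Rough embedded-API equivalent of the Jetty 9 context XML above.
// jettyBase and hostContext are assumed inputs, not values from this patch.
import java.io.File;
import org.eclipse.jetty.webapp.WebAppContext;

public class SolrContextSketch {
    public static WebAppContext build(String jettyBase, String hostContext) {
        WebAppContext ctx = new WebAppContext();
        ctx.setContextPath(hostContext != null ? hostContext : "/solr");
        ctx.setWar(jettyBase + "/webapps/solr.war");
        ctx.setDefaultsDescriptor(jettyBase + "/etc/webdefault.xml");
        ctx.setTempDirectory(new File(jettyBase, "solr-webapp"));
        // New in this change: keep the unpacked war between restarts.
        ctx.setPersistTempDirectory(true);
        return ctx;
    }
}
```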
diff --git a/solr/server/etc/jetty-http.xml b/solr/server/etc/jetty-http.xml
new file mode 100644
index 0000000..b84a4e1
--- /dev/null
+++ b/solr/server/etc/jetty-http.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0"?>
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
+
+<!-- ============================================================= -->
+<!-- Configure the Jetty Server instance with an ID "Server"       -->
+<!-- by adding a HTTP connector.                                   -->
+<!-- This configuration must be used in conjunction with jetty.xml -->
+<!-- ============================================================= -->
+<Configure id="Server" class="org.eclipse.jetty.server.Server">
+
+  <!-- =========================================================== -->
+  <!-- Server Thread Pool                                          -->
+  <!-- =========================================================== -->
+  <Get name="ThreadPool">
+    <!-- Default queued blocking threadpool -->
+    <Set name="minThreads"><Property name="jetty.threads.min" /></Set>
+    <Set name="maxThreads"><Property name="jetty.threads.max" /></Set>
+    <Set name="detailedDump">false</Set>
+    <Set name="idleTimeout"><Property name="jetty.threads.idle.timeout" /></Set>
+  </Get>
+
+  <!-- =========================================================== -->
+  <!-- Add a HTTP Connector.                                       -->
+  <!-- Configure an o.e.j.server.ServerConnector with a single     -->
+  <!-- HttpConnectionFactory instance using the common httpConfig  -->
+  <!-- instance defined in jetty.xml                               -->
+  <!--                                                             -->
+  <!-- Consult the javadoc of o.e.j.server.ServerConnector and     -->
+  <!-- o.e.j.server.HttpConnectionFactory for all configuration    -->
+  <!-- that may be set here.                                       -->
+  <!-- =========================================================== -->
+  <Call name="addConnector">
+    <Arg>
+      <New class="org.eclipse.jetty.server.ServerConnector">
+        <Arg name="server"><Ref refid="Server" /></Arg>
+        <Arg name="acceptors" type="int"><Property name="jetty.http.acceptors" default="-1"/></Arg>
+        <Arg name="selectors" type="int"><Property name="jetty.http.selectors" default="-1"/></Arg>
+        <Arg name="factories">
+          <Array type="org.eclipse.jetty.server.ConnectionFactory">
+            <Item>
+              <New class="org.eclipse.jetty.server.HttpConnectionFactory">
+                <Arg name="config">
+                  <New id="httpConfig" class="org.eclipse.jetty.server.HttpConfiguration">
+                    <Set name="sendServerVersion">false</Set>
+                    <Set name="sendDateHeader">false</Set>
+                  </New>
+                </Arg>
+              </New>
+            </Item>
+          </Array>
+        </Arg>
+        <Set name="port"><Property name="jetty.port" default="8983" /></Set>
+        <Set name="host"><Property name="jetty.host" /></Set>
+      </New>
+    </Arg>
+  </Call>
+
+</Configure>
\ No newline at end of file
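
The new etc/jetty-http.xml above adds a plain HTTP ServerConnector whose HttpConnectionFactory carries an HttpConfiguration with the Server and Date response headers suppressed; acceptor/selector counts of -1 mean "let Jetty pick". A hedged Java sketch of the same wiring, with port and host defaults mirroring the XML:

```java
// Sketch of the connector that etc/jetty-http.xml assembles (Jetty 9 API).
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;

public class HttpConnectorSketch {
    public static void main(String[] args) throws Exception {
        Server server = new Server();

        HttpConfiguration httpConfig = new HttpConfiguration();
        httpConfig.setSendServerVersion(false); // as in the XML above
        httpConfig.setSendDateHeader(false);

        // -1 acceptors/selectors = let Jetty choose, matching the XML defaults.
        ServerConnector http = new ServerConnector(server, -1, -1,
                new HttpConnectionFactory(httpConfig));
        http.setPort(Integer.getInteger("jetty.port", 8983));
        http.setHost(System.getProperty("jetty.host"));

        server.addConnector(http);
        server.start();
        server.join();
    }
}
```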
diff --git a/solr/server/etc/jetty-https.xml b/solr/server/etc/jetty-https.xml
new file mode 100644
index 0000000..bfd6e38
--- /dev/null
+++ b/solr/server/etc/jetty-https.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0"?>
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
+
+<!-- ============================================================= -->
+<!-- Configure a HTTPS connector.                                  -->
+<!-- This configuration must be used in conjunction with jetty.xml -->
+<!-- and jetty-ssl.xml.                                            -->
+<!-- ============================================================= -->
+<Configure id="sslConnector" class="org.eclipse.jetty.server.ServerConnector">
+
+  <Call name="addIfAbsentConnectionFactory">
+    <Arg>
+      <New class="org.eclipse.jetty.server.SslConnectionFactory">
+        <Arg name="next">http/1.1</Arg>
+        <Arg name="sslContextFactory"><Ref refid="sslContextFactory"/></Arg>
+      </New>
+    </Arg>
+  </Call>
+
+  <Call name="addConnectionFactory">
+    <Arg>
+      <New class="org.eclipse.jetty.server.HttpConnectionFactory">
+        <Arg name="config"><Ref refid="sslHttpConfig" /></Arg>
+      </New>
+    </Arg>
+  </Call>
+
+</Configure>
\ No newline at end of file
diff --git a/solr/server/etc/jetty-ssl.xml b/solr/server/etc/jetty-ssl.xml
new file mode 100644
index 0000000..312f4ac
--- /dev/null
+++ b/solr/server/etc/jetty-ssl.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0"?>
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
+
+<!-- ============================================================= -->
+<!-- Base SSL configuration                                        -->
+<!-- This configuration needs to be used together with 1 or more   -->
+<!-- of jetty-https.xml, jetty-spdy.xml and/or jetty-http2.xml     -->
+<!-- ============================================================= -->
+<Configure id="Server" class="org.eclipse.jetty.server.Server">
+
+  <!-- =========================================================== -->
+  <!-- Add a SSL Connector with no protocol factories              -->
+  <!-- =========================================================== -->
+  <Call  name="addConnector">
+    <Arg>
+      <New id="sslConnector" class="org.eclipse.jetty.server.ServerConnector">
+        <Arg name="server"><Ref refid="Server" /></Arg>
+        <Arg name="acceptors" type="int"><Property name="ssl.acceptors" default="-1"/></Arg>
+        <Arg name="selectors" type="int"><Property name="ssl.selectors" default="-1"/></Arg>
+        <Arg name="factories">
+          <Array type="org.eclipse.jetty.server.ConnectionFactory">
+            <!-- uncomment to support proxy protocol
+	    <Item>
+              <New class="org.eclipse.jetty.server.ProxyConnectionFactory"/>
+            </Item>-->
+          </Array>
+        </Arg>
+
+        <Set name="host"><Property name="jetty.host" /></Set>
+        <Set name="port"><Property name="ssl.port" default="443" /></Set>
+        <Set name="idleTimeout"><Property name="ssl.timeout" default="30000"/></Set>
+        <Set name="soLingerTime"><Property name="ssl.soLingerTime" default="-1"/></Set>
+        <Set name="acceptorPriorityDelta"><Property name="ssl.acceptorPriorityDelta" default="0"/></Set>
+        <Set name="selectorPriorityDelta"><Property name="ssl.selectorPriorityDelta" default="0"/></Set>
+        <Set name="acceptQueueSize"><Property name="ssl.acceptQueueSize" default="0"/></Set>
+      </New>
+    </Arg>
+  </Call>
+
+  <!-- ============================================================= -->
+  <!-- Create a TLS (SSL) Context Factory  for later reuse           -->
+  <!-- ============================================================= -->
+  <New id="sslContextFactory" class="org.eclipse.jetty.util.ssl.SslContextFactory">
+    <Set name="KeyStorePath"><Property name="jetty.base" default="." />/<Property name="jetty.keystore" default="etc/keystore"/></Set>
+    <Set name="KeyStorePassword"><Property name="jetty.keystore.password" default="OBF:1vny1zlo1x8e1vnw1vn61x8g1zlu1vn4"/></Set>
+    <Set name="KeyManagerPassword"><Property name="jetty.keymanager.password" default="OBF:1u2u1wml1z7s1z7a1wnl1u2g"/></Set>
+    <Set name="TrustStorePath"><Property name="jetty.base" default="." />/<Property name="jetty.truststore" default="etc/keystore"/></Set>
+    <Set name="TrustStorePassword"><Property name="jetty.truststore.password" default="OBF:1vny1zlo1x8e1vnw1vn61x8g1zlu1vn4"/></Set>
+    <Set name="EndpointIdentificationAlgorithm"></Set>
+    <Set name="NeedClientAuth"><Property name="jetty.ssl.needClientAuth" default="false"/></Set>
+    <Set name="WantClientAuth"><Property name="jetty.ssl.wantClientAuth" default="false"/></Set>
+    <Set name="ExcludeCipherSuites">
+      <Array type="String">
+        <Item>SSL_RSA_WITH_DES_CBC_SHA</Item>
+        <Item>SSL_DHE_RSA_WITH_DES_CBC_SHA</Item>
+        <Item>SSL_DHE_DSS_WITH_DES_CBC_SHA</Item>
+        <Item>SSL_RSA_EXPORT_WITH_RC4_40_MD5</Item>
+        <Item>SSL_RSA_EXPORT_WITH_DES40_CBC_SHA</Item>
+        <Item>SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA</Item>
+        <Item>SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA</Item>
+      </Array>
+    </Set>
+  </New>
+
+  <!-- =========================================================== -->
+  <!-- Create a TLS specific HttpConfiguration based on the        -->
+  <!-- common HttpConfiguration defined in jetty.xml               -->
+  <!-- Add a SecureRequestCustomizer to extract certificate and    -->
+  <!-- session information                                         -->
+  <!-- =========================================================== -->
+  <New id="sslHttpConfig" class="org.eclipse.jetty.server.HttpConfiguration">
+    <Arg><Ref refid="httpConfig"/></Arg>
+    <Call name="addCustomizer">
+      <Arg><New class="org.eclipse.jetty.server.SecureRequestCustomizer"/></Arg>
+    </Call>
+  </New>
+
+</Configure>
\ No newline at end of file
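
etc/jetty-ssl.xml (above) contributes the bare SSL connector plus a shared sslContextFactory, while etc/jetty-https.xml prepends an SslConnectionFactory that terminates TLS and hands the decrypted stream on to HTTP/1.1. A sketch of the resulting connector chain, with placeholder keystore values:

```java
// Sketch of the TLS connector chain built by jetty-ssl.xml + jetty-https.xml.
// Keystore path and password are placeholders, not the shipped values.
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.util.ssl.SslContextFactory;

public class HttpsConnectorSketch {
    public static ServerConnector build(Server server, HttpConfiguration httpConfig) {
        SslContextFactory ssl = new SslContextFactory();
        ssl.setKeyStorePath("etc/keystore");  // placeholder path
        ssl.setKeyStorePassword("changeit");  // placeholder; OBF: values also accepted
        ssl.setExcludeCipherSuites("SSL_RSA_WITH_DES_CBC_SHA"); // plus the rest above

        // TLS-specific copy of the common config, exposing certificate details.
        HttpConfiguration sslHttpConfig = new HttpConfiguration(httpConfig);
        sslHttpConfig.addCustomizer(new SecureRequestCustomizer());

        // SslConnectionFactory decrypts, then hands off to plain HTTP/1.1.
        ServerConnector https = new ServerConnector(server,
                new SslConnectionFactory(ssl, "http/1.1"),
                new HttpConnectionFactory(sslHttpConfig));
        https.setPort(Integer.getInteger("ssl.port", 443));
        return https;
    }
}
```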
diff --git a/solr/server/etc/jetty.xml b/solr/server/etc/jetty.xml
index 7596a6c..8da30a0 100644
--- a/solr/server/etc/jetty.xml
+++ b/solr/server/etc/jetty.xml
@@ -1,5 +1,5 @@
 <?xml version="1.0"?>
-<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure.dtd">
+<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure_9_0.dtd">
 
 <!-- =============================================================== -->
 <!-- Configure the Jetty Server                                      -->
@@ -13,91 +13,6 @@
 <Configure id="Server" class="org.eclipse.jetty.server.Server">
 
     <!-- =========================================================== -->
-    <!-- Server Thread Pool                                          -->
-    <!-- =========================================================== -->
-    <Set name="ThreadPool">
-      <!-- Default queued blocking threadpool -->
-      <New class="org.eclipse.jetty.util.thread.QueuedThreadPool">
-        <Set name="minThreads">10</Set>
-        <Set name="maxThreads">10000</Set>
-        <Set name="detailedDump">false</Set>
-      </New>
-    </Set>
-
-    <!-- =========================================================== -->
-    <!-- Set connectors                                              -->
-    <!-- =========================================================== -->
-
-  <!--
-    <Call name="addConnector">
-      <Arg>
-          <New class="org.eclipse.jetty.server.nio.SelectChannelConnector">
-            <Set name="host"><SystemProperty name="jetty.host" /></Set>
-            <Set name="port"><SystemProperty name="jetty.port" default="8983"/></Set>
-            <Set name="maxIdleTime">50000</Set>
-            <Set name="Acceptors">2</Set>
-            <Set name="statsOn">false</Set>
-            <Set name="confidentialPort">8443</Set>
-	    <Set name="lowResourcesConnections">5000</Set>
-	    <Set name="lowResourcesMaxIdleTime">5000</Set>
-          </New>
-      </Arg>
-    </Call>
-  -->
-
-    <!-- This connector is currently being used for Solr because it
-          showed better performance than nio.SelectChannelConnector
-          for typical Solr requests.  -->
-    <Call name="addConnector">
-      <Arg>
-          <New class="org.eclipse.jetty.server.bio.SocketConnector">
-            <Set name="host"><SystemProperty name="jetty.host" /></Set>
-            <Set name="port"><SystemProperty name="jetty.port" default="8983"/></Set>
-            <Set name="maxIdleTime">50000</Set>
-            <Set name="lowResourceMaxIdleTime">1500</Set>
-            <Set name="statsOn">false</Set>
-          </New>
-      </Arg>
-    </Call>
-
-    <!-- if the connector below is uncommented, then jetty will also accept SSL
-         connections on port 8984, using a self signed certificate and can 
-         optionally require the client to authenticate with a certificate. 
-         (which can be the same as the server certificate_
-         
-         # Run solr example with SSL on port 8984
-         java -jar start.jar
-         # 
-         # Run post.jar so that it trusts the server cert...
-         java -Djavax.net.ssl.trustStore=../etc/solrtest.keystore -Durl=https://localhost:8984/solr/update -jar post.jar *.xml
-
-         # Run solr example with SSL requiring client certs on port 8984
-         java -Djetty.ssl.clientAuth=true -jar start.jar
-         #
-         # Run post.jar so that it trusts the server cert, 
-         # and authenticates with a client cert
-         java -Djavax.net.ssl.keyStorePassword=secret -Djavax.net.ssl.keyStore=../etc/solrtest.keystore -Djavax.net.ssl.trustStore=../etc/solrtest.keystore -Durl=https://localhost:8984/solr/update -jar post.jar *.xml
-
-    -->
-    <!--
-    <Call name="addConnector">
-      <Arg>
-        <New class="org.eclipse.jetty.server.ssl.SslSelectChannelConnector">
-          <Arg>
-            <New class="org.eclipse.jetty.http.ssl.SslContextFactory">
-              <Set name="keyStore"><SystemProperty name="jetty.home" default="."/>/etc/solrtest.keystore</Set>
-              <Set name="keyStorePassword">secret</Set>
-              <Set name="needClientAuth"><SystemProperty name="jetty.ssl.clientAuth" default="false"/></Set>
-            </New>
-          </Arg>
-          <Set name="port"><SystemProperty name="jetty.ssl.port" default="8984"/></Set>
-          <Set name="maxIdleTime">30000</Set>
-        </New>
-      </Arg>
-    </Call>
-    -->
-
-    <!-- =========================================================== -->
     <!-- Set handler Collection Structure                            --> 
     <!-- =========================================================== -->
     <Set name="handler">
@@ -121,7 +36,7 @@
     <!-- =========================================================== -->
     <!-- Configure Request Log                                       -->
     <!-- =========================================================== -->
-    <!-- 
+    <!--
     <Ref id="Handlers">
       <Call name="addHandler">
         <Arg>
@@ -149,26 +64,28 @@
     <!-- extra options                                               -->
     <!-- =========================================================== -->
     <Set name="stopAtShutdown">true</Set>
-    <Set name="sendServerVersion">false</Set>
-    <Set name="sendDateHeader">false</Set>
-    <Set name="gracefulShutdown">1000</Set>
     <Set name="dumpAfterStart">false</Set>
     <Set name="dumpBeforeStop">false</Set>
 
-
-
-
     <Call name="addBean">
       <Arg>
         <New id="DeploymentManager" class="org.eclipse.jetty.deploy.DeploymentManager">
           <Set name="contexts">
-            <Ref id="Contexts" />
+            <Ref refid="Contexts" />
           </Set>
           <Call name="setContextAttribute">
             <Arg>org.eclipse.jetty.server.webapp.ContainerIncludeJarPattern</Arg>
             <Arg>.*/servlet-api-[^/]*\.jar$</Arg>
           </Call>
-          
+
+          <Call name="addAppProvider">
+            <Arg>
+              <New class="org.eclipse.jetty.deploy.providers.WebAppProvider">
+                <Set name="monitoredDirName"><Property name="jetty.home" default="."/>/contexts</Set>
+                <Set name="scanInterval">0</Set>
+              </New>
+            </Arg>
+          </Call>
           
           <!-- Add a customize step to the deployment lifecycle -->
           <!-- uncomment and replace DebugBinding with your extended AppLifeCycle.Binding class 
@@ -190,15 +107,4 @@
       </Arg>
     </Call>
     
-    <Ref id="DeploymentManager">
-      <Call name="addAppProvider">
-        <Arg>
-          <New class="org.eclipse.jetty.deploy.providers.ContextProvider">
-            <Set name="monitoredDirName"><SystemProperty name="jetty.home" default="."/>/contexts</Set>
-            <Set name="scanInterval">0</Set>
-          </New>
-        </Arg>
-      </Call>
-    </Ref>
-
 </Configure>
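
The jetty.xml rewrite above removes the Jetty 8 thread-pool and connector blocks (connectors now come from jetty-http.xml / jetty-ssl.xml via modules) and registers the app provider directly on the DeploymentManager, replacing the removed ContextProvider with WebAppProvider. A sketch of the deployer this XML now describes:

```java
// Sketch of the deployer the revised jetty.xml describes: scan the
// contexts/ directory once at startup (scanInterval=0, no hot deploy).
import org.eclipse.jetty.deploy.DeploymentManager;
import org.eclipse.jetty.deploy.providers.WebAppProvider;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;

public class DeployerSketch {
    public static DeploymentManager build(ContextHandlerCollection contexts,
                                          String jettyHome) {
        DeploymentManager deployer = new DeploymentManager();
        deployer.setContexts(contexts);

        WebAppProvider provider = new WebAppProvider();
        provider.setMonitoredDirName(jettyHome + "/contexts");
        provider.setScanInterval(0);
        deployer.addAppProvider(provider);
        return deployer;
    }
}
```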
diff --git a/solr/server/etc/create-solrtest.keystore.sh b/solr/server/etc/test/create-solrtest.keystore.sh
similarity index 96%
rename from solr/server/etc/create-solrtest.keystore.sh
rename to solr/server/etc/test/create-solrtest.keystore.sh
index d3decee..36c5f0d 100755
--- a/solr/server/etc/create-solrtest.keystore.sh
+++ b/solr/server/etc/test/create-solrtest.keystore.sh
@@ -18,7 +18,7 @@
 ############
  
 # This script shows how the solrtest.keystore file used for solr tests 
-# and these example configs was generated.
+# was generated.
 #
 # Running this script should only be necessary if the keystore file
 # needs to be replaced, which shouldn't be required until sometime around
diff --git a/solr/server/etc/solrtest.keystore b/solr/server/etc/test/solrtest.keystore
similarity index 100%
rename from solr/server/etc/solrtest.keystore
rename to solr/server/etc/test/solrtest.keystore
Binary files differ
diff --git a/solr/server/ivy.xml b/solr/server/ivy.xml
index 7453958..d4bb01b 100644
--- a/solr/server/ivy.xml
+++ b/solr/server/ivy.xml
@@ -17,7 +17,7 @@
    under the License.    
 -->
 <ivy-module version="2.0">
-  <info organisation="org.apache.solr" module="example"/>
+  <info organisation="org.apache.solr" module="server"/>
   <configurations defaultconfmapping="jetty->master;start->master;servlet->master;logging->master">
     <conf name="jetty" description="jetty jars" transitive="false"/>
     <conf name="start" description="jetty start jar" transitive="false"/>
@@ -40,15 +40,15 @@
     <dependency org="org.eclipse.jetty" name="jetty-security" rev="${/org.eclipse.jetty/jetty-security}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-server" rev="${/org.eclipse.jetty/jetty-server}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-servlet" rev="${/org.eclipse.jetty/jetty-servlet}" conf="jetty"/>
+    <dependency org="org.eclipse.jetty" name="jetty-servlets" rev="${/org.eclipse.jetty/jetty-servlets}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-util" rev="${/org.eclipse.jetty/jetty-util}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-webapp" rev="${/org.eclipse.jetty/jetty-webapp}" conf="jetty"/>
     <dependency org="org.eclipse.jetty" name="jetty-xml" rev="${/org.eclipse.jetty/jetty-xml}" conf="jetty"/>
 
+    <dependency org="javax.servlet" name="javax.servlet-api" rev="${/javax.servlet/javax.servlet-api}" conf="jetty"/>
+
     <dependency org="org.eclipse.jetty" name="jetty-start" rev="${/org.eclipse.jetty/jetty-start}" conf="start"/>
 
-    <dependency org="org.eclipse.jetty.orbit" name="javax.servlet" rev="${/org.eclipse.jetty.orbit/javax.servlet}" conf="servlet">
-      <artifact name="javax.servlet" type="orbit" ext="jar"/>
-    </dependency>
 
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/> 
   </dependencies>
diff --git a/solr/server/modules/http.mod b/solr/server/modules/http.mod
new file mode 100644
index 0000000..d4ceec5
--- /dev/null
+++ b/solr/server/modules/http.mod
@@ -0,0 +1,9 @@
+#
+# Jetty HTTP Connector
+#
+
+[depend]
+server
+
+[xml]
+etc/jetty-http.xml
\ No newline at end of file
diff --git a/solr/server/modules/https.mod b/solr/server/modules/https.mod
new file mode 100644
index 0000000..32f2a41
--- /dev/null
+++ b/solr/server/modules/https.mod
@@ -0,0 +1,19 @@
+#
+# Jetty HTTPS Connector
+#
+
+[depend]
+ssl
+
+[xml]
+etc/jetty-https.xml
+
+[ini-template]
+## HTTPS Configuration
+# HTTPS port to listen on
+https.port=8443
+# HTTPS idle timeout in milliseconds
+https.timeout=30000
+# HTTPS Socket.soLingerTime in seconds. (-1 to disable)
+https.soLingerTime=-1
+
diff --git a/solr/server/modules/server.mod b/solr/server/modules/server.mod
new file mode 100644
index 0000000..4bf59bf
--- /dev/null
+++ b/solr/server/modules/server.mod
@@ -0,0 +1,16 @@
+#
+# Base Server Module
+#
+
+[optional]
+jvm
+ext
+resources
+
+[lib]
+lib/*.jar
+lib/ext/*.jar
+resources/
+
+[xml]
+etc/jetty.xml
\ No newline at end of file
diff --git a/solr/server/modules/ssl.mod b/solr/server/modules/ssl.mod
new file mode 100644
index 0000000..fde2c3b
--- /dev/null
+++ b/solr/server/modules/ssl.mod
@@ -0,0 +1,40 @@
+#
+# SSL Keystore module
+#
+
+[depend]
+server
+
+[xml]
+etc/jetty-ssl.xml
+
+[files]
+http://git.eclipse.org/c/jetty/org.eclipse.jetty.project.git/plain/jetty-server/src/main/config/etc/keystore|etc/keystore
+
+[ini-template]
+### SSL Keystore Configuration
+# define the port to use for secure redirection
+jetty.secure.port=8443
+
+## Setup a demonstration keystore and truststore
+jetty.keystore=etc/keystore
+jetty.truststore=etc/keystore
+
+## Set the demonstration passwords.
+## Note that OBF passwords are not secure, just protected from casual observation
+## See http://www.eclipse.org/jetty/documentation/current/configuring-security-secure-passwords.html
+jetty.keystore.password=OBF:1vny1zlo1x8e1vnw1vn61x8g1zlu1vn4
+jetty.keymanager.password=OBF:1u2u1wml1z7s1z7a1wnl1u2g
+jetty.truststore.password=OBF:1vny1zlo1x8e1vnw1vn61x8g1zlu1vn4
+
+### Set the client auth behavior
+## Set to true if client certificate authentication is required
+# jetty.ssl.needClientAuth=true
+## Set to true if client certificate authentication is desired
+# jetty.ssl.wantClientAuth=true
+
+## Parameters to control the number and priority of acceptors and selectors
+# ssl.selectors=1
+# ssl.acceptors=1
+# ssl.selectorPriorityDelta=0
+ssl.acceptorPriorityDelta=0
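
As the ini-template comment says, OBF: values only guard against casual observation; they are reversible with Jetty's bundled utility. A small sketch, assuming jetty-util is on the classpath:

```java
// OBF: obfuscation is reversible by design; treat such values as plain text.
import org.eclipse.jetty.util.security.Password;

public class ObfSketch {
    public static void main(String[] args) {
        String obf = Password.obfuscate("secret"); // returns an "OBF:"-prefixed string
        System.out.println(obf);
        System.out.println(Password.deobfuscate(obf)); // prints "secret" again
    }
}
```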
diff --git a/solr/server/resources/jetty-logging.properties b/solr/server/resources/jetty-logging.properties
new file mode 100644
index 0000000..55b0c37
--- /dev/null
+++ b/solr/server/resources/jetty-logging.properties
@@ -0,0 +1 @@
+org.eclipse.jetty.util.log.class=org.eclipse.jetty.util.log.Slf4jLog
\ No newline at end of file
diff --git a/solr/server/solr/configsets/basic_configs/conf/schema.xml b/solr/server/solr/configsets/basic_configs/conf/schema.xml
index c0be84e..2b6850f 100755
--- a/solr/server/solr/configsets/basic_configs/conf/schema.xml
+++ b/solr/server/solr/configsets/basic_configs/conf/schema.xml
@@ -504,13 +504,13 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
     <!-- Spatial rectangle (bounding box) field. It supports most spatial predicates, and has
      special relevancy modes: score=overlapRatio|area|area2D (local-param to the query).  DocValues is recommended for
      relevancy. -->
     <fieldType name="bbox" class="solr.BBoxField"
-               geo="true" units="degrees" numberType="_bbox_coord" />
+               geo="true" distanceUnits="kilometers" numberType="_bbox_coord" />
     <fieldType name="_bbox_coord" class="solr.TrieDoubleField" precisionStep="8" docValues="true" stored="false"/>
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
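
The schema change above replaces units="degrees" with the new distanceUnits="kilometers" attribute, and maxDistErr moves from 0.000009 degrees to 0.001 km; both work out to roughly one meter of indexing precision. Distance parameters in queries follow suit: d in geofilt is now kilometers. A hedged SolrJ sketch, with a hypothetical "store" field of type location_rpt and a placeholder core URL:

```java
// With distanceUnits="kilometers", the d parameter of geofilt is read in km.
// "store" is a hypothetical location_rpt field; the core URL is a placeholder.
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

public class GeoFilterSketch {
    public static void main(String[] args) throws Exception {
        HttpSolrClient client = new HttpSolrClient("http://localhost:8983/solr/collection1");
        SolrQuery q = new SolrQuery("*:*");
        // d=5 now means 5 kilometers; under units="degrees" it meant 5 degrees.
        q.addFilterQuery("{!geofilt sfield=store pt=45.15,-93.85 d=5}");
        System.out.println(client.query(q).getResults().getNumFound() + " docs within 5 km");
        client.close();
    }
}
```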
diff --git a/solr/server/solr/configsets/data_driven_schema_configs/conf/schema.xml b/solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema
similarity index 87%
rename from solr/server/solr/configsets/data_driven_schema_configs/conf/schema.xml
rename to solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema
index adf3136..f2f19d3 100755
--- a/solr/server/solr/configsets/data_driven_schema_configs/conf/schema.xml
+++ b/solr/server/solr/configsets/data_driven_schema_configs/conf/managed-schema
@@ -45,7 +45,7 @@
     that avoids logging every request
 -->
 
-<schema name="example-schemaless" version="1.5">
+<schema name="example-data-driven-schema" version="1.5">
   <!-- attribute "name" is the name of this schema and is only used for display purposes.
        version="x.y" is Solr's version number for the schema syntax and 
        semantics.  It should not normally be changed by applications.
@@ -105,13 +105,21 @@
       trailing underscores (e.g. _version_) are reserved.
    -->
 
-    <!-- In this "schemaless" example, only two fields are pre-declared: id and _version_.
-         All other fields will be type guessed and added via the
+    <!-- In this data_driven_schema_configs configset, only three fields are pre-declared: 
+         id, _version_, and _text.  All other fields will be type guessed and added via the
          "add-unknown-fields-to-the-schema" update request processor chain declared 
          in solrconfig.xml.
+         
+         Note that many dynamic fields are also defined - you can use them to specify a 
+         field's type via field naming conventions - see below.
+  
+  WARNING: The _text catch-all field will significantly increase your index size.
+           If you don't need it, consider removing it and the corresponding copyField directive.
       -->
     <field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false" />
     <field name="_version_" type="long" indexed="true" stored="true"/>
+    <field name="_text" type="text_general" indexed="true" stored="false" multiValued="true"/>
+    <copyField source="*" dest="_text"/>
 
 
     <!-- Dynamic field definitions allow using convention over configuration
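
Because of the copyField above, every field is funneled into the _text catch-all, so
untargeted query terms can be resolved against it via the df parameter. A minimal SolrJ
sketch, assuming a collection named gettingstarted built on this configset:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class CatchAllExample {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client = new HttpSolrClient("http://localhost:8983/solr/gettingstarted")) {
          SolrQuery q = new SolrQuery("ipod");  // no field named in the query itself
          q.set("df", "_text");                 // direct bare terms at the catch-all
          System.out.println(client.query(q).getResults().getNumFound() + " matches");
        }
      }
    }
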
@@ -121,20 +129,19 @@
        a "*" only at the start or the end.  -->
    
     <dynamicField name="*_i"  type="int"    indexed="true"  stored="true"/>
-    <dynamicField name="*_is" type="int"    indexed="true"  stored="true"  multiValued="true"/>
+    <dynamicField name="*_is" type="ints"    indexed="true"  stored="true"/>
     <dynamicField name="*_s"  type="string"  indexed="true"  stored="true" />
-    <dynamicField name="*_ss" type="string"  indexed="true"  stored="true" multiValued="true"/>
+    <dynamicField name="*_ss" type="strings"  indexed="true"  stored="true"/>
     <dynamicField name="*_l"  type="long"   indexed="true"  stored="true"/>
-    <dynamicField name="*_ls" type="long"   indexed="true"  stored="true"  multiValued="true"/>
-    <dynamicField name="*_t"  type="text_general"    indexed="true"  stored="true"/>
-    <dynamicField name="*_txt" type="text_general"   indexed="true"  stored="true" multiValued="true"/>
-    <dynamicField name="*_en"  type="text_en"    indexed="true"  stored="true" multiValued="true"/>
+    <dynamicField name="*_ls" type="longs"   indexed="true"  stored="true"/>
+    <dynamicField name="*_t"   type="text_general" indexed="true" stored="true"/>
+    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true"/>
     <dynamicField name="*_b"  type="boolean" indexed="true" stored="true"/>
-    <dynamicField name="*_bs" type="boolean" indexed="true" stored="true"  multiValued="true"/>
+    <dynamicField name="*_bs" type="booleans" indexed="true" stored="true"/>
     <dynamicField name="*_f"  type="float"  indexed="true"  stored="true"/>
-    <dynamicField name="*_fs" type="float"  indexed="true"  stored="true"  multiValued="true"/>
+    <dynamicField name="*_fs" type="floats"  indexed="true"  stored="true"/>
     <dynamicField name="*_d"  type="double" indexed="true"  stored="true"/>
-    <dynamicField name="*_ds" type="double" indexed="true"  stored="true"  multiValued="true"/>
+    <dynamicField name="*_ds" type="doubles" indexed="true"  stored="true"/>
 
     <!-- Type used to index the lat and lon components for the "location" FieldType -->
     <dynamicField name="*_coordinate"  type="tdouble" indexed="true"  stored="false" />
@@ -142,13 +149,19 @@
     <dynamicField name="*_dt"  type="date"    indexed="true"  stored="true"/>
     <dynamicField name="*_dts" type="date"    indexed="true"  stored="true" multiValued="true"/>
     <dynamicField name="*_p"  type="location" indexed="true" stored="true"/>
+    <dynamicField name="*_srpt"  type="location_rpt" indexed="true" stored="true"/>
 
     <!-- some trie-coded dynamic fields for faster range queries -->
     <dynamicField name="*_ti" type="tint"    indexed="true"  stored="true"/>
+    <dynamicField name="*_tis" type="tints"    indexed="true"  stored="true"/>
     <dynamicField name="*_tl" type="tlong"   indexed="true"  stored="true"/>
+    <dynamicField name="*_tls" type="tlongs"   indexed="true"  stored="true"/>
     <dynamicField name="*_tf" type="tfloat"  indexed="true"  stored="true"/>
+    <dynamicField name="*_tfs" type="tfloats"  indexed="true"  stored="true"/>
     <dynamicField name="*_td" type="tdouble" indexed="true"  stored="true"/>
+    <dynamicField name="*_tds" type="tdoubles" indexed="true"  stored="true"/>
     <dynamicField name="*_tdt" type="tdate"  indexed="true"  stored="true"/>
+    <dynamicField name="*_tdts" type="tdates"  indexed="true"  stored="true"/>
 
     <dynamicField name="*_c"   type="currency" indexed="true"  stored="true"/>
 
@@ -173,24 +186,9 @@
    -->
   <uniqueKey>id</uniqueKey>
 
-  <!-- DEPRECATED: The defaultSearchField is consulted by various query parsers when
-  parsing a query string that isn't explicit about the field.  Machine (non-user)
-  generated queries are best made explicit, or they can use the "df" request parameter
-  which takes precedence over this.
-  Note: Un-commenting defaultSearchField will be insufficient if your request handler
-  in solrconfig.xml defines "df", which takes precedence. That would need to be removed.
- <defaultSearchField>text</defaultSearchField> -->
-
-  <!-- DEPRECATED: The defaultOperator (AND|OR) is consulted by various query parsers
-  when parsing a query string to determine if a clause of the query should be marked as
-  required or optional, assuming the clause isn't already marked by some operator.
-  The default is OR, which is generally assumed so it is not a good idea to change it
-  globally here.  The "q.op" request parameter takes precedence over this.
- <solrQueryParser defaultOperator="OR"/> -->
-
   <!-- copyField commands copy one field to another at the time a document
-        is added to the index.  It's used either to index the same field differently,
-        or to add multiple fields to the same field for easier/faster searching.
+       is added to the index.  It's used either to index the same field differently,
+       or to add multiple fields to the same field for easier/faster searching.
 
    <copyField source="cat" dest="text"/>
    <copyField source="name" dest="text"/>
@@ -277,6 +275,11 @@
     <fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
     <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
 
+    <fieldType name="ints" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0" multiValued="true"/>
+    <fieldType name="floats" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0" multiValued="true"/>
+    <fieldType name="longs" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0" multiValued="true"/>
+    <fieldType name="doubles" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0" multiValued="true"/>
+
     <!--
      Numeric field types that index each value at various levels of precision
      to accelerate range queries when the number of values between the range
@@ -320,6 +323,7 @@
          Note: For faster range queries, consider the tdate type
       -->
     <fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="dates" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0" multiValued="true"/>
 
     <!-- A Trie based date field for faster date range queries and date faceting. -->
     <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
@@ -364,6 +368,7 @@
     -->
 
     <!-- A text field that only splits on whitespace for exact matching of words -->
+  <dynamicField name="*_ws" type="text_ws"  indexed="true"  stored="true"/>
     <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
       <analyzer>
         <tokenizer class="solr.WhitespaceTokenizerFactory"/>
@@ -397,6 +402,7 @@
          (lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
          finally applies Porter's stemming.  The query time analyzer
          also applies synonyms from synonyms.txt. -->
+    <dynamicField name="*_txt_en" type="text_en"  indexed="true"  stored="true"/>
     <fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
       <analyzer type="index">
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -443,6 +449,7 @@
 	 cases will work, for example query "wi fi" will match
 	 document "WiFi" or "wi-fi".
         -->
+    <dynamicField name="*_txt_en_split" type="text_en_splitting"  indexed="true"  stored="true"/>
     <fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
       <analyzer type="index">
         <tokenizer class="solr.WhitespaceTokenizerFactory"/>
@@ -476,6 +483,7 @@
 
     <!-- Less flexible matching, but less false matches.  Probably not ideal for product names,
          but may be good for SKUs.  Can insert dashes in the wrong place and still match. -->
+    <dynamicField name="*_txt_en_split_tight" type="text_en_splitting_tight"  indexed="true"  stored="true"/>
     <fieldType name="text_en_splitting_tight" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
       <analyzer>
         <tokenizer class="solr.WhitespaceTokenizerFactory"/>
@@ -493,7 +501,8 @@
 
     <!-- Just like text_general except it reverses the characters of
 	 each token, to enable more efficient leading wildcard queries. -->
-    <fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
+  <dynamicField name="*_txt_rev" type="text_general_rev"  indexed="true"  stored="true"/>
+  <fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
       <analyzer type="index">
         <tokenizer class="solr.StandardTokenizerFactory"/>
         <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt" />
@@ -509,75 +518,16 @@
       </analyzer>
     </fieldType>
 
-    <!-- charFilter + WhitespaceTokenizer  -->
-    <!--
-    <fieldType name="text_char_norm" class="solr.TextField" positionIncrementGap="100" >
-      <analyzer>
-        <charFilter class="solr.MappingCharFilterFactory" mapping="mapping-ISOLatin1Accent.txt"/>
-        <tokenizer class="solr.WhitespaceTokenizerFactory"/>
-      </analyzer>
-    </fieldType>
-    -->
-
-    <!-- This is an example of using the KeywordTokenizer along
-         With various TokenFilterFactories to produce a sortable field
-         that does not include some properties of the source text
-      -->
-    <fieldType name="alphaOnlySort" class="solr.TextField" sortMissingLast="true" omitNorms="true">
-      <analyzer>
-        <!-- KeywordTokenizer does no actual tokenizing, so the entire
-             input string is preserved as a single token
-          -->
-        <tokenizer class="solr.KeywordTokenizerFactory"/>
-        <!-- The LowerCase TokenFilter does what you expect, which can be
-             when you want your sorting to be case insensitive
-          -->
-        <filter class="solr.LowerCaseFilterFactory" />
-        <!-- The TrimFilter removes any leading or trailing whitespace -->
-        <filter class="solr.TrimFilterFactory" />
-        <!-- The PatternReplaceFilter gives you the flexibility to use
-             Java Regular expression to replace any sequence of characters
-             matching a pattern with an arbitrary replacement string, 
-             which may include back references to portions of the original
-             string matched by the pattern.
-             
-             See the Java Regular Expression documentation for more
-             information on pattern and replacement string syntax.
-             
-             http://docs.oracle.com/javase/7/docs/api/java/util/regex/package-summary.html
-          -->
-        <filter class="solr.PatternReplaceFilterFactory"
-                pattern="([^a-z])" replacement="" replace="all"
-            />
-      </analyzer>
-    </fieldType>
-    
-    <fieldType name="phonetic" stored="false" indexed="true" class="solr.TextField" >
+  <dynamicField name="*_phon_en" type="phonetic_en"  indexed="true"  stored="true"/>
+  <fieldType name="phonetic_en" stored="false" indexed="true" class="solr.TextField" >
       <analyzer>
         <tokenizer class="solr.StandardTokenizerFactory"/>
         <filter class="solr.DoubleMetaphoneFilterFactory" inject="false"/>
       </analyzer>
     </fieldType>
 
-    <fieldType name="payloads" stored="false" indexed="true" class="solr.TextField" >
-      <analyzer>
-        <tokenizer class="solr.WhitespaceTokenizerFactory"/>
-        <!--
-        The DelimitedPayloadTokenFilter can put payloads on tokens... for example,
-        a token of "foo|1.4"  would be indexed as "foo" with a payload of 1.4f
-        Attributes of the DelimitedPayloadTokenFilterFactory : 
-         "delimiter" - a one character delimiter. Default is | (pipe)
-	 "encoder" - how to encode the following value into a playload
-	    float -> org.apache.lucene.analysis.payloads.FloatEncoder,
-	    integer -> o.a.l.a.p.IntegerEncoder
-	    identity -> o.a.l.a.p.IdentityEncoder
-            Fully Qualified class name implementing PayloadEncoder, Encoder must have a no arg constructor.
-         -->
-        <filter class="solr.DelimitedPayloadTokenFilterFactory" encoder="float"/>
-      </analyzer>
-    </fieldType>
-
     <!-- lowercases the entire field value, keeping it as a single token.  -->
+    <dynamicField name="*_s_lower" type="lowercase"  indexed="true"  stored="true"/>
     <fieldType name="lowercase" class="solr.TextField" positionIncrementGap="100">
       <analyzer>
         <tokenizer class="solr.KeywordTokenizerFactory"/>
@@ -589,7 +539,8 @@
       Example of using PathHierarchyTokenizerFactory at index time, so
       queries for paths match documents at that path, or in descendent paths
     -->
-    <fieldType name="descendent_path" class="solr.TextField">
+  <dynamicField name="*_descendent_path" type="descendent_path"  indexed="true"  stored="true"/>
+  <fieldType name="descendent_path" class="solr.TextField">
       <analyzer type="index">
         <tokenizer class="solr.PathHierarchyTokenizerFactory" delimiter="/" />
       </analyzer>
@@ -601,6 +552,7 @@
       Example of using PathHierarchyTokenizerFactory at query time, so
       queries for paths match documents at that path, or in ancestor paths
     -->
+    <dynamicField name="*_ancestor_path" type="ancestor_path"  indexed="true"  stored="true"/>
     <fieldType name="ancestor_path" class="solr.TextField">
       <analyzer type="index">
         <tokenizer class="solr.KeywordTokenizerFactory" />
@@ -625,7 +577,8 @@
       The subFields are an implementation detail of the fieldType, and end
       users normally should not need to know about them.
      -->
-    <fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
+  <dynamicField name="*_point" type="point"  indexed="true"  stored="true"/>
+  <fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
 
     <!-- A specialized field for geospatial search. If indexed, this fieldType must not be multivalued. -->
     <fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
@@ -635,7 +588,7 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-               geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+               geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
     <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
         Parameters:
@@ -655,6 +608,7 @@
     <!-- some examples for different languages (generally ordered by ISO code) -->
 
     <!-- Arabic -->
+    <dynamicField name="*_txt_ar" type="text_ar"  indexed="true"  stored="true"/>
     <fieldType name="text_ar" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -668,6 +622,7 @@
     </fieldType>
 
     <!-- Bulgarian -->
+    <dynamicField name="*_txt_bg" type="text_bg"  indexed="true"  stored="true"/>
     <fieldType name="text_bg" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/> 
@@ -678,6 +633,7 @@
     </fieldType>
     
     <!-- Catalan -->
+    <dynamicField name="*_txt_ca" type="text_ca"  indexed="true"  stored="true"/>
     <fieldType name="text_ca" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -690,6 +646,7 @@
     </fieldType>
     
     <!-- CJK bigram (see text_ja for a Japanese configuration using morphological analysis) -->
+    <dynamicField name="*_txt_cjk" type="text_cjk"  indexed="true"  stored="true"/>
     <fieldType name="text_cjk" class="solr.TextField" positionIncrementGap="100">
       <analyzer>
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -702,6 +659,7 @@
     </fieldType>
 
     <!-- Czech -->
+    <dynamicField name="*_txt_cz" type="text_cz"  indexed="true"  stored="true"/>
     <fieldType name="text_cz" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -712,6 +670,7 @@
     </fieldType>
     
     <!-- Danish -->
+    <dynamicField name="*_txt_da" type="text_da"  indexed="true"  stored="true"/>
     <fieldType name="text_da" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -722,6 +681,7 @@
     </fieldType>
     
     <!-- German -->
+    <dynamicField name="*_txt_de" type="text_de"  indexed="true"  stored="true"/>
     <fieldType name="text_de" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -735,6 +695,7 @@
     </fieldType>
     
     <!-- Greek -->
+    <dynamicField name="*_txt_el" type="text_el"  indexed="true"  stored="true"/>
     <fieldType name="text_el" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -746,6 +707,7 @@
     </fieldType>
     
     <!-- Spanish -->
+    <dynamicField name="*_txt_es" type="text_es"  indexed="true"  stored="true"/>
     <fieldType name="text_es" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -757,6 +719,7 @@
     </fieldType>
     
     <!-- Basque -->
+    <dynamicField name="*_txt_eu" type="text_eu"  indexed="true"  stored="true"/>
     <fieldType name="text_eu" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -767,6 +730,7 @@
     </fieldType>
     
     <!-- Persian -->
+    <dynamicField name="*_txt_fa" type="text_fa"  indexed="true"  stored="true"/>
     <fieldType name="text_fa" class="solr.TextField" positionIncrementGap="100">
       <analyzer>
         <!-- for ZWNJ -->
@@ -780,6 +744,7 @@
     </fieldType>
     
     <!-- Finnish -->
+    <dynamicField name="*_txt_fi" type="text_fi"  indexed="true"  stored="true"/>
     <fieldType name="text_fi" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -791,6 +756,7 @@
     </fieldType>
     
     <!-- French -->
+    <dynamicField name="*_txt_fr" type="text_fr"  indexed="true"  stored="true"/>
     <fieldType name="text_fr" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -805,6 +771,7 @@
     </fieldType>
     
     <!-- Irish -->
+    <dynamicField name="*_txt_ga" type="text_ga"  indexed="true"  stored="true"/>
     <fieldType name="text_ga" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -819,6 +786,7 @@
     </fieldType>
     
     <!-- Galician -->
+    <dynamicField name="*_txt_gl" type="text_gl"  indexed="true"  stored="true"/>
     <fieldType name="text_gl" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -830,6 +798,7 @@
     </fieldType>
     
     <!-- Hindi -->
+    <dynamicField name="*_txt_hi" type="text_hi"  indexed="true"  stored="true"/>
     <fieldType name="text_hi" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -844,6 +813,7 @@
     </fieldType>
     
     <!-- Hungarian -->
+    <dynamicField name="*_txt_hu" type="text_hu"  indexed="true"  stored="true"/>
     <fieldType name="text_hu" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -855,6 +825,7 @@
     </fieldType>
     
     <!-- Armenian -->
+    <dynamicField name="*_txt_hy" type="text_hy"  indexed="true"  stored="true"/>
     <fieldType name="text_hy" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -865,6 +836,7 @@
     </fieldType>
     
     <!-- Indonesian -->
+    <dynamicField name="*_txt_id" type="text_id"  indexed="true"  stored="true"/>
     <fieldType name="text_id" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -876,7 +848,8 @@
     </fieldType>
     
     <!-- Italian -->
-    <fieldType name="text_it" class="solr.TextField" positionIncrementGap="100">
+  <dynamicField name="*_txt_it" type="text_it"  indexed="true"  stored="true"/>
+  <fieldType name="text_it" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
         <!-- removes l', etc -->
@@ -894,6 +867,7 @@
          parser config with <solrQueryParser defaultOperator="AND"/> further down in this file.  Use 
          OR if you would like to optimize for recall (default).
     -->
+    <dynamicField name="*_txt_ja" type="text_ja"  indexed="true"  stored="true"/>
     <fieldType name="text_ja" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="false">
       <analyzer>
         <!-- Kuromoji Japanese morphological analyzer/tokenizer (JapaneseTokenizer)
@@ -942,6 +916,7 @@
     </fieldType>
     
     <!-- Latvian -->
+    <dynamicField name="*_txt_lv" type="text_lv"  indexed="true"  stored="true"/>
     <fieldType name="text_lv" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -952,6 +927,7 @@
     </fieldType>
     
     <!-- Dutch -->
+    <dynamicField name="*_txt_nl" type="text_nl"  indexed="true"  stored="true"/>
     <fieldType name="text_nl" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -963,6 +939,7 @@
     </fieldType>
     
     <!-- Norwegian -->
+    <dynamicField name="*_txt_no" type="text_no"  indexed="true"  stored="true"/>
     <fieldType name="text_no" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -975,7 +952,8 @@
     </fieldType>
     
     <!-- Portuguese -->
-    <fieldType name="text_pt" class="solr.TextField" positionIncrementGap="100">
+  <dynamicField name="*_txt_pt" type="text_pt"  indexed="true"  stored="true"/>
+  <fieldType name="text_pt" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
         <filter class="solr.LowerCaseFilterFactory"/>
@@ -988,6 +966,7 @@
     </fieldType>
     
     <!-- Romanian -->
+    <dynamicField name="*_txt_ro" type="text_ro"  indexed="true"  stored="true"/>
     <fieldType name="text_ro" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -998,6 +977,7 @@
     </fieldType>
     
     <!-- Russian -->
+    <dynamicField name="*_txt_ru" type="text_ru"  indexed="true"  stored="true"/>
     <fieldType name="text_ru" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -1009,6 +989,7 @@
     </fieldType>
     
     <!-- Swedish -->
+    <dynamicField name="*_txt_sv" type="text_sv"  indexed="true"  stored="true"/>
     <fieldType name="text_sv" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -1020,6 +1001,7 @@
     </fieldType>
     
     <!-- Thai -->
+    <dynamicField name="*_txt_th" type="text_th"  indexed="true"  stored="true"/>
     <fieldType name="text_th" class="solr.TextField" positionIncrementGap="100">
       <analyzer>
         <tokenizer class="solr.ThaiTokenizerFactory"/>
@@ -1029,6 +1011,7 @@
     </fieldType>
     
     <!-- Turkish -->
+    <dynamicField name="*_txt_tr" type="text_tr"  indexed="true"  stored="true"/>
     <fieldType name="text_tr" class="solr.TextField" positionIncrementGap="100">
       <analyzer> 
         <tokenizer class="solr.StandardTokenizerFactory"/>
@@ -1038,10 +1021,6 @@
       </analyzer>
     </fieldType>
 
-  <!-- catchall field, containing all other searchable text fields (implemented
-   via copyField)  -->
-  <field name="_text" type="text_general" indexed="true" stored="false" multiValued="true"/>
-  <copyField source="*" dest="_text"/>
   <!-- Similarity is the scoring routine for each document vs. a query.
        A custom Similarity or SimilarityFactory may be specified here, but 
        the default is fine for most applications.  
diff --git a/solr/server/solr/configsets/data_driven_schema_configs/conf/params.json b/solr/server/solr/configsets/data_driven_schema_configs/conf/params.json
new file mode 100644
index 0000000..06114ef
--- /dev/null
+++ b/solr/server/solr/configsets/data_driven_schema_configs/conf/params.json
@@ -0,0 +1,20 @@
+{"params":{
+  "query":{
+    "defType":"edismax",
+    "q.alt":"*:*",
+    "rows":"10",
+    "fl":"*,score",
+    "":{"v":0}
+  },
+  "facets":{
+    "facet":"on",
+    "facet.mincount": "1",
+    "":{"v":0}
+  },
+ "velocity":{
+   "wt": "velocity",
+   "v.template":"browse",
+   "v.layout": "layout",
+   "":{"v":0}
+ }
+}}
\ No newline at end of file
diff --git a/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml b/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml
index f175161..d531a55 100755
--- a/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml
+++ b/solr/server/solr/configsets/data_driven_schema_configs/conf/solrconfig.xml
@@ -256,15 +256,6 @@
     <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
-         instead of from the Directory. Hosts in a master/slave setup
-         should have this set to false while those in a SolrCloud
-         cluster need to be set to true. Default: true
-      -->
-    <!-- 
-    <nrtMode>true</nrtMode>
-      -->
-
     <!-- Commit Deletion Policy
          Custom deletion policies can be specified here. The class must
          implement org.apache.lucene.index.IndexDeletionPolicy.
@@ -869,24 +860,9 @@
        (SearchHandler) can be registered multiple times with different
        names (and different init parameters)
     -->
-  <requestHandler name="/browse" class="solr.SearchHandler">
+  <requestHandler name="/browse" class="solr.SearchHandler" useParams="query,facets,velocity,browse">
     <lst name="defaults">
       <str name="echoParams">explicit</str>
-
-      <!-- VelocityResponseWriter settings -->
-      <str name="wt">velocity</str>
-      <str name="v.template">browse</str>
-      <str name="v.layout">layout</str>
-
-      <!-- Query settings -->
-      <str name="defType">edismax</str>
-      <str name="q.alt">*:*</str>
-      <str name="rows">10</str>
-      <str name="fl">*,score</str>
-
-      <!-- Faceting defaults -->
-      <str name="facet">on</str>
-      <str name="facet.mincount">1</str>
     </lst>
   </requestHandler>
 
@@ -1402,6 +1378,10 @@
     <processor class="solr.LogUpdateProcessorFactory"/>
     <processor class="solr.DistributedUpdateProcessorFactory"/>
     <processor class="solr.RemoveBlankFieldUpdateProcessorFactory"/>
+    <processor class="solr.FieldNameMutatingUpdateProcessorFactory">
+      <str name="pattern">[^\w-\.]</str>
+      <str name="replacement">_</str>
+    </processor>
     <processor class="solr.ParseBooleanFieldUpdateProcessorFactory"/>
     <processor class="solr.ParseLongFieldUpdateProcessorFactory"/>
     <processor class="solr.ParseDoubleFieldUpdateProcessorFactory"/>
@@ -1550,8 +1530,9 @@
   <!--
      Custom response writers can be declared as needed...
     -->
-  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy"/>
-
+  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+    <str name="template.base.dir">${velocity.template.base.dir:}</str>
+  </queryResponseWriter>
 
   <!-- XSLT response writer transforms the XML output by any xslt file found
        in Solr's conf/xslt directory.  Changes to xslt files are checked for
diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/schema.xml b/solr/server/solr/configsets/sample_techproducts_configs/conf/schema.xml
index 119ecd9..fab5731 100755
--- a/solr/server/solr/configsets/sample_techproducts_configs/conf/schema.xml
+++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/schema.xml
@@ -707,13 +707,13 @@
       http://wiki.apache.org/solr/SolrAdaptersForLuceneSpatial4
     -->
     <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-        geo="true" distErrPct="0.025" maxDistErr="0.000009" units="degrees" />
+        geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers" />
 
     <!-- Spatial rectangle (bounding box) field. It supports most spatial predicates, and has
      special relevancy modes: score=overlapRatio|area|area2D (local-param to the query).  DocValues is recommended for
      relevancy. -->
     <fieldType name="bbox" class="solr.BBoxField"
-               geo="true" units="degrees" numberType="_bbox_coord" />
+               geo="true" distanceUnits="kilometers" numberType="_bbox_coord" />
     <fieldType name="_bbox_coord" class="solr.TrieDoubleField" precisionStep="8" docValues="true" stored="false"/>
 
    <!-- Money/currency field type. See http://wiki.apache.org/solr/MoneyFieldType
diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
index 170a196..b8c90ca 100755
--- a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
+++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
@@ -117,23 +117,7 @@
        persistent, and doesn't work with replication.
     -->
   <directoryFactory name="DirectoryFactory" 
-                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}">
-    
-         
-    <!-- These will be used if you are using the solr.HdfsDirectoryFactory,
-         otherwise they will be ignored. If you don't plan on using hdfs,
-         you can safely remove this section. -->      
-    <!-- The root directory that collection data should be written to. -->     
-    <str name="solr.hdfs.home">${solr.hdfs.home:}</str>
-    <!-- The hadoop configuration files to use for the hdfs client. -->    
-    <str name="solr.hdfs.confdir">${solr.hdfs.confdir:}</str>
-    <!-- Enable/Disable the hdfs cache. -->    
-    <str name="solr.hdfs.blockcache.enabled">${solr.hdfs.blockcache.enabled:true}</str>
-    <!-- Enable/Disable using one global cache for all SolrCores. 
-         The settings used will be from the first HdfsDirectoryFactory created. -->    
-    <str name="solr.hdfs.blockcache.global">${solr.hdfs.blockcache.global:true}</str>
-    
-  </directoryFactory> 
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
 
   <!-- The CodecFactory for defining the format of the inverted index.
        The default implementation is SchemaCodecFactory, which is the official Lucene
@@ -274,15 +258,6 @@
     <unlockOnStartup>false</unlockOnStartup>
       -->
 
-    <!-- If true, IndexReaders will be opened/reopened from the IndexWriter
-         instead of from the Directory. Hosts in a master/slave setup
-         should have this set to false while those in a SolrCloud
-         cluster need to be set to true. Default: true
-      -->
-    <!-- 
-    <nrtMode>true</nrtMode>
-      -->
-
     <!-- Commit Deletion Policy
          Custom deletion policies can be specified here. The class must
          implement org.apache.lucene.index.IndexDeletionPolicy.
@@ -1364,8 +1339,7 @@
 
        To use this suggester, set the "solr.suggester.enabled=true" system property
     -->
-  <searchComponent name="suggest" class="solr.SuggestComponent" 
-                   enable="${solr.suggester.enabled:false}"     >
+  <searchComponent name="suggest" class="solr.SuggestComponent">
     <lst name="suggester">
       <str name="name">mySuggester</str>
       <str name="lookupImpl">FuzzyLookupFactory</str>      
@@ -1373,11 +1347,12 @@
       <str name="field">cat</str>
       <str name="weightField">price</str>
       <str name="suggestAnalyzerFieldType">string</str>
+      <str name="buildOnStartup">false</str>
     </lst>
   </searchComponent>
 
   <requestHandler name="/suggest" class="solr.SearchHandler" 
-                  startup="lazy" enable="${solr.suggester.enabled:false}" >
+                  startup="lazy" >
     <lst name="defaults">
       <str name="suggest">true</str>
       <str name="suggest.count">10</str>
@@ -1771,7 +1746,9 @@
   <!--
      Custom response writers can be declared as needed...
     -->
-    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy"/>
+    <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+      <str name="template.base.dir">${velocity.template.base.dir:}</str>
+    </queryResponseWriter>
   
 
   <!-- XSLT response writer transforms the XML output by any xslt file found
diff --git a/solr/server/solr/solr.xml b/solr/server/solr/solr.xml
index 94d60b6..e247452 100644
--- a/solr/server/solr/solr.xml
+++ b/solr/server/solr/solr.xml
@@ -29,17 +29,23 @@
 <solr>
 
   <solrcloud>
+
     <str name="host">${host:}</str>
     <int name="hostPort">${jetty.port:8983}</int>
     <str name="hostContext">${hostContext:solr}</str>
-    <int name="zkClientTimeout">${zkClientTimeout:30000}</int>
+
     <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+
+    <int name="zkClientTimeout">${zkClientTimeout:30000}</int>
+    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:600000}</int>
+    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:60000}</int>
+
   </solrcloud>
 
   <shardHandlerFactory name="shardHandlerFactory"
     class="HttpShardHandlerFactory">
-    <int name="socketTimeout">${socketTimeout:0}</int>
-    <int name="connTimeout">${connTimeout:0}</int>
+    <int name="socketTimeout">${socketTimeout:600000}</int>
+    <int name="connTimeout">${connTimeout:60000}</int>
   </shardHandlerFactory>
 
 </solr>
diff --git a/solr/server/start.ini b/solr/server/start.ini
new file mode 100644
index 0000000..dac95c3
--- /dev/null
+++ b/solr/server/start.ini
@@ -0,0 +1,9 @@
+--module=http
+
+jetty.threads.min=10
+jetty.threads.max=10000
+jetty.threads.idle.timeout=50000
+
+# jetty.http.acceptors=1
+# jetty.http.selectors=1
+
diff --git a/solr/SYSTEM_REQUIREMENTS.txt b/solr/site/SYSTEM_REQUIREMENTS.mdtext
similarity index 100%
rename from solr/SYSTEM_REQUIREMENTS.txt
rename to solr/site/SYSTEM_REQUIREMENTS.mdtext
diff --git a/solr/site/assets/images/favicon.ico b/solr/site/assets/images/favicon.ico
new file mode 100644
index 0000000..e93104c
--- /dev/null
+++ b/solr/site/assets/images/favicon.ico
Binary files differ
diff --git a/solr/site/assets/images/quickstart-admin-ui-facet-options.png b/solr/site/assets/images/quickstart-admin-ui-facet-options.png
new file mode 100644
index 0000000..7c9be7e
--- /dev/null
+++ b/solr/site/assets/images/quickstart-admin-ui-facet-options.png
Binary files differ
diff --git a/solr/site/assets/images/quickstart-query-screen.png b/solr/site/assets/images/quickstart-query-screen.png
new file mode 100644
index 0000000..f46bf2d
--- /dev/null
+++ b/solr/site/assets/images/quickstart-query-screen.png
Binary files differ
diff --git a/solr/site/assets/images/quickstart-range-facet.png b/solr/site/assets/images/quickstart-range-facet.png
new file mode 100644
index 0000000..e441782
--- /dev/null
+++ b/solr/site/assets/images/quickstart-range-facet.png
Binary files differ
diff --git a/solr/site/assets/images/quickstart-solrcloud.png b/solr/site/assets/images/quickstart-solrcloud.png
new file mode 100644
index 0000000..9b76456
--- /dev/null
+++ b/solr/site/assets/images/quickstart-solrcloud.png
Binary files differ
diff --git a/solr/site/assets/images/quickstart-spatial.png b/solr/site/assets/images/quickstart-spatial.png
new file mode 100644
index 0000000..2a73d6a
--- /dev/null
+++ b/solr/site/assets/images/quickstart-spatial.png
Binary files differ
diff --git a/solr/site/html/solr.svg b/solr/site/assets/images/solr.svg
similarity index 100%
rename from solr/site/html/solr.svg
rename to solr/site/assets/images/solr.svg
diff --git a/solr/site/html/tutorial.html b/solr/site/html/tutorial.html
deleted file mode 100755
index 725c1aa..0000000
--- a/solr/site/html/tutorial.html
+++ /dev/null
@@ -1,688 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<html>
-<head>
-<META http-equiv="Content-Type" content="text/html; charset=UTF-8" />
-<title>Solr Tutorial</title>
-<style>
-pre.code {
-  background-color: #D3D3D3;
-  padding: 0.2em;
-}
-.codefrag {
-  font-family: monospace;
-  font-weight:bold;
-}
-</style>
-
-</head>
-<body>
-
-<div id="content">
-<h1>Solr Tutorial</h1>
-
-<a name="N1000E"></a><a name="Overview"></a>
-<h2 class="boxed">Overview</h2>
-<div class="section">
-<p>
-This document covers the basics of running Solr using an example
-schema, and some sample data.
-</p>
-</div>
-
-
-<a name="N10018"></a><a name="Requirements"></a>
-<h2 class="boxed">Requirements</h2>
-<div class="section">
-<p>
-To follow along with this tutorial, you will need...
-</p>
-<ol>
-  
-<li>Java 1.8 or greater.  Some places you can get it are from
-  <a href="http://www.oracle.com/technetwork/java/javase/downloads/index.html">Oracle</a>,
-  <a href="http://openjdk.java.net/">Open JDK</a>, or
-  <a href="http://www.ibm.com/developerworks/java/jdk/">IBM</a>.
-  <ul>
-    <li>Running <span class="codefrag">java -version</span> at the command 
-      line should indicate a version number starting with 1.8.
-    </li>
-    <li>Gnu's GCJ is not supported and does not work with Solr.</li>
-  </ul>
-</li>
-  
-<li>A <a href="http://lucene.apache.org/solr/mirrors-solr-latest-redir.html">Solr release</a>.
-  </li>
-
-</ol>
-</div>
-
-
-<a name="N10040"></a><a name="Getting+Started"></a>
-<h2 class="boxed">Getting Started</h2>
-<div class="section">
-<p>
-<strong>
-Please run the browser showing this tutorial and the Solr server on the same machine so tutorial links will correctly point to your Solr server.
-</strong>
-</p>
-<p>
-Begin by unzipping the Solr release and changing your working directory
-to be the "<span class="codefrag">example</span>" directory.  (Note that the base directory name may vary with the version of Solr downloaded.)  For example, with a shell in UNIX, Cygwin, or MacOS:
-</p>
-<pre class="code">
-user:~solr$ <strong>ls</strong>
-solr-nightly.zip
-user:~solr$ <strong>unzip -q solr-nightly.zip</strong>
-user:~solr$ <strong>cd solr-nightly/example/</strong>
-</pre>
-<p>
-Solr can run in any Java Servlet Container of your choice, but to simplify
-this tutorial, the example index includes a small installation of Jetty.
-</p>
-<p>
-To launch Jetty with the Solr WAR, and the example configs, just run the <span class="codefrag">start.jar</span> ...
-</p>
-<pre class="code">
-user:~/solr/example$ <strong>java -jar start.jar</strong>
-2012-06-06 15:25:59.815:INFO:oejs.Server:jetty-8.1.2.v20120308
-2012-06-06 15:25:59.834:INFO:oejdp.ScanningAppProvider:Deployment monitor .../solr/example/webapps at interval 0
-2012-06-06 15:25:59.839:INFO:oejd.DeploymentManager:Deployable added: .../solr/example/webapps/solr.war
-...
-Jun 6, 2012 3:26:03 PM org.apache.solr.core.SolrCore registerSearcher
-INFO: [collection1] Registered new searcher Searcher@7527e2ee main{StandardDirectoryReader(segments_1:1)}
-</pre>
-<p>
-This will start up the Jetty application server on port 8983, and use your terminal to display the logging information from Solr.
-</p>
-<p>
-You can see that the Solr is running by loading <a href="http://localhost:8983/solr/">http://localhost:8983/solr/</a> in your web browser.  This is the main starting point for Administering Solr.
-</p>
-</div>
-
-
-
-
-<a name="N10078"></a><a name="Indexing+Data"></a>
-<h2 class="boxed">Indexing Data</h2>
-<div class="section">
-<p>
-Your Solr server is up and running, but it doesn't contain any data.  You can
-modify a Solr index by POSTing commands to Solr to add (or
-update) documents, delete documents, and commit pending adds and deletes.  
-These commands can be in a 
-<a href="http://wiki.apache.org/solr/UpdateRequestHandler">variety of formats</a>.
-</p>
-<p>
-The <span class="codefrag">exampledocs</span> directory contains sample files
-showing of the types of commands Solr accepts, as well as a java utility 
-for posting them from the command line (a <span class="codefrag">post.sh</span>
-shell script is also available, but for this tutorial we'll use the 
-cross-platform Java client. Run <span class="codefrag">java -jar post.jar -h</span> so see it's various options).  
-</p>
-<p> To try this, open a new terminal window, enter the exampledocs directory, 
-and run "<span class="codefrag">java -jar post.jar</span>" on some of the XML 
-files in that directory.
-</p>
-<pre class="code">
-user:~/solr/example/exampledocs$ <strong>java -jar post.jar solr.xml monitor.xml</strong>
-SimplePostTool: version 1.4
-SimplePostTool: POSTing files to http://localhost:8983/solr/update..
-SimplePostTool: POSTing file solr.xml
-SimplePostTool: POSTing file monitor.xml
-SimplePostTool: COMMITting Solr index changes..
-</pre>
-<p>
-You have now indexed two documents in Solr, and committed these changes.  
-You can now search for "solr" by loading the <a href="http://localhost:8983/solr/#/collection1/query">"Query" tab</a> in the Admin interface, and entering "solr" in the "q" text box.  Clicking the "Execute Query" button should display the following URL containing one result...
-</p>
-<p>
-<a href="http://localhost:8983/solr/collection1/select?q=solr&amp;wt=xml">http://localhost:8983/solr/collection1/select?q=solr&amp;wt=xml</a>
-
-</p>
-<p>
-You can index all of the sample data, using the following command 
-(assuming your command line shell supports the *.xml notation):
-</p>
-<pre class="code">
-user:~/solr/example/exampledocs$ <strong>java -jar post.jar *.xml</strong>
-SimplePostTool: version 1.4
-SimplePostTool: POSTing files to http://localhost:8983/solr/update..
-SimplePostTool: POSTing file gb18030-example.xml
-SimplePostTool: POSTing file hd.xml
-SimplePostTool: POSTing file ipod_other.xml
-SimplePostTool: POSTing file ipod_video.xml
-...
-SimplePostTool: POSTing file solr.xml
-SimplePostTool: POSTing file utf8-example.xml
-SimplePostTool: POSTing file vidcard.xml
-SimplePostTool: COMMITting Solr index changes..
-</pre>
-<p>
-  ...and now you can search for all sorts of things using the default <a href="http://wiki.apache.org/solr/SolrQuerySyntax">Solr Query Syntax</a> (a superset of the Lucene query syntax)...
-</p>
-<ul>
-  
-<li>
-<a href="http://localhost:8983/solr/#/collection1/query?q=video">video</a>
-</li>
-  
-<li>
-<a href="http://localhost:8983/solr/#/collection1/query?q=name:video">name:video</a>
-</li>
-  
-<li>
-<a href="http://localhost:8983/solr/#/collection1/query?q=%2Bvideo%20%2Bprice%3A[*%20TO%20400]">+video +price:[* TO 400]</a>
-</li>
-
-
-</ul>
-<p></p>
-<p>
-  There are many other different ways to import your data into Solr... one can
-</p>
-<ul>
-  
-<li>Import records from a database using the 
-    <a href="http://wiki.apache.org/solr/DataImportHandler">Data Import Handler (DIH)</a>.
-  </li>
-  
-<li>
-<a href="http://wiki.apache.org/solr/UpdateCSV">Load a CSV file</a> (comma separated values),
-   including those exported by Excel or MySQL.
-  </li> 
-  
-<li>
-<a href="http://wiki.apache.org/solr/UpdateJSON">POST JSON documents</a>
-  
-</li> 
-  
-<li>Index binary documents such as Word and PDF with 
-    <a href="http://wiki.apache.org/solr/ExtractingRequestHandler">Solr Cell</a> (ExtractingRequestHandler).
-  </li>
-  
-<li>
-    Use <a href="http://wiki.apache.org/solr/Solrj">SolrJ</a> for Java or other Solr clients to
-    programatically create documents to send to Solr.
-  </li>
-
-
-</ul>
-</div>
-
-
-
-
-<a name="N100EE"></a><a name="Updating+Data"></a>
-<h2 class="boxed">Updating Data</h2>
-<div class="section">
-<p>
-You may have noticed that even though the file <span class="codefrag">solr.xml</span> has now
-been POSTed to the server twice, you still only get 1 result when searching for
-"solr".  This is because the example <span class="codefrag">schema.xml</span> specifies a "<span class="codefrag">uniqueKey</span>" field
-called "<span class="codefrag">id</span>".  Whenever you POST commands to Solr to add a
-document with the same value for the <span class="codefrag">uniqueKey</span> as an existing document, it
-automatically replaces it for you.  You can see that that has happened by
-looking at the values for <span class="codefrag">numDocs</span> and <span class="codefrag">maxDoc</span> in the
-"CORE"/searcher section of the statistics page...  </p>
-<p>
-
-<a href="http://localhost:8983/solr/#/collection1/plugins/core?entry=searcher">http://localhost:8983/solr/#/collection1/plugins/core?entry=searcher</a>
-
-</p>
-<p>
-  
-<strong><span class="codefrag">numDocs</span></strong> represents the number of searchable documents in the
-  index (and will be larger than the number of XML files since some files
-  contained more than one <span class="codefrag">&lt;doc&gt;</span>). <strong><span class="codefrag">maxDoc</span></strong>
-  may be larger as the <span class="codefrag">maxDoc</span> count includes logically deleted documents that
-  have not yet been removed from the index. You can re-post the sample XML
-  files over and over again as much as you want and <span class="codefrag">numDocs</span> will never
-  increase, because the new documents will constantly be replacing the old.
-</p>
-<p>
-Go ahead and edit the existing XML files to change some of the data, and re-run
-the <span class="codefrag">java -jar post.jar</span> command, you'll see your changes reflected
-in subsequent searches.
-</p>
-<a name="N1012D"></a><a name="Deleting+Data"></a>
-<h3 class="boxed">Deleting Data</h3>
-
-<p>
-You can delete data by POSTing a delete command to the update URL and 
-specifying the value of the document's unique key field, or a query that 
-matches multiple documents (be careful with that one!).  Since these commands
-are smaller, we will specify them right on the command line rather than 
-reference an XML file.
-</p>
-
-<p>Execute the following command to delete a specific document</p>
-<pre class="code">java -Ddata=args -Dcommit=false -jar post.jar "&lt;delete&gt;&lt;id&gt;SP2514N&lt;/id&gt;&lt;/delete&gt;"</pre>
-
-<p>
-Because we have specified "commit=false", a search for <a href="http://localhost:8983/solr/#/collection1/query?q=id:SP2514N">id:SP2514N</a> we still find the document we have deleted.  Since the example configuration uses Solr's "autoCommit" feature Solr will still automatically persist this change to the index, but it will not affect search results until an "openSearcher" commit is explicitly executed.
-</p>
-
-<p>
-Using the <a href="http://localhost:8983/solr/#/collection1/plugins/updatehandler?entry=updateHandler">statistics page</a> 
-for the <span class="codefrag">updateHandler</span> you can observe this delete
-propogate to disk by watching the <span class="codefrag">deletesById</span> 
-value drop to 0 as the <span class="codefrag">cumulative_deletesById</span> 
-and <span class="codefrag">autocommit</span> values increase.
-</p>
-
-<p>
-Here is an example of using delete-by-query to delete anything with
-<a href="http://localhost:8983/solr/collection1/select?q=name:DDR&amp;fl=name">DDR</a> in the name:
-</p>
-<pre class="code">java -Dcommit=false -Ddata=args -jar post.jar "&lt;delete&gt;&lt;query&gt;name:DDR&lt;/query&gt;&lt;/delete&gt;"</pre>
-
-<p>
-You can force a new searcher to be opened to reflect these changes by sending an explicit commit command to Solr:
-</p>
-<pre class="code">java -jar post.jar -</pre>
-
-<p>
-Now re-execute <a href="http://localhost:8983/solr/#/collection1/query?q=id:SP2514N">the previous search</a> 
-and verify that no matching documents are found.  You can also revisit the
-statistics page and observe the changes to both the number of commits in the <a href="http://localhost:8983/solr/#/collection1/plugins/updatehandler?entry=updateHandler">updateHandler</a> and the numDocs in the <a href="http://localhost:8983/solr/#/collection1/plugins/core?entry=searcher">searcher</a>.
-</p>
-
-<p>
-Commits that open a new searcher can be expensive operations so it's best to 
-make many changes to an index in a batch and then send the 
-<span class="codefrag">commit</span> command at the end.  
-There is also an <span class="codefrag">optimize</span> command that does the 
-same things as <span class="codefrag">commit</span>, but also forces all index 
-segments to be merged into a single segment -- this can be very resource 
-intensive, but may be worthwhile for improving search speed if your index
-changes very infrequently.
-</p>
-<p>
-All of the update commands can be specified using either <a href="http://wiki.apache.org/solr/UpdateXmlMessages">XML</a> or <a href="http://wiki.apache.org/solr/UpdateJSON">JSON</a>.
-</p>
-
-<p>To continue with the tutorial, re-add any documents you may have deleted by going to the <span class="codefrag">exampledocs</span> directory and executing</p>
-<pre class="code">java -jar post.jar *.xml</pre>
-</div>
-
-
-<a name="N1017C"></a><a name="Querying+Data"></a>
-<h2 class="boxed">Querying Data</h2>
-<div class="section">
-<p>
-    Searches are done via HTTP GET on the <span class="codefrag">select</span> URL with the query string in the <span class="codefrag">q</span> parameter.
-    You can pass a number of optional <a href="http://wiki.apache.org/solr/SearchHandler">request parameters</a>
-    to the request handler to control what information is returned.  For example, you can use the "<span class="codefrag">fl</span>" parameter
-    to control what stored fields are returned, and if the relevancy score is returned:
-  </p>
-<ul>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;fl=name,id">q=video&amp;fl=name,id</a>       (return only name and id fields)   </li>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;fl=name,id,score">q=video&amp;fl=name,id,score</a>  (return relevancy score as well) </li>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;fl=*,score">q=video&amp;fl=*,score</a>        (return all stored fields, as well as relevancy score)  </li>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;sort=price desc&amp;fl=name,id,price">q=video&amp;sort=price desc&amp;fl=name,id,price</a>  (add sort specification: sort by price descending) </li>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;wt=json">q=video&amp;wt=json</a> (return response in JSON format)  </li>
-    
-</ul>
-<p>
-The <a href="http://localhost:8983/solr/#/collection1/query">query form</a> 
-provided in the web admin interface allows setting various request parameters 
-and is useful when testing or debugging queries.
-</p>
-
-<a name="N101BA"></a><a name="Sorting"></a>
-<h3 class="boxed">Sorting</h3>
-<p>
-      Solr provides a simple method to sort on one or more indexed fields.
-      Use the "<span class="codefrag">sort</span>' parameter to specify "field direction" pairs, separated by commas if there's more than one sort field:
-    </p>
-<ul>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;sort=price+desc">q=video&amp;sort=price desc</a>
-</li>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;sort=price+asc">q=video&amp;sort=price asc</a>
-</li>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;sort=inStock+asc,price+desc">q=video&amp;sort=inStock asc, price desc</a>
-</li>
-    
-</ul>
-<p>
-      "<span class="codefrag">score</span>" can also be used as a field name when specifying a sort:
-    </p>
-<ul>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;sort=score+desc">q=video&amp;sort=score desc</a>
-</li>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;sort=inStock+asc,score+desc">q=video&amp;sort=inStock asc, score desc</a>
-</li>
-    
-</ul>
-<p>
-      Complex functions may also be used to sort results:
-    </p>
-<ul>
-      
-<li>
-<a href="http://localhost:8983/solr/collection1/select/?indent=on&amp;q=video&amp;sort=div(popularity,add(price,1))+desc">q=video&amp;sort=div(popularity,add(price,1)) desc</a>
-</li>
-    
-</ul>
-<p>
-      If no sort is specified, the default is <span class="codefrag">score desc</span> to return the matches having the highest relevancy.
-    </p>
-</div>
-
-
-
-<a name="N101FE"></a><a name="Highlighting"></a>
-<h2 class="boxed">Highlighting</h2>
-<div class="section">
-<p>
-    Hit highlighting returns relevant snippets of each returned document, and highlights
-    terms from the query within those context snippets.
-  </p>
-<p>
-    The following example searches for <span class="codefrag">video card</span> and requests
-    highlighting on the fields <span class="codefrag">name,features</span>.  This causes a
-    <span class="codefrag">highlighting</span> section to be added to the response with the
-    words to highlight surrounded with <span class="codefrag">&lt;em&gt;</span> (for emphasis)
-    tags.
-  </p>
-<p>
-    
-<a href="http://localhost:8983/solr/collection1/select/?wt=json&amp;indent=on&amp;q=video+card&amp;fl=name,id&amp;hl=true&amp;hl.fl=name,features">...&amp;q=video card&amp;fl=name,id&amp;hl=true&amp;hl.fl=name,features</a>
-  
-</p>
-<p>
-    More request parameters related to controlling highlighting may be found
-    <a href="http://wiki.apache.org/solr/HighlightingParameters">here</a>.
-  </p>
-</div> <!-- highlighting -->
-
-
-
-<a name="N10227"></a><a name="Faceted+Search"></a>
-<h2 class="boxed">Faceted Search</h2>
-<div class="section">
-<p>
-    Faceted search takes the documents matched by a query and generates counts for various
-    properties or categories.  Links are usually provided that allows users to "drill down" or
-    refine their search results based on the returned categories.
-  </p>
-<p>
-    The following example searches for all documents (<span class="codefrag">*:*</span>) and
-    requests counts by the category field <span class="codefrag">cat</span>.
-  </p>
-<p>
-    
-<a href="http://localhost:8983/solr/collection1/select/?wt=json&amp;indent=on&amp;q=*:*&amp;fl=name&amp;facet=true&amp;facet.field=cat">...&amp;q=*:*&amp;facet=true&amp;facet.field=cat</a>
-  
-</p>
-<p>
-    Notice that although only the first 10 documents are returned in the results list,
-    the facet counts generated are for the complete set of documents that match the query.
-  </p>
-<p>
-    We can facet multiple ways at the same time.  The following example adds a facet on the
-    boolean <span class="codefrag">inStock</span> field:
-  </p>
-<p>
-    
-<a href="http://localhost:8983/solr/collection1/select/?wt=json&amp;indent=on&amp;q=*:*&amp;fl=name&amp;facet=true&amp;facet.field=cat&amp;facet.field=inStock">...&amp;q=*:*&amp;facet=true&amp;facet.field=cat&amp;facet.field=inStock</a>
-  
-</p>
-<p>
-    Solr can also generate counts for arbitrary queries. The following example
-    queries for <span class="codefrag">ipod</span> and shows prices below and above 100 by using
-    range queries on the price field.
-  </p>
-<p>
-    
-<a href="http://localhost:8983/solr/collection1/select/?wt=json&amp;indent=on&amp;q=ipod&amp;fl=name&amp;facet=true&amp;facet.query=price:[0+TO+100]&amp;facet.query=price:[100+TO+*]">...&amp;q=ipod&amp;facet=true&amp;facet.query=price:[0 TO 100]&amp;facet.query=price:[100 TO *]</a>
-  
-</p>
-<p>
-    Solr can even facet by numeric ranges (including dates).  This example requests counts for the manufacture date (<span class="codefrag">manufacturedate_dt</span> field) for each year between 2004 and 2010.
-  </p>
-<p>
-    
-<a href="http://localhost:8983/solr/collection1/select/?wt=json&amp;indent=on&amp;q=*:*&amp;fl=name,manufacturedate_dt&amp;facet=true&amp;facet.range=manufacturedate_dt&amp;facet.range.start=2004-01-01T00:00:00Z&amp;facet.range.end=2010-01-01T00:00:00Z&amp;facet.range.gap=%2b1YEAR">...&amp;q=*:*&amp;facet=true&amp;facet.range=manufacturedate_dt&amp;facet.range.start=2004-01-01T00:00:00Z&amp;facet.range.end=2010-01-01T00:00:00Z&amp;facet.range.gap=+1YEAR</a>
-  
-</p>
-<p>
-    More information on faceted search may be found on the 
-    <a href="http://wiki.apache.org/solr/SolrFacetingOverview">faceting overview</a>
-    and
-    <a href="http://wiki.apache.org/solr/SimpleFacetParameters">faceting parameters</a>
-    pages.
-  </p>
-</div> <!-- faceted search -->
-
-
-
-<a name="N10278"></a><a name="Search+UI"></a>
-<h2 class="boxed">Search UI</h2>
-<div class="section">
-<p>
-Solr includes an example search interface built with <a href="https://wiki.apache.org/solr/VelocityResponseWriter">velocity templating</a>
-that demonstrates many features, including searching, faceting, highlighting,
-autocomplete, and geospatial searching. 
-</p>
-<p>
-Try it out at
-<a href="http://localhost:8983/solr/collection1/browse">http://localhost:8983/solr/collection1/browse</a>
-  
-</p>
-</div> <!-- Search UI -->
-
-
-
-
-<a name="N1028B"></a><a name="Text+Analysis"></a>
-<h2 class="boxed">Text Analysis</h2>
-<div class="section">
-<p>
-    Text fields are typically indexed by breaking the text into words and applying various transformations such as
-    lowercasing, removing plurals, or stemming to increase relevancy.  The same text transformations are normally
-    applied to any queries in order to match what is indexed.
-  </p>
-<p>
-    The <a href="http://wiki.apache.org/solr/SchemaXml">schema</a> defines
-    the fields in the index and what type of analysis is applied to them.  The current schema your collection is using
-    may be viewed directly via the <a href="http://localhost:8983/solr/#/collection1/schema">Schema tab</a> in the Admin UI, or explored dynamically using the <a href="http://localhost:8983/solr/#/collection1/schema-browser">Schema Browser tab</a>.
-</p>
-<p>
-The best analysis components (tokenization and filtering) for your textual 
-content depends heavily on language.
-As you can see in the <a href="http://localhost:8983/solr/#/collection1/schema-browser?type=text_general">Schema Browser</a>, 
-many of the fields in the example schema are using a 
-<span class="codefrag">fieldType</span> named 
-<span class="codefrag">text_general</span>, which has defaults appropriate for 
-most languages.
-</p>
-
-<p>
-  If you know your textual content is English, as is the case for the example 
-  documents in this tutorial, and you'd like to apply English-specific stemming
-  and stop word removal, as well as split compound words, you can use the 
-  <a href="http://localhost:8983/solr/#/collection1/schema-browser?type=text_en_splitting"><span class="codefrag">text_en_splitting</span></a> fieldType instead.
-  Go ahead and edit the <span class="codefrag">schema.xml</span> in the 
-  <span class="codefrag">solr/example/solr/collection1/conf</span> directory,
-  to use the <span class="codefrag">text_en_splitting</span> fieldType for 
-  the <span class="codefrag">text</span> and 
-  <span class="codefrag">features</span> fields like so:
-</p>
-<pre class="code">
-   &lt;field name="features" <b>type="text_en_splitting"</b> indexed="true" stored="true" multiValued="true"/&gt;
-   ...
-   &lt;field name="text" <b>type="text_en_splitting"</b> indexed="true" stored="false" multiValued="true"/&gt;
-</pre>
-<p>
-  Stop and restart Solr after making these changes and then re-post all of 
-  the example documents using 
-  <span class="codefrag">java -jar post.jar *.xml</span>.  
-  Now queries like the ones listed below will demonstrate English-specific 
-  transformations:
-  </p>
-<ul>
-    
-<li>A search for
-  <a href="http://localhost:8983/solr/collection1/select?q=power-shot&amp;fl=name">power-shot</a>
-  can match <span class="codefrag">PowerShot</span>, and
-  <a href="http://localhost:8983/solr/collection1/select?q=adata&amp;fl=name">adata</a>
-  can match <span class="codefrag">A-DATA</span> by using the 
-  <span class="codefrag">WordDelimiterFilter</span> and <span class="codefrag">LowerCaseFilter</span>.
-</li>
-
-    
-<li>A search for
-  <a href="http://localhost:8983/solr/collection1/select?q=features:recharging&amp;fl=name,features">features:recharging</a>
-  can match <span class="codefrag">Rechargeable</span> using the stemming 
-  features of <span class="codefrag">PorterStemFilter</span>.
-</li>
-
-    
-<li>A search for
-  <a href="http://localhost:8983/solr/collection1/select?q=%221 gigabyte%22&amp;fl=name">"1 gigabyte"</a>
-  can match <span class="codefrag">1GB</span>, and the commonly misspelled
-  <a href="http://localhost:8983/solr/collection1/select?q=pixima&amp;fl=name">pixima</a> can matches <span class="codefrag">Pixma</span> using the 
-  <span class="codefrag">SynonymFilter</span>.
-</li>
-
-  
-</ul>
-<p>A full description of the analysis components, Analyzers, Tokenizers, and TokenFilters
-    available for use is <a href="http://wiki.apache.org/solr/AnalyzersTokenizersTokenFilters">here</a>.
-  </p>
-<a name="N1030B"></a><a name="Analysis+Debugging"></a>
-
-
-<h3 class="boxed">Analysis Debugging</h3>
-<p>
-There is a handy <a href="http://localhost:8983/solr/#/collection1/analysis">Analysis tab</a>
-where you can see how a text value is broken down into words by both Index time nad Query time analysis chains for a field or field type.  This page shows the resulting tokens after they pass through each filter in the chains.
-</p>
-<p>
-  <a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldvalue=Canon+Power-Shot+SD500&amp;analysis.query=&amp;analysis.fieldtype=text_en_splitting&amp;verbose_output=0">This url</a>
-  shows the tokens created from 
-  "<span class="codefrag">Canon Power-Shot SD500</span>" 
-  using the 
-  <span class="codefrag">text_en_splitting</span> type.  Each section of
-  the table shows the resulting tokens after having passed through the next 
-  <span class="codefrag">TokenFilter</span> in the (Index) analyzer.
-  Notice how both <span class="codefrag">powershot</span> and 
-  <span class="codefrag">power</span>, <span class="codefrag">shot</span> 
-  are indexed, using tokens that have the same "position".
-  (Compare the previous output with
-  <a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldvalue=Canon+Power-Shot+SD500&amp;analysis.query=&amp;analysis.fieldtype=text_general&amp;verbose_output=0">The tokens produced using the text_general field type</a>.)
-</p>
-
-<p>
-Mousing over the section label to the left of the section will display the full name of the analyzer component at that stage of the chain.  Toggling the "Verbose Output" checkbox will <a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldvalue=Canon+Power-Shot+SD500&amp;analysis.query=&amp;analysis.fieldtype=text_en_splitting&amp;verbose_output=1">show/hide the detailed token attributes</a>.
-</p>
-<p>
-When both <a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldvalue=Canon+Power-Shot+SD500&amp;analysis.query=power+shot+sd-500&amp;analysis.fieldtype=text_en_splitting&amp;verbose_output=0">Index and Query</a>
-values are provided, two tables will be displayed side by side showing the 
-results of each chain.  Terms in the Index chain results that are equivalent
-to the final terms produced by the Query chain will be highlighted.
-</p>
-<p>
-  Other interesting examples:
-</p>
-<ul>
-  <li><a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldvalue=A+new+nation%2C+conceived+in+liberty+and+dedicated+to+the+proposition+that+all+men+are+created+equal.%0A&amp;analysis.query=liberties+and+equality&amp;analysis.fieldtype=text_en&amp;verbose_output=0">English stemming and stop-words</a> 
-    using the <span class="codefrag">text_en</span> field type
-  </li>
-  <li><a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldtype=text_cjk&amp;analysis.fieldvalue=%EF%BD%B6%EF%BE%80%EF%BD%B6%EF%BE%85&amp;analysis.query=%E3%82%AB%E3%82%BF%E3%82%AB%E3%83%8A&amp;verbose_output=1">Half-width katakana normalization with bi-graming</a> 
-    using the <span class="codefrag">text_cjk</span> field type
-  </li>
-  <li><a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldtype=text_ja&amp;analysis.fieldvalue=%E7%A7%81%E3%81%AF%E5%88%B6%E9%99%90%E3%82%B9%E3%83%94%E3%83%BC%E3%83%89%E3%82%92%E8%B6%85%E3%81%88%E3%82%8B%E3%80%82&amp;verbose_output=1">Japanese morphological decomposition with part-of-speech filtering</a>
-    using the <span class="codefrag">text_ja</span> field type 
-  </li>
-  <li><a href="http://localhost:8983/solr/#/collection1/analysis?analysis.fieldtype=text_ar&amp;analysis.fieldvalue=%D9%84%D8%A7+%D8%A3%D8%AA%D9%83%D9%84%D9%85+%D8%A7%D9%84%D8%B9%D8%B1%D8%A8%D9%8A%D8%A9&amp;verbose_output=1">Arabic stop-words, normalization, and stemming</a>
-    using the <span class="codefrag">text_ar</span> field type 
-  </li>
-</ul>
-
-</div>
-
-
-<a name="N1034D"></a><a name="Conclusion"></a>
-<h2 class="boxed">Conclusion</h2>
-<div class="section">
-<p>
-  Congratulations!  You successfully ran a small Solr instance, added some
-  documents, and made changes to the index and schema.  You learned about queries, text
-  analysis, and the Solr admin interface.  You're ready to start using Solr on
-  your own project!  Continue on with the following steps:
-</p>
-<ul>
-  
-<li>Subscribe to the Solr <a href="http://lucene.apache.org/solr/discussion.html">mailing lists</a>!</li>
-  
-<li>Make a copy of the Solr <span class="codefrag">example</span> directory as a template for your project.</li>
-  
-<li>Customize the schema and other config in <span class="codefrag">solr/collection1/conf/</span> to meet your needs.</li> 
-
-</ul>
-<p>
-  Solr has a ton of other features that we haven't touched on here, including
-  <a href="http://wiki.apache.org/solr/DistributedSearch">distributed search</a>
-  to handle huge document collections,
-  <a href="http://wiki.apache.org/solr/FunctionQuery">function queries</a>,
-  <a href="http://wiki.apache.org/solr/StatsComponent">numeric field statistics</a>,
-  and
-  <a href="http://wiki.apache.org/solr/ClusteringComponent">search results clustering</a>.
-  Explore the <a href="http://wiki.apache.org/solr/FrontPage">Solr Wiki</a> to find
-  more details about Solr's many <a href="http://lucene.apache.org/solr/features.html">features</a>.
-</p>
-<p>
-  Have Fun, and we'll see you on the Solr mailing lists!
-</p>
-</div>
-
-</div>
-
-<div class="clearboth">&nbsp;</div>
-
-<div id="footer">
-<div class="copyright">
-        Copyright &copy;
-         2012 <a href="http://www.apache.org/licenses/">The Apache Software Foundation.</a>
-</div>
-</div>
-</body>
-</html>
diff --git a/solr/site/xsl/index.xsl b/solr/site/index.xsl
similarity index 91%
rename from solr/site/xsl/index.xsl
rename to solr/site/index.xsl
index c59825d..04e58b3 100644
--- a/solr/site/xsl/index.xsl
+++ b/solr/site/index.xsl
@@ -34,13 +34,13 @@
     <html>
       <head>
         <title><xsl:text>Apache Solr </xsl:text><xsl:value-of select="$version"/><xsl:text> Documentation</xsl:text></title>
-        <link rel="icon" type="image/x-icon" href="/solr/assets/images/favicon.ico"/>
-        <link rel="shortcut icon" type="image/x-icon" href="/solr/assets/images/favicon.ico"/>
+        <link rel="icon" type="image/x-icon" href="images/favicon.ico"/>
+        <link rel="shortcut icon" type="image/x-icon" href="images/favicon.ico"/>
       </head>
       <body>
         <div>
           <a href="http://lucene.apache.org/solr/">
-            <img src="solr.svg" style="width:210px; margin:22px 0px 7px 20px; border:none;" title="Apache Solr Logo" alt="Solr" />
+            <img src="images/solr.svg" style="width:210px; margin:22px 0px 7px 20px; border:none;" title="Apache Solr Logo" alt="Solr" />
           </a>
           <div style="z-index:100;position:absolute;top:25px;left:226px">
             <span style="font-size: x-small">TM</span>
@@ -74,7 +74,7 @@
             <li><a href="http://wiki.apache.org/solr">Wiki</a>: Additional documentation, especially focused on using Solr.</li>
             <li><a href="changes/Changes.html">Changes</a>: List of changes in this release.</li>
             <li><a href="SYSTEM_REQUIREMENTS.html">System Requirements</a>: Minimum and supported Java versions.</li>
-            <li><a href="tutorial.html">Solr Tutorial</a>: This document covers the basics of running Solr using an example schema, and some sample data.</li>
+            <li><a href="quickstart.html">Solr Quick Start</a>: This document covers the basics of running Solr using an example schema, and some sample data.</li>
             <li><a href="{$luceneJavadocUrl}index.html">Lucene Documentation</a></li>
           </ul>
         <h2>API Javadocs</h2>
diff --git a/solr/site/quickstart.mdtext b/solr/site/quickstart.mdtext
new file mode 100644
index 0000000..42e51f8
--- /dev/null
+++ b/solr/site/quickstart.mdtext
@@ -0,0 +1,604 @@
+# Solr Quick Start
+
+## Overview
+
+This document covers getting Solr up and running, ingesting a variety of data sources into multiple collections,
+and getting a feel for the Solr administrative and search interfaces.
+
+## Requirements
+
+To follow along with this tutorial, you will need...
+
+1. To meet the [system requirements](SYSTEM_REQUIREMENTS.html)
+2. An Apache Solr release.  This tutorial was written using Apache Solr 5.0.0.
+
+## Getting Started
+
+Please run the browser showing this tutorial and the Solr server on the same machine so tutorial links will correctly
+point to your Solr server.
+
+Begin by unzipping the Solr release and changing your working directory to the subdirectory where Solr was installed.
+Note that the base directory name may vary with the version of Solr downloaded.  For example, with a shell in UNIX,
+Cygwin, or MacOS:
+
+    /:$ ls solr*
+    solr-5.0.0.zip
+    /:$ unzip -q solr-5.0.0.zip
+    /:$ cd solr-5.0.0/
+
+To launch Solr, run: `bin/solr start -e cloud -noprompt`
+
+    /solr-5.0.0:$ bin/solr start -e cloud -noprompt
+
+    Welcome to the SolrCloud example!
+
+
+    Starting up 2 Solr nodes for your example SolrCloud cluster.
+    ...
+
+    Started Solr server on port 8983 (pid=8404). Happy searching!
+    ...
+
+    Started Solr server on port 7574 (pid=8549). Happy searching!
+    ...
+
+    SolrCloud example running, please visit http://localhost:8983/solr
+
+    /solr-5.0.0:$ _
+
+You can see that Solr is running by loading the Solr Admin UI in your web browser: <http://localhost:8983/solr/>.
+This is the main starting point for administering Solr.
+
+Solr will now be running two "nodes", one on port 7574 and one on port 8983.  There is one collection created
+automatically, `gettingstarted`: a two-shard collection, each shard with two replicas.
+The [Cloud tab](http://localhost:8983/solr/#/~cloud) in the Admin UI diagrams the collection nicely:
+
+<img alt="Solr Quick Start: SolrCloud diagram" class="float-right" width="50%" src="images/quickstart-solrcloud.png" />
+
+## Indexing Data
+
+Your Solr server is up and running, but it doesn't contain any data.  The Solr install includes the `bin/post`* tool to
+make it easy to get various types of documents into Solr from the start.  We'll be
+using this tool for the indexing examples below.
+
+You'll need a command shell to run these examples, rooted in the Solr install directory; the shell from where you
+launched Solr works just fine.
+
+* NOTE: Currently the `bin/post` tool does not have a comparable Windows script, but the underlying Java program invoked
+is available.  See the [Post Tool, Windows section](https://cwiki.apache.org/confluence/display/solr/Post+Tool#PostTool-Windows)
+for details.
+
+### Indexing a directory of "rich" files
+
+Let's first index local "rich" files including HTML, PDF, Microsoft Office formats (such as MS Word), plain text and
+many other formats.  `bin/post` features the ability to crawl a directory of files, even recursively,
+sending the raw content of each file into Solr for extraction and indexing.  A Solr install includes a `docs/`
+subdirectory, which makes a convenient built-in set of (mostly) HTML files to start with.
+
+    bin/post -c gettingstarted docs/
+
+Here's what it'll look like:
+
+    /solr-5.0.0:$ bin/post -c gettingstarted docs/
+    java -classpath /solr-5.0.0/dist/solr-core-5.0.0.jar -Dauto=yes -Dc=gettingstarted -Ddata=files -Drecursive=yes org.apache.solr.util.SimplePostTool docs/
+    SimplePostTool version 5.0.0
+    Posting files to [base] url http://localhost:8983/solr/gettingstarted/update...
+    Entering auto mode. File endings considered are xml,json,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log
+    Entering recursive mode, max depth=999, delay=0s
+    Indexing directory docs (3 files, depth=0)
+    POSTing file index.html (text/html) to [base]/extract
+    POSTing file quickstart.html (text/html) to [base]/extract
+    POSTing file SYSTEM_REQUIREMENTS.html (text/html) to [base]/extract
+    Indexing directory docs/changes (1 files, depth=1)
+    POSTing file Changes.html (text/html) to [base]/extract
+    ...
+    3248 files indexed.
+    COMMITting Solr index changes to http://localhost:8983/solr/gettingstarted/update...
+    Time spent: 0:00:41.660
+
+
+The command-line breaks down as follows:
+
+   * `-c gettingstarted`: name of the collection to index into
+   * `docs/`: the relative path to the Solr install's `docs/` directory
+
+You have now indexed thousands of documents into the `gettingstarted` collection in Solr and committed these changes.
+You can search for "solr" by loading the Admin UI [Query tab](#admin-collection), enter "solr" in the `q` param
+(replacing `*:*`, which matches all documents), and "Execute Query".  See the [Searching](#searching)
+section below for more information.
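+
+As a preview of the [Searching](#searching) section below, the same search can also be issued directly over HTTP,
+for example with `curl`:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=solr"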
+
+To index your own data, re-run the directory indexing command pointing at your own directory of documents.  For example,
+on a Mac instead of `docs/` try `~/Documents/` or `~/Desktop/`!   You may want to start from a clean, empty system
+again rather than index your content alongside the Solr `docs/` content; see the Cleanup section [below](#cleanup)
+for how to get back to a clean starting point.
+
+### Indexing Solr XML
+
+Solr supports indexing structured content in a variety of incoming formats.  The historically predominant format for
+getting structured content into Solr has been [Solr XML](https://cwiki.apache.org/confluence/display/solr/Uploading+Data+with+Index+Handlers#UploadingDatawithIndexHandlers-XMLFormattedIndexUpdates).
+Many Solr indexers have been coded to process domain content into Solr XML output, generally HTTP POSTed directly to
+Solr's `/update` endpoint.
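+
+For illustration, a minimal Solr XML update can be POSTed to that endpoint with `curl`; this is a sketch of the same
+operation `bin/post` performs (the document id and field values here are made up):
+
+    curl "http://localhost:8983/solr/gettingstarted/update?commit=true" -H "Content-Type: text/xml" \
+        --data-binary "<add><doc><field name='id'>test-doc-1</field><field name='name'>Test Document</field></doc></add>"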
+
+<a name="techproducts"/>
+The Solr install includes a handful of Solr XML formatted files with example data (mostly mocked tech product data).
+NOTE: This tech product data has a more domain-specific configuration, including schema and browse UI.  The `bin/solr`
+script includes built-in support for this: running `bin/solr start -e techproducts` not only starts Solr but
+also indexes this data (be sure to `bin/solr stop -all` beforehand).
+
+Using `bin/post`, index the example Solr XML files in `example/exampledocs/`:
+
+    bin/post -c gettingstarted example/exampledocs/*.xml
+
+Here's what you'll see:
+
+    /solr-5.0.0:$ bin/post -c gettingstarted example/exampledocs/*.xml
+    java -classpath /solr-5.0.0/dist/solr-core-5.0.0-SNAPSHOT.jar -Dauto=yes -Dc=gettingstarted -Ddata=files org.apache.solr.util.SimplePostTool example/exampledocs/gb18030-example.xml ...
+    SimplePostTool version 5.0.0
+    Posting files to [base] url http://localhost:8983/solr/gettingstarted/update...
+    Entering auto mode. File endings considered are xml,json,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log
+    POSTing file gb18030-example.xml (application/xml) to [base]
+    POSTing file hd.xml (application/xml) to [base]
+    POSTing file ipod_other.xml (application/xml) to [base]
+    POSTing file ipod_video.xml (application/xml) to [base]
+    POSTing file manufacturers.xml (application/xml) to [base]
+    POSTing file mem.xml (application/xml) to [base]
+    POSTing file money.xml (application/xml) to [base]
+    POSTing file monitor.xml (application/xml) to [base]
+    POSTing file monitor2.xml (application/xml) to [base]
+    POSTing file mp500.xml (application/xml) to [base]
+    POSTing file sd500.xml (application/xml) to [base]
+    POSTing file solr.xml (application/xml) to [base]
+    POSTing file utf8-example.xml (application/xml) to [base]
+    POSTing file vidcard.xml (application/xml) to [base]
+    14 files indexed.
+    COMMITting Solr index changes to http://localhost:8983/solr/gettingstarted/update...
+    Time spent: 0:00:01.857
+
+...and now you can search for all sorts of things using the default [Solr Query Syntax](https://cwiki.apache.org/confluence/display/solr/The+Standard+Query+Parser#TheStandardQueryParser-SpecifyingTermsfortheStandardQueryParser)
+(a superset of the Lucene query syntax)...
+
+NOTE:
+You can browse the documents indexed at <http://localhost:8983/solr/gettingstarted/browse>.  The `/browse` UI gives you
+a feel for working with Solr's technical capabilities in a familiar, though somewhat rough and prototypical,
+interactive HTML view.  (The `/browse` view defaults to assuming the `gettingstarted` schema and data are a catch-all mix
+of structured XML, JSON, CSV example data, and unstructured rich documents.  Your own data may not look ideal at first,
+though the `/browse` templates are customizable.)
+
+### Indexing JSON
+
+Solr supports indexing JSON, either arbitrary structured JSON or "Solr JSON" (which is similar to Solr XML).
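+
+For illustration, "Solr JSON" is just a list of documents with field/value pairs, along these lines (a sketch, not the
+contents of the actual sample file):
+
+    [
+      {"id" : "book-a", "cat" : ["book"], "name" : "A Sample Book"},
+      {"id" : "book-b", "cat" : ["book"], "name" : "Another Sample Book"}
+    ]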
+
+Solr includes a small sample Solr JSON file to illustrate this capability.  Again using `bin/post`, index the
+sample JSON file:
+
+    bin/post -c gettingstarted example/exampledocs/books.json
+
+You'll see:
+
+    /solr-5.0.0:$ bin/post -c gettingstarted example/exampledocs/books.json
+    java -classpath /solr-5.0.0/dist/solr-core-5.0.0-SNAPSHOT.jar -Dauto=yes -Dc=gettingstarted -Ddata=files org.apache.solr.util.SimplePostTool example/exampledocs/books.json
+    SimplePostTool version 5.0.0
+    Posting files to [base] url http://localhost:8983/solr/gettingstarted/update...
+    Entering auto mode. File endings considered are xml,json,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log
+    POSTing file books.json (application/json) to [base]
+    1 files indexed.
+    COMMITting Solr index changes to http://localhost:8983/solr/gettingstarted/update...
+    Time spent: 0:00:00.377
+
+
+To flatten (and/or split) and index arbitrary structured JSON, a topic beyond this quick start guide, check out
+[Transforming and Indexing Custom JSON data](https://cwiki.apache.org/confluence/display/solr/Uploading+Data+with+Index+Handlers#UploadingDatawithIndexHandlers-TransformingandIndexingcustomJSONdata).
+
+### Indexing CSV (Comma/Column Separated Values)
+
+CSV is a great conduit of data into Solr, especially when the documents are homogeneous, all sharing the
+same set of fields.  CSV can be conveniently exported from a spreadsheet such as Excel, or from databases such
+as MySQL.  When getting started with Solr, it is often easiest to get your structured data into CSV format and then
+index that into Solr rather than to build a more sophisticated single-step operation.
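+
+For illustration, a CSV file for the example schema might look like the sketch below: a header row naming the fields,
+then one document per line (the field names here are assumptions based on the example data):
+
+    id,cat,name,price,inStock
+    book-c,book,A Third Sample Book,7.99,true
+    book-d,book,A Fourth Sample Book,6.99,false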
+
+Using `bin/post`, index the included example CSV file:
+
+    bin/post -c gettingstarted example/exampledocs/books.csv
+
+In your terminal you'll see:
+
+    /solr-5.0.0:$ bin/post -c gettingstarted example/exampledocs/books.csv
+    java -classpath /solr-5.0.0/dist/solr-core-5.0.0-SNAPSHOT.jar -Dauto=yes -Dc=gettingstarted -Ddata=files org.apache.solr.util.SimplePostTool example/exampledocs/books.csv
+    SimplePostTool version 5.0.0
+    Posting files to [base] url http://localhost:8983/solr/gettingstarted/update...
+    Entering auto mode. File endings considered are xml,json,csv,pdf,doc,docx,ppt,pptx,xls,xlsx,odt,odp,ods,ott,otp,ots,rtf,htm,html,txt,log
+    POSTing file books.csv (text/csv) to [base]
+    1 files indexed.
+    COMMITting Solr index changes to http://localhost:8983/solr/gettingstarted/update...
+    Time spent: 0:00:00.099
+
+
+### Other indexing techniques
+
+* Import records from a database using the [Data Import Handler (DIH)](https://cwiki.apache.org/confluence/display/solr/Uploading+Structured+Data+Store+Data+with+the+Data+Import+Handler).
+
+* Use [SolrJ](https://cwiki.apache.org/confluence/display/solr/Using+SolrJ) from JVM-based languages or
+other [Solr clients](https://cwiki.apache.org/confluence/display/solr/Client+APIs) to programmatically create documents
+to send to Solr.
+
+* Use the Admin UI core-specific Documents tab to paste in a document to be
+indexed, or select `Document Builder` from the `Document Type` dropdown to build a document one field at a time.
+Click on the `Submit Document` button below the form to index your document.
+
+***
+
+## Updating Data
+
+You may notice that even if you index content from this guide more than once, the search results are not duplicated.
+This is because the example `schema.xml` specifies a "`uniqueKey`" field called "`id`". Whenever you POST commands to
+Solr to add a document with the same value for the `uniqueKey` as an existing document, it automatically replaces it
+for you. You can see that this has happened by looking at the values for `numDocs` and `maxDoc` in the core-specific
+Overview section of the Solr Admin UI.
+
+`numDocs` represents the number of searchable documents in the index (and will be larger than the number of XML, JSON,
+or CSV files since some files contained more than one document).  The `maxDoc` value may be larger because it
+includes logically deleted documents that have not yet been physically removed from the index. You can re-post the
+sample files as often as you want and `numDocs` will never increase, because the new documents will simply replace the old ones.
+
+Go ahead and edit any of the existing example data files, change some of the data, and re-run the SimplePostTool command.
+You'll see your changes reflected in subsequent searches.
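+
+For example, re-running the earlier JSON command is harmless - the books are replaced rather than duplicated, and
+`numDocs` stays the same:
+
+    bin/post -c gettingstarted example/exampledocs/books.json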
+
+## Deleting Data
+
+You can delete data by POSTing a delete command to the update URL and specifying the value of the document's unique key
+field, or a query that matches multiple documents (be careful with that one!). Since these commands are small, we
+specify them directly on the command line rather than referencing a JSON or XML file.
+
+Execute the following command to delete a specific document:
+
+    bin/post -c gettingstarted -d "<delete><id>SP2514N</id></delete>"
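+
+To delete by query instead - here, every document whose `name` field matches "DDR" (an illustrative query; deleting by
+query is powerful, so double-check it first with a regular search):
+
+    bin/post -c gettingstarted -d "<delete><query>name:DDR</query></delete>"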
+
+
+<a name="searching"/>
+## Searching
+
+Solr can be queried via REST clients, cURL, wget, Chrome POSTMAN, etc., as well as via the native clients available for
+many programming languages.
+
+The Solr Admin UI includes a query builder interface - see the `gettingstarted` query tab at <http://localhost:8983/solr/#/gettingstarted_shard1_replica1/query>.
+If you click the `Execute Query` button without changing anything in the form, you'll get 10 documents in JSON
+format (`*:*` in the `q` param matches all documents):
+
+<img style="border:1px solid #ccc" width="50%" src="images/quickstart-query-screen.png" alt="Solr Quick Start: gettingstarted Query tab" class="float-right"/>
+
+The URL sent by the Admin UI to Solr is shown in light grey near the top right of the above screenshot - if you click on
+it, your browser will show you the raw response.  To use cURL, give the same URL in quotes on the `curl` command line:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?q=*%3A*&wt=json&indent=true"
+
+In the above URL, the "`:`" in "`q=*:*`" has been URL-encoded as "`%3A`", but since "`:`" has no reserved purpose in the
+query component of the URL (after the "`?`"), you don't need to URL encode it.  So the following also works:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?q=*:*&wt=json&indent=true"
+
+### Basics
+
+#### Search for a single term
+
+To search for a term, give it as the `q` param value in the core-specific Solr Admin UI Query section, replacing `*:*`
+with the term you want to find.  To search for "foundation":
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=foundation"
+
+You'll see:
+
+    /solr-5.0.0$ curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=foundation"
+    {
+      "responseHeader":{
+        "status":0,
+        "QTime":0,
+        "params":{
+          "indent":"true",
+          "q":"foundation",
+          "wt":"json"}},
+      "response":{"numFound":2812,"start":0,"docs":[
+          {
+            "id":"0553293354",
+            "cat":["book"],
+            "name":"Foundation",
+    ...
+
+The response indicates that there are 2,812 hits (`"numFound":2812`), of which the first 10 were returned, since by
+default `start=0` and `rows=10`.  You can specify these params to page through results, where `start` is the
+(zero-based) position of the first result to return, and `rows` is the page size.
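+
+For example, to fetch the second page of five results at a time (`start=5`, `rows=5`):
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=foundation&start=5&rows=5"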
+
+To restrict fields returned in the response, use the `fl` param, which takes a comma-separated list of field names.
+E.g. to only return the `id` field:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=foundation&fl=id"
+
+`q=foundation` matches nearly all of the docs we've indexed, since most of the files under `docs/` contain
+"The Apache Software Foundation".  To restrict search to a particular field, use the syntax "`q=field:value`",
+e.g. to search for `foundation` only in the `name` field:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=name:foundation"
+
+The above request returns only one document (`"numFound":1`) - from the response:
+
+    ...
+      "response":{"numFound":1,"start":0,"docs":[
+          {
+            "id":"0553293354",
+            "cat":["book"],
+            "name":"Foundation",
+    ...
+
+#### Phrase search
+
+To search for a multi-term phrase, enclose it in double quotes: `q="multiple terms here"`.  E.g. to search for
+"CAS latency" - note that the space between terms must be converted to "`+`" in a URL (the Admin UI will handle URL
+encoding for you automatically):
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=\"CAS+latency\""
+
+You'll get back:
+
+    {
+      "responseHeader":{
+        "status":0,
+        "QTime":0,
+        "params":{
+          "indent":"true",
+          "q":"\"CAS latency\"",
+          "wt":"json"}},
+      "response":{"numFound":2,"start":0,"docs":[
+          {
+            "id":"VDBDB1A16",
+            "name":"A-DATA V-Series 1GB 184-Pin DDR SDRAM Unbuffered DDR 400 (PC 3200) System Memory - OEM",
+            "manu":"A-DATA Technology Inc.",
+            "manu_id_s":"corsair",
+            "cat":["electronics", "memory"],
+            "features":["CAS latency 3,\t 2.7v"],
+    ...
+
+#### Combining searches
+
+By default, when you search for multiple terms and/or phrases in a single query, Solr will only require that one of them
+is present in order for a document to match.  Documents containing more terms will be sorted higher in the results list.
+
+You can require that a term or phrase is present by prefixing it with a "`+`"; conversely, to disallow the presence of a
+term or phrase, prefix it with a "`-`".
+
+To find documents that contain both terms "`one`" and "`three`", enter `+one +three` in the `q` param in the
+core-specific Admin UI Query tab.  Because the "`+`" character has a reserved purpose in URLs
+(encoding the space character), you must URL encode it for `curl` as "`%2B`":
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=%2Bone+%2Bthree"
+
+To search for documents that contain the term "`two`" but **don't** contain the term "`one`", enter `+two -one` in the
+`q` param in the Admin UI.  Again, URL encode "`+`" as "`%2B`":
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=%2Btwo+-one"
+
+#### In depth
+
+For more Solr search options, see the Solr Reference Guide's [Searching](https://cwiki.apache.org/confluence/display/solr/Searching)
+section.
+
+
+### Faceting
+
+One of Solr's most popular features is faceting.  Faceting allows the search results to be arranged into subsets (or
+buckets or categories), providing a count for each subset.  There are several types of faceting: field values, numeric
+and date ranges, pivots (decision tree), and arbitrary query faceting.
+
+#### Field facets
+
+In addition to providing search results, a Solr query can return the number of documents that contain each unique value
+in the whole result set.
+
+From the core-specific Admin UI Query tab, if you check the "`facet`" checkbox, you'll see a few facet-related options
+appear:
+
+<img style="border:1px solid #ccc" src="images/quickstart-admin-ui-facet-options.png" alt="Solr Quick Start: Query tab facet options"/>
+
+To see facet counts from all documents (`q=*:*`): turn on faceting (`facet=true`), and specify the field to facet on via
+the `facet.field` param.  If you only want facets, and no document contents, specify `rows=0`.  The `curl` command below
+will return facet counts for the `manu_id_s` field:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?wt=json&indent=true&q=*:*&rows=0&facet=true&facet.field=manu_id_s"
+
+In your terminal, you'll see:
+
+    {
+      "responseHeader":{
+        "status":0,
+        "QTime":3,
+        "params":{
+          "facet":"true",
+          "indent":"true",
+          "q":"*:*",
+          "facet.field":"manu_id_s",
+          "wt":"json",
+          "rows":"0"}},
+      "response":{"numFound":2990,"start":0,"docs":[]
+      },
+      "facet_counts":{
+        "facet_queries":{},
+        "facet_fields":{
+          "manu_id_s":[
+            "corsair",3,
+            "belkin",2,
+            "canon",2,
+            "apple",1,
+            "asus",1,
+            "ati",1,
+            "boa",1,
+            "dell",1,
+            "eu",1,
+            "maxtor",1,
+            "nor",1,
+            "uk",1,
+            "viewsonic",1,
+            "samsung",0]},
+        "facet_dates":{},
+        "facet_ranges":{},
+        "facet_intervals":{}}}
+
+#### Range facets
+
+For numeric or date fields, it's often desirable to partition the facet counts into ranges rather than discrete values.
+A prime example of numeric range faceting, using the example product data, is `price`.  In the `/browse` UI, it looks
+like this:
+
+<img style="border:1px solid #ccc" src="images/quickstart-range-facet.png" alt="Solr Quick Start: Range facets"/>
+
+The data for these price range facets can be seen in JSON format with this command:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?q=*:*&wt=json&indent=on&rows=0&facet=true&facet.range=price&f.price.facet.range.start=0&f.price.facet.range.end=600&f.price.facet.range.gap=50&facet.range.other=after"
+
+In your terminal you will see:
+
+    {
+      "responseHeader":{
+        "status":0,
+        "QTime":1,
+        "params":{
+          "facet.range.other":"after",
+          "facet":"true",
+          "indent":"on",
+          "q":"*:*",
+          "f.price.facet.range.gap":"50",
+          "facet.range":"price",
+          "f.price.facet.range.end":"600",
+          "wt":"json",
+          "f.price.facet.range.start":"0",
+          "rows":"0"}},
+      "response":{"numFound":2990,"start":0,"docs":[]
+      },
+      "facet_counts":{
+        "facet_queries":{},
+        "facet_fields":{},
+        "facet_dates":{},
+        "facet_ranges":{
+          "price":{
+            "counts":[
+              "0.0",19,
+              "50.0",1,
+              "100.0",0,
+              "150.0",2,
+              "200.0",0,
+              "250.0",1,
+              "300.0",1,
+              "350.0",2,
+              "400.0",0,
+              "450.0",1,
+              "500.0",0,
+              "550.0",0],
+            "gap":50.0,
+            "start":0.0,
+            "end":600.0,
+            "after":2}},
+        "facet_intervals":{}}}
+
+#### Pivot facets
+
+Another faceting type is pivot facets, also known as "decision trees", allowing two or more fields to be nested for all
+the various possible combinations.  Using the example technical product data, pivot facets can be used to see how many
+of the products in the "book" category (the `cat` field) are in stock or not in stock.  Here's how to get at the raw
+data for this scenario:
+
+    curl "http://localhost:8983/solr/gettingstarted/select?q=*:*&rows=0&wt=json&indent=on&facet=on&facet.pivot=cat,inStock"
+
+This results in the following response (trimmed to just the book category output), which says out of 14 items in the
+"book" category, 12 are in stock and 2 are not in stock:
+
+    ...
+    "facet_pivot":{
+      "cat,inStock":[{
+          "field":"cat",
+          "value":"book",
+          "count":14,
+          "pivot":[{
+              "field":"inStock",
+              "value":true,
+              "count":12},
+            {
+              "field":"inStock",
+              "value":false,
+              "count":2}]},
+    ...
+
+#### More faceting options
+
+For the full scoop on Solr faceting, visit the Solr Reference Guide's [Faceting](https://cwiki.apache.org/confluence/display/solr/Faceting)
+section.
+
+
+### Spatial
+
+Solr has sophisticated geospatial support, including searching within a specified distance range of a given location
+(or within a bounding box), sorting by distance, or even boosting results by distance.  Some of the example tech product
+documents in `example/exampledocs/*.xml` have locations associated with them to illustrate the spatial capabilities.
+To run the tech products example, see the [techproducts example section](#techproducts).
+Spatial queries can be combined with any other types of queries, such as in this example of querying for "ipod" within
+10 kilometers from San Francisco:
+
+<img style="border:1px solid #ccc" width="50%" src="images/quickstart-spatial.png" alt="Solr Quick Start: spatial search" class="float-right"/>
+
+The URL to this example is <http://localhost:8983/solr/techproducts/browse?q=ipod&pt=37.7752%2C-122.4232&d=10&sfield=store&fq=%7B%21bbox%7D&queryOpts=spatial&queryOpts=spatial>,
+leveraging the `/browse` UI to show a map for each item and allow easy selection of the location to search near.
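+
+The same bounding-box filter can be issued against the plain `/select` endpoint; a sketch, with `{!bbox}` URL-encoded
+as `%7B%21bbox%7D` just as in the `/browse` URL above:
+
+    curl "http://localhost:8983/solr/techproducts/select?wt=json&indent=true&q=ipod&fq=%7B%21bbox%7D&sfield=store&pt=37.7752,-122.4232&d=10"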
+
+To learn more about Solr's spatial capabilities, see the Solr Reference Guide's [Spatial Search](https://cwiki.apache.org/confluence/display/solr/Spatial+Search)
+section.
+
+## Wrapping up
+
+If you've run the full set of commands in this quick start guide you have done the following:
+
+* Launched Solr into SolrCloud mode, two nodes, two collections including shards and replicas
+* Indexed a directory of rich text files
+* Indexed Solr XML files
+* Indexed Solr JSON files
+* Indexed CSV content
+* Opened the admin console, used its query interface to get JSON formatted results
+* Opened the /browse interface to explore Solr's features in a more friendly and familiar way
+
+Nice work!  The script below, which runs all of these steps, took under two minutes! (Your run time may vary, depending
+on your computer's power and the resources available.)
+
+Here's a Unix script for convenient copying and pasting in order to run the key commands for this quick start guide:
+
+    date ;
+    bin/solr start -e cloud -noprompt ;
+      open http://localhost:8983/solr ;
+      bin/post -c gettingstarted docs/ ;
+      open http://localhost:8983/solr/gettingstarted/browse ;
+      bin/post -c gettingstarted example/exampledocs/*.xml ;
+      bin/post -c gettingstarted example/exampledocs/books.json ;
+      bin/post -c gettingstarted example/exampledocs/books.csv ;
+      bin/post -c gettingstarted -d "<delete><id>SP2514N</id></delete>" ;
+      bin/solr healthcheck -c gettingstarted ;
+    date ;
+
+<a name="cleanup"/>
+## Cleanup
+
+As you work through this guide, you may want to stop Solr and reset the environment back to the starting point.
+The following command line will stop Solr and remove the directories for each of the two nodes that the start script
+created:
+
+    bin/solr stop -all ; rm -Rf example/cloud/
+
+## Where to next?
+
+For more information on Solr, check out the following resources:
+
+  * [Solr Reference Guide](https://cwiki.apache.org/confluence/display/solr/Apache+Solr+Reference+Guide) (ensure you
+    match the version of the reference guide with your version of Solr)
+  * See also additional [Resources](http://lucene.apache.org/solr/resources.html)
+
diff --git a/solr/solrj/ivy.xml b/solr/solrj/ivy.xml
index 02005e6..4824619 100644
--- a/solr/solrj/ivy.xml
+++ b/solr/solrj/ivy.xml
@@ -32,7 +32,8 @@
     <dependency org="org.apache.httpcomponents" name="httpmime" rev="${/org.apache.httpcomponents/httpmime}" conf="compile"/>
     <dependency org="org.apache.httpcomponents" name="httpcore" rev="${/org.apache.httpcomponents/httpcore}" conf="compile"/>
     <dependency org="commons-io" name="commons-io" rev="${/commons-io/commons-io}" conf="compile"/>
-    <dependency org="org.codehaus.woodstox" name="wstx-asl" rev="${/org.codehaus.woodstox/wstx-asl}" conf="compile"/>
+    <dependency org="org.codehaus.woodstox" name="woodstox-core-asl" rev="${/org.codehaus.woodstox/woodstox-core-asl}" conf="compile"/>
+    <dependency org="org.codehaus.woodstox" name="stax2-api" rev="${/org.codehaus.woodstox/stax2-api}" conf="compile"/>
     <dependency org="org.noggit" name="noggit" rev="${/org.noggit/noggit}" conf="compile"/>
     <dependency org="org.slf4j" name="slf4j-api" rev="${/org.slf4j/slf4j-api}" conf="compile"/>
 
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
index 0dd035b..2c88970 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
@@ -26,13 +26,20 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.SolrPingResponse;
 import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.StringUtils;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 
+import java.io.Closeable;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 
@@ -41,7 +48,7 @@
  *
  * @since 5.0, replaced {@code SolrServer}
  */
-public abstract class SolrClient implements Serializable {
+public abstract class SolrClient implements Serializable, Closeable {
 
   private static final long serialVersionUID = 1L;
   private DocumentObjectBinder binder;
@@ -332,6 +339,56 @@
   }
 
   /**
+   * Retrieves the SolrDocument associated with the given identifier.
+   *
+   * @return retrieved SolrDocument, null if no document is found.
+   */
+  public SolrDocument getById(String id) throws SolrServerException {
+    return getById(id, null);
+  }
+
+  /**
+   * Retrieves the SolrDocument associated with the given identifier and uses
+   * the SolrParams to execute the request.
+   *
+   * @return retrieved SolrDocument, null if no document is found.
+   */
+  public SolrDocument getById(String id, SolrParams params) throws SolrServerException {
+    SolrDocumentList docs = getById(Arrays.asList(id), params);
+    if (!docs.isEmpty()) {
+      return docs.get(0);
+    }
+    return null;
+  }
+
+  /**
+   * Retrieves the SolrDocuments associated with the given identifiers.
+   * If a document was not found, it will not be added to the SolrDocumentList.
+   */
+  public SolrDocumentList getById(Collection<String> ids) throws SolrServerException {
+    return getById(ids, null);
+  }
+
+  /**
+   * Retrieves the SolrDocuments associated with the given identifiers and uses
+   * the SolrParams to execute the request.
+   * If a document was not found, it will not be added to the SolrDocumentList.
+   */
+  public SolrDocumentList getById(Collection<String> ids, SolrParams params) throws SolrServerException {
+    if (ids == null || ids.isEmpty()) {
+      throw new IllegalArgumentException("Must provide an identifier of a document to retrieve.");
+    }
+
+    ModifiableSolrParams reqParams = new ModifiableSolrParams(params);
+    if (StringUtils.isEmpty(reqParams.get(CommonParams.QT))) {
+      reqParams.set(CommonParams.QT, "/get");
+    }
+    reqParams.set("ids", (String[]) ids.toArray());
+
+    return query(reqParams).getResults();
+  }
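+
+  // Illustrative usage (not part of this patch): the getById methods route
+  // through the "/get" (real-time get) handler by default, e.g.:
+  //   SolrDocument doc = client.getById("0553293354");
+  //   SolrDocumentList docs = client.getById(Arrays.asList("id1", "id2"), null);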
+  
+  /**
    * SolrServer implementations need to implement how a request is actually processed
    */
   public abstract NamedList<Object> request(final SolrRequest request) throws SolrServerException, IOException;
@@ -343,10 +400,4 @@
     return binder;
   }
 
-  /**
-   * Release allocated resources.
-   *
-   * @since solr 4.0
-   */
-  public abstract void shutdown();
 }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
index 50fc1fe..80eb623 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java
@@ -17,13 +17,6 @@
 
 package org.apache.solr.client.solrj;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.List;
-import java.util.Locale;
-import java.util.regex.Pattern;
-
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.params.HighlightParams;
@@ -32,6 +25,13 @@
 import org.apache.solr.common.params.TermsParams;
 import org.apache.solr.common.util.DateUtil;
 
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.Locale;
+import java.util.regex.Pattern;
+
 
 /**
  * This is an augmented SolrParams with get/set/add fields for common fields used
@@ -443,14 +443,6 @@
     return this;
   }
 
-  /**
-   * @deprecated use {@link #setFacetMissing(Boolean)}
-   */
-  @Deprecated
-  public SolrQuery setMissing(String fld) {
-    return setFacetMissing(Boolean.valueOf(fld));
-  }
-
   /** get facet sort
    * 
    * @return facet sort or default of {@link FacetParams#FACET_SORT_COUNT}
@@ -459,18 +451,6 @@
     return this.get(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT);
   }
 
-  /** get facet sort
-   * 
-   * @return facet sort or default of true. <br />
-   * true corresponds to
-   * {@link FacetParams#FACET_SORT_COUNT} and <br />false to {@link FacetParams#FACET_SORT_INDEX}
-   * 
-   * @deprecated Use {@link #getFacetSortString()} instead.
-   */
-  @Deprecated
-  public boolean getFacetSort() {
-    return this.get(FacetParams.FACET_SORT, FacetParams.FACET_SORT_COUNT).equals(FacetParams.FACET_SORT_COUNT);
-  }
 
   /** set facet sort
    * 
@@ -482,19 +462,6 @@
     return this;
   }
 
-  /** set facet sort
-   * 
-   * @param sort sort facets
-   * @return this
-   * @deprecated Use {@link #setFacetSort(String)} instead, true corresponds to
-   * {@link FacetParams#FACET_SORT_COUNT} and false to {@link FacetParams#FACET_SORT_INDEX}.
-   */
-  @Deprecated
-  public SolrQuery setFacetSort(boolean sort) { 
-    this.set(FacetParams.FACET_SORT, sort == true ? FacetParams.FACET_SORT_COUNT : FacetParams.FACET_SORT_INDEX);
-    return this;
-  }
-
   /** add highlight field
    * 
    * @param f field to enable for highlighting
@@ -572,57 +539,6 @@
   }
 
   /**
-   * Replaces the sort string with a single sort field.
-   * @deprecated Use {@link #setSort(SortClause)} instead, which is part
-   * of an api handling a wider range of sort specifications.
-   */
-  @Deprecated
-  public SolrQuery setSortField(String field, ORDER order) {
-    this.remove(CommonParams.SORT);
-    addValueToParam(CommonParams.SORT, toSortString(field, order));
-    return this;
-  }
-  
-  /**
-   * Adds a sort field to the end of the sort string.
-   * @deprecated Use {@link #addSort(SortClause)} instead, which is part
-   * of an api handling a wider range of sort specifications.
-   */
-  @Deprecated
-  public SolrQuery addSortField(String field, ORDER order) {
-    return addValueToParam(CommonParams.SORT, toSortString(field, order));
-  }
-
-  /**
-   * Removes a sort field to the end of the sort string.
-   * @deprecated Use {@link #removeSort(SortClause)} instead, which is part
-   * of an api handling a wider range of sort specifications.
-   */
-  @Deprecated
-  public SolrQuery removeSortField(String field, ORDER order) {
-    String[] sorts = getSortFields();
-    if (sorts != null) {
-      String removeSort = toSortString(field, order);
-      String s = join(sorts, ",", removeSort);
-      if (s.length()==0) s=null;
-      this.set(CommonParams.SORT, s);
-    }
-    return this;
-  }
-  
-  /**
-   * Gets an array of sort specifications.
-   * @deprecated Use {@link #getSorts()} instead, which is part
-   * of an api handling a wider range of sort specifications.
-   */
-  @Deprecated
-  public String[] getSortFields() {
-    String s = getSortField();
-    if (s==null) return null;
-    return s.trim().split(", *");
-  }
-
-  /**
    * Gets the raw sort field, as it will be sent to Solr.
    * <p>
    * The returned sort field will always contain a serialized version
@@ -979,22 +895,6 @@
   }
 
   /**
-   * @deprecated See {@link #setRequestHandler(String)}.
-   */
-  @Deprecated
-  public SolrQuery setQueryType(String qt) {
-    return setRequestHandler(qt);
-  }
-
-  /**
-   * @deprecated See {@link #getRequestHandler()}.
-   */
-  @Deprecated
-  public String getQueryType() {
-    return getRequestHandler();
-  }
-
-  /**
    * @return this
    * @see ModifiableSolrParams#set(String,String[])
    */
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
index fcb7bc0..fe1e587 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java
@@ -46,6 +46,7 @@
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
+import org.apache.solr.common.util.Hash;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.common.util.StrUtils;
@@ -126,9 +127,10 @@
 
   }
   private volatile long timeToLive = 60* 1000L;
+  private volatile List<Object> locks = objectList(3);
 
 
-  protected Map<String, ExpiringCachedDocCollection> collectionStateCache = new ConcurrentHashMap<String, ExpiringCachedDocCollection>(){
+  protected final Map<String, ExpiringCachedDocCollection> collectionStateCache = new ConcurrentHashMap<String, ExpiringCachedDocCollection>(){
     @Override
     public ExpiringCachedDocCollection get(Object key) {
       ExpiringCachedDocCollection val = super.get(key);
@@ -143,7 +145,7 @@
   };
 
   class ExpiringCachedDocCollection {
-    DocCollection cached;
+    final DocCollection cached;
     long cachedAt;
 
     ExpiringCachedDocCollection(DocCollection cached) {
@@ -458,15 +460,7 @@
             Thread.currentThread().interrupt();
             throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
                 "", e);
-          } catch (KeeperException e) {
-            if (zk != null) zk.close();
-            throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
-                "", e);
-          } catch (IOException e) {
-            if (zk != null) zk.close();
-            throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
-                "", e);
-          } catch (TimeoutException e) {
+          } catch (KeeperException | TimeoutException | IOException e) {
             if (zk != null) zk.close();
             throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
                 "", e);
@@ -753,7 +747,7 @@
     String stateVerParam = null;
     List<DocCollection> requestedCollections = null;
     if (collection != null && !request.getPath().startsWith("/admin")) { // don't do _stateVer_ checking for admin requests
-      Set<String> requestedCollectionNames = getCollectionList(getZkStateReader().getClusterState(), collection);
+      Set<String> requestedCollectionNames = getCollectionNames(getZkStateReader().getClusterState(), collection);
 
       StringBuilder stateVerParamBuilder = null;
       for (String requestedCollection : requestedCollectionNames) {
@@ -916,16 +910,13 @@
             "No collection param specified on request and no default collection has been set.");
       }
       
-      Set<String> collectionsList = getCollectionList(clusterState, collection);
-      if (collectionsList.size() == 0) {
+      Set<String> collectionNames = getCollectionNames(clusterState, collection);
+      if (collectionNames.size() == 0) {
         throw new SolrException(ErrorCode.BAD_REQUEST,
             "Could not find collection: " + collection);
       }
 
       String shardKeys =  reqParams.get(ShardParams._ROUTE_);
-      if(shardKeys == null) {
-        shardKeys = reqParams.get(ShardParams.SHARD_KEYS); // deprecated
-      }
 
       // TODO: not a big deal because of the caching, but we could avoid looking
       // at every shard
@@ -935,7 +926,7 @@
       // specified,
       // add it to the Map of slices.
       Map<String,Slice> slices = new HashMap<>();
-      for (String collectionName : collectionsList) {
+      for (String collectionName : collectionNames) {
         DocCollection col = getDocCollection(clusterState, collectionName);
         Collection<Slice> routeSlices = col.getRouter().getSearchSlices(shardKeys, reqParams , col);
         ClientUtils.addSlices(slices, collectionName, routeSlices, true);
@@ -997,6 +988,9 @@
         theUrlList.addAll(urlList);
       }
       if(theUrlList.isEmpty()) {
+        for (String s : collectionNames) {
+          if(s!=null) collectionStateCache.remove(s);
+        }
         throw new SolrException(SolrException.ErrorCode.INVALID_STATE, "Not enough nodes to handle the request");
       }
 
@@ -1016,11 +1010,11 @@
     return rsp.getResponse();
   }
 
-  private Set<String> getCollectionList(ClusterState clusterState,
-      String collection) {
+  private Set<String> getCollectionNames(ClusterState clusterState,
+                                         String collection) {
     // Extract each comma separated collection name and store in a List.
     List<String> rawCollectionsList = StrUtils.splitSmart(collection, ",", true);
-    Set<String> collectionsList = new HashSet<>();
+    Set<String> collectionNames = new HashSet<>();
     // validate collections
     for (String collectionName : rawCollectionsList) {
       if (!clusterState.getCollections().contains(collectionName)) {
@@ -1028,20 +1022,20 @@
         String alias = aliases.getCollectionAlias(collectionName);
         if (alias != null) {
           List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
-          collectionsList.addAll(aliasList);
+          collectionNames.addAll(aliasList);
           continue;
         }
 
           throw new SolrException(ErrorCode.BAD_REQUEST, "Collection not found: " + collectionName);
         }
 
-      collectionsList.add(collectionName);
+      collectionNames.add(collectionName);
     }
-    return collectionsList;
+    return collectionNames;
   }
 
   @Override
-  public void shutdown() {
+  public void close() throws IOException {
     if (zkStateReader != null) {
       synchronized(this) {
         if (zkStateReader!= null)
@@ -1051,11 +1045,11 @@
     }
     
     if (shutdownLBHttpSolrServer) {
-      lbClient.shutdown();
+      lbClient.close();
     }
     
     if (clientIsInternal && myClient!=null) {
-      myClient.getConnectionManager().shutdown();
+      HttpClientUtil.close(myClient);
     }
 
     if(this.threadPool != null && !this.threadPool.isShutdown()) {
@@ -1071,18 +1065,50 @@
     return updatesToLeaders;
   }
 
-  protected DocCollection getDocCollection(ClusterState clusterState, String collection) throws SolrException {
-    ExpiringCachedDocCollection cachedState = collectionStateCache != null ? collectionStateCache.get(collection) : null;
-    if (cachedState != null && cachedState.cached != null) {
-      return cachedState.cached;
-    }
+  /** If caches are expired, they are refreshed after acquiring a lock.
+   * Use this to set the number of locks.
+   */
+  public void setParallelCacheRefreshes(int n) { locks = objectList(n); }
 
-    DocCollection col = clusterState.getCollectionOrNull(collection);
+  private static ArrayList<Object> objectList(int n) {
+    ArrayList<Object> l = new ArrayList<>(n);
+    for (int i = 0; i < n; i++) l.add(new Object());
+    return l;
+  }
+
+
+  protected DocCollection getDocCollection(ClusterState clusterState, String collection) throws SolrException {
+    if(collection == null) return null;
+    DocCollection col = getFromCache(collection);
+    if(col != null) return col;
+
+    ClusterState.CollectionRef ref = clusterState.getCollectionRef(collection);
+    if(ref == null){
+      // no such collection exists
+      return null;
+    }
+    if(!ref.isLazilyLoaded()) {
+      // it is readily available; just return it
+      return ref.get();
+    }
+    List<Object> locks = this.locks;
+    final Object lock = locks.get(Math.abs(Hash.murmurhash3_x86_32(collection, 0, collection.length(), 0) % locks.size()));
+    synchronized (lock){
+      // we may have waited for some time; check the cache once again
+      col = getFromCache(collection);
+      if(col !=null) return col;
+      col = ref.get();
+    }
     if(col == null ) return  null;
     if(col.getStateFormat() >1) collectionStateCache.put(collection, new ExpiringCachedDocCollection(col));
     return col;
   }
 
+  private DocCollection getFromCache(String c){
+    ExpiringCachedDocCollection cachedState = collectionStateCache.get(c);
+    return cachedState != null ? cachedState.cached : null;
+  }
+
 
   /**
    * Useful for determining the minimum achieved replication factor across
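
The refresh path above stripes its locking: the collection name is hashed onto a small fixed list of lock objects, so concurrent refreshes of the same collection serialize while refreshes of unrelated collections proceed in parallel, and the cache is re-checked after the lock is acquired. A standalone sketch of the idea, with illustrative names and String.hashCode() standing in for Solr's murmurhash3:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Sketch of lock striping: a fixed pool of N lock objects guards cache
    // refreshes. Threads refreshing the same key contend on one lock; threads
    // refreshing different keys usually proceed in parallel.
    class StripedCache {
      private final Map<String, String> cache = new ConcurrentHashMap<>();
      private final List<Object> locks = new ArrayList<>();

      StripedCache(int stripes) {
        for (int i = 0; i < stripes; i++) locks.add(new Object());
      }

      String get(String key) {
        String val = cache.get(key);
        if (val != null) return val;
        // deterministic stripe selection from the key's hash
        Object lock = locks.get(Math.abs(key.hashCode() % locks.size()));
        synchronized (lock) {
          val = cache.get(key);     // re-check: another thread may have won
          if (val != null) return val;
          val = expensiveLoad(key); // e.g. a ZooKeeper fetch
          if (val != null) cache.put(key, val);
          return val;
        }
      }

      private String expensiveLoad(String key) { return "state-of-" + key; }
    }
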
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
index e6b3aca..3e61c9d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java
@@ -36,6 +36,7 @@
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
+import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.slf4j.Logger;
@@ -424,8 +425,8 @@
   }
 
   @Override
-  public void shutdown() {
-    client.shutdown();
+  public void close() {
+    IOUtils.closeQuietly(client);
     if (shutdownExecutor) {
       scheduler.shutdown();
       try {
@@ -454,7 +455,7 @@
   }
 
   public void shutdownNow() {
-    client.shutdown();
+    IOUtils.closeQuietly(client);
     if (shutdownExecutor) {
       scheduler.shutdownNow(); // Cancel currently executing tasks
       try {
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientConfigurer.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientConfigurer.java
index 1ec9e98..1d370ff 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientConfigurer.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientConfigurer.java
@@ -29,7 +29,7 @@
  */
 public class HttpClientConfigurer {
   
-  protected void configure(DefaultHttpClient httpClient, SolrParams config) {
+  public void configure(DefaultHttpClient httpClient, SolrParams config) {
     
     if (config.get(HttpClientUtil.PROP_MAX_CONNECTIONS) != null) {
       HttpClientUtil.setMaxConnections(httpClient,
@@ -51,16 +51,15 @@
           config.getInt(HttpClientUtil.PROP_SO_TIMEOUT));
     }
     
-    if (config.get(HttpClientUtil.PROP_USE_RETRY) != null) {
-      HttpClientUtil.setUseRetry(httpClient,
-          config.getBool(HttpClientUtil.PROP_USE_RETRY));
-    }
-    
     if (config.get(HttpClientUtil.PROP_FOLLOW_REDIRECTS) != null) {
       HttpClientUtil.setFollowRedirects(httpClient,
           config.getBool(HttpClientUtil.PROP_FOLLOW_REDIRECTS));
     }
     
+    // always call setUseRetry, whether it is in config or not
+    HttpClientUtil.setUseRetry(httpClient,
+        config.getBool(HttpClientUtil.PROP_USE_RETRY, true));
+    
     final String basicAuthUser = config
         .get(HttpClientUtil.PROP_BASIC_AUTH_USER);
     final String basicAuthPass = config
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java
index ff2a8fe..848729c 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java
@@ -38,6 +38,7 @@
 import org.apache.http.conn.ssl.SSLSocketFactory;
 import org.apache.http.conn.ssl.X509HostnameVerifier;
 import org.apache.http.entity.HttpEntityWrapper;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
 import org.apache.http.impl.client.SystemDefaultHttpClient;
@@ -54,7 +55,6 @@
  * Utility class for creating/configuring httpclient instances. 
  */
 public class HttpClientUtil {
-  
   // socket timeout measured in ms, closes a socket if read
   // takes longer than x ms to complete. throws
   // java.net.SocketTimeoutException: Read timed out exception
@@ -109,7 +109,7 @@
    *          http client configuration, if null a client with default
    *          configuration (no additional configuration) is created. 
    */
-  public static HttpClient createClient(final SolrParams params) {
+  public static CloseableHttpClient createClient(final SolrParams params) {
     final ModifiableSolrParams config = new ModifiableSolrParams(params);
     if (logger.isDebugEnabled()) {
       logger.debug("Creating new http client, config:" + config);
@@ -123,7 +123,7 @@
    * Creates new http client by using the provided configuration.
    * 
    */
-  public static HttpClient createClient(final SolrParams params, ClientConnectionManager cm) {
+  public static CloseableHttpClient createClient(final SolrParams params, ClientConnectionManager cm) {
     final ModifiableSolrParams config = new ModifiableSolrParams(params);
     if (logger.isDebugEnabled()) {
       logger.debug("Creating new http client, config:" + config);
@@ -141,6 +141,14 @@
       SolrParams config) {
     configurer.configure(httpClient,  config);
   }
+  
+  public static void close(HttpClient httpClient) { 
+    if (httpClient instanceof CloseableHttpClient) {
+      org.apache.solr.common.util.IOUtils.closeQuietly((CloseableHttpClient) httpClient);
+    } else {
+      httpClient.getConnectionManager().shutdown();
+    }
+  }
 
   /**
    * Control HTTP payload compression.
@@ -233,7 +241,14 @@
     if (!useRetry) {
       httpClient.setHttpRequestRetryHandler(NO_RETRY);
     } else {
-      httpClient.setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler());
+      // retry only requests that were not fully sent; streaming updates
+      // are not affected, because they are not retryable
+      httpClient.setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler(){
+        @Override
+        protected boolean handleAsIdempotent(final HttpRequest request) {
+          return false; // we can't tell if a Solr request is idempotent
+        }
+      });
     }
   }
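
With this change setUseRetry is always applied, and the handler reports handleAsIdempotent = false, so HttpClient only retries requests that were never fully sent rather than replaying a possibly non-idempotent Solr update. A hedged sketch of wiring a client through these utilities; the timeout value is an example:

    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.solr.client.solrj.impl.HttpClientUtil;
    import org.apache.solr.common.params.ModifiableSolrParams;

    class RetryConfigDemo {
      static CloseableHttpClient build() {
        ModifiableSolrParams params = new ModifiableSolrParams();
        // Explicitly disable retries, as LBHttpSolrClient does when it has
        // several servers to fail over to; with a single server the new
        // default (true) retries requests that were never fully sent.
        params.set(HttpClientUtil.PROP_USE_RETRY, false);
        params.set(HttpClientUtil.PROP_SO_TIMEOUT, 90000); // example value
        return HttpClientUtil.createClient(params);
      }
    }
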
 
@@ -269,6 +284,14 @@
     }
   }
   
+  public static void setStaleCheckingEnabled(final HttpClient httpClient, boolean enabled) {
+    HttpConnectionParams.setStaleCheckingEnabled(httpClient.getParams(), enabled);
+  }
+  
+  public static void setTcpNoDelay(final HttpClient httpClient, boolean tcpNoDelay) {
+    HttpConnectionParams.setTcpNoDelay(httpClient.getParams(), tcpNoDelay);
+  }
+  
   private static class UseCompressionRequestInterceptor implements
       HttpRequestInterceptor {
     
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java
index ae19299..ec49e46 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java
@@ -752,9 +752,9 @@
    * Close the {@link ClientConnectionManager} from the internal client.
    */
   @Override
-  public void shutdown() {
+  public void close() throws IOException {
     if (httpClient != null && internalClient) {
-      httpClient.getConnectionManager().shutdown();
+      HttpClientUtil.close(httpClient);
     }
   }
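
The shutdown() methods across the SolrJ clients become close(), which makes them usable with try-with-resources. A small usage sketch under that assumption; the URL is an example:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;

    class CloseableClientDemo {
      public static void main(String[] args) throws Exception {
        // The client is now Closeable, so try-with-resources replaces the
        // old explicit shutdown() call.
        try (HttpSolrClient client =
                 new HttpSolrClient("http://localhost:8983/solr/collection1")) {
          QueryResponse rsp = client.query(new SolrQuery("*:*"));
          System.out.println("hits: " + rsp.getResults().getNumFound());
        } // close() releases the internal HttpClient's connection manager
      }
    }
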
 
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java
index 615ad23..1c28f5a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java
@@ -21,7 +21,9 @@
 import org.apache.solr.client.solrj.request.IsUpdateRequest;
 import org.apache.solr.client.solrj.request.RequestWriter;
 import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.common.SolrException;
@@ -221,7 +223,12 @@
     this.parser = parser;
     if (httpClient == null) {
       ModifiableSolrParams params = new ModifiableSolrParams();
-      params.set(HttpClientUtil.PROP_USE_RETRY, false);
+      if (solrServerUrl.length > 1) {
+        // we prefer retrying another server
+        params.set(HttpClientUtil.PROP_USE_RETRY, false);
+      } else {
+        params.set(HttpClientUtil.PROP_USE_RETRY, true);
+      }
       this.httpClient = HttpClientUtil.createClient(params);
     } else {
       this.httpClient = httpClient;
@@ -288,7 +295,13 @@
     boolean isUpdate = req.request instanceof IsUpdateRequest;
     List<ServerWrapper> skipped = null;
 
+    long timeAllowedNano = getTimeAllowedInNanos(req.getRequest());
+    long timeOutTime = System.nanoTime() + timeAllowedNano;
     for (String serverStr : req.getServers()) {
+      if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+        break;
+      }
+      
       serverStr = normalize(serverStr);
       // if the server is currently a zombie, just skip to the next one
       ServerWrapper wrapper = zombieServers.get(serverStr);
@@ -318,6 +331,10 @@
     // try the servers we previously skipped
     if (skipped != null) {
       for (ServerWrapper wrapper : skipped) {
+        if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+          break;
+        }
+
         ex = doRequest(wrapper.client, req, rsp, isUpdate, true, wrapper.getKey());
         if (ex == null) {
           return rsp; // SUCCESS
@@ -452,12 +469,12 @@
   }
 
   @Override
-  public void shutdown() {
+  public void close() {
     if (aliveCheckExecutor != null) {
       aliveCheckExecutor.shutdownNow();
     }
     if(clientIsInternal) {
-      httpClient.getConnectionManager().shutdown();
+      HttpClientUtil.close(httpClient);
     }
   }
 
@@ -482,7 +499,13 @@
     int maxTries = serverList.length;
     Map<String,ServerWrapper> justFailed = null;
 
+    long timeAllowedNano = getTimeAllowedInNanos(request);
+    long timeOutTime = System.nanoTime() + timeAllowedNano;
     for (int attempts=0; attempts<maxTries; attempts++) {
+      if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+        break;
+      }
+      
       int count = counter.incrementAndGet() & Integer.MAX_VALUE;
       ServerWrapper wrapper = serverList[count % serverList.length];
       wrapper.lastUsed = System.currentTimeMillis();
@@ -506,9 +529,12 @@
       }
     }
 
-
     // try other standard servers that we didn't try just now
     for (ServerWrapper wrapper : zombieServers.values()) {
+      if(isTimeExceeded(timeAllowedNano, timeOutTime)) {
+        break;
+      }
+      
       if (wrapper.standard==false || justFailed!=null && justFailed.containsKey(wrapper.getKey())) continue;
       try {
         NamedList<Object> rsp = wrapper.client.request(request);
@@ -540,6 +566,19 @@
   }
   
   /**
+   * @return the time allowed in nanoseconds; returns -1 if no timeAllowed is specified.
+   */
+  private long getTimeAllowedInNanos(final SolrRequest req) {
+    SolrParams reqParams = req.getParams();
+    return reqParams == null ? -1 : 
+      TimeUnit.NANOSECONDS.convert(reqParams.getInt(CommonParams.TIME_ALLOWED, -1), TimeUnit.MILLISECONDS);
+  }
+  
+  private boolean isTimeExceeded(long timeAllowedNano, long timeOutTime) {
+    return timeAllowedNano > 0 && System.nanoTime() > timeOutTime;
+  }
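
These checks make the load balancer honor a request's timeAllowed budget across all of its attempts, not just inside a single server call. A sketch of setting the budget on a query; the value is an example:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.common.params.CommonParams;

    class TimeAllowedDemo {
      static SolrQuery budgetedQuery() {
        SolrQuery q = new SolrQuery("*:*");
        // Give the whole request, including the retry loops above, at most
        // 500ms; once the budget is exceeded the loops break out early.
        q.set(CommonParams.TIME_ALLOWED, 500);
        return q;
      }
    }
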
+  
+  /**
   * Takes up one dead server and checks it for aliveness. The check is done in a round-robin fashion. Each server is
   * checked for aliveness once every 'x' millis, where x is set by setAliveCheckinterval() or defaults to 1 minute.
    *
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index d2fa1bd..7ef78e4 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -815,6 +815,10 @@
     private Boolean onlyActiveNodes;
     private Boolean shardUnique;
     
+    public BalanceShardUnique() {
+      this.action = CollectionAction.BALANCESHARDUNIQUE;
+    }
+    
     public String getPropertyName() {
       return propertyName;
     }
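
The new no-arg constructor presets the action so callers no longer have to. A sketch of building and sending the request; the setter name is an assumption inferred from the getter shown here, and "preferredLeader" is an example property:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;

    class BalanceDemo {
      static void balance(SolrClient client) throws Exception {
        CollectionAdminRequest.BalanceShardUnique req =
            new CollectionAdminRequest.BalanceShardUnique(); // action now preset
        req.setPropertyName("preferredLeader"); // assumed setter, example property
        client.request(req); // any SolrClient can send the request
      }
    }
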
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
index 2c4a2c1..320a16e 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CoreAdminRequest.java
@@ -587,14 +587,6 @@
     return req.process( client );
   }
 
-  @Deprecated
-  public static CoreAdminResponse persist(String fileName, SolrClient client) throws SolrServerException, IOException
-  {
-    CoreAdminRequest.Persist req = new CoreAdminRequest.Persist();
-    req.setFileName(fileName);
-    return req.process(client);
-  }
-
   public static CoreAdminResponse mergeIndexes(String name,
       String[] indexDirs, String[] srcCores, SolrClient client) throws SolrServerException,
       IOException {
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/QueryRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/QueryRequest.java
index b8e4237..33131b7 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/QueryRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/QueryRequest.java
@@ -92,10 +92,8 @@
       long endTime = TimeUnit.MILLISECONDS.convert(System.nanoTime(), TimeUnit.NANOSECONDS);
       res.setElapsedTime(endTime - startTime);
       return res;
-    } catch (SolrServerException e){
+    } catch (SolrServerException | SolrException e){
       throw e;
-    } catch (SolrException s){
-      throw s;
     } catch (Exception e) {
       throw new SolrServerException("Error executing query", e);
     }
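
Here, as in CloudSolrClient above, stacks of identical catch blocks collapse into a single Java 7 multi-catch. The pattern in miniature, with placeholder exception types since the sketch is standalone:

    import java.io.IOException;

    class MultiCatchDemo {
      // IOException stands in for SolrServerException, RuntimeException for SolrException.
      static void run(boolean fail) throws IOException {
        try {
          if (fail) throw new IOException("boom");
        } catch (IOException | RuntimeException e) {
          throw e; // one handler; precise rethrow keeps the original type
        }
      }
    }
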
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
index a6510d5..b03b1bc 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/UpdateRequest.java
@@ -208,7 +208,16 @@
           routes.put(leaderUrl, request);
         }
         UpdateRequest urequest = (UpdateRequest) request.getRequest();
-        urequest.add(doc);
+        Map<String,Object> value = entry.getValue();
+        Boolean ow = null;
+        if (value != null) {
+          ow = (Boolean) value.get(OVERWRITE);
+        }
+        if (ow != null) {
+          urequest.add(doc, ow);
+        } else {
+          urequest.add(doc);
+        }
       }
     }
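
Previously, a per-document overwrite=false flag was lost when CloudSolrClient split an update into per-leader sub-requests via getRoutes; it is now copied through. A sketch of a request where the flag matters; the id value is an example:

    import org.apache.solr.client.solrj.request.UpdateRequest;
    import org.apache.solr.common.SolrInputDocument;

    class OverwriteDemo {
      static UpdateRequest appendOnly() {
        UpdateRequest req = new UpdateRequest();
        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("id", "42"); // example document
        // overwrite=false keeps an existing document with the same id;
        // getRoutes(...) now carries the flag into each routed sub-request.
        req.add(doc, false);
        return req;
      }
    }
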
     
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java
index 22708ce..52b4e6b 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/RangeFacet.java
@@ -84,11 +84,6 @@
 
   public static class Numeric extends RangeFacet<Number, Number> {
 
-    @Deprecated
-    public Numeric(String name, Number start, Number end, Number gap, Number before, Number after) {
-      this(name, start, end, gap, before, after, null);
-    }
-
     public Numeric(String name, Number start, Number end, Number gap, Number before, Number after, Number between) {
       super(name, start, end, gap, before, after, between);
     }
@@ -97,11 +92,6 @@
 
   public static class Date extends RangeFacet<java.util.Date, String> {
 
-    @Deprecated
-    public Date(String name, java.util.Date start, java.util.Date end, String gap, Number before, Number after) {
-      this(name, start, end, gap, before, after, null);
-    }
-
     public Date(String name, java.util.Date start, java.util.Date end, String gap, Number before, Number after, Number between) {
       super(name, start, end, gap, before, after, between);
     }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java
index 1118e9d..a3ae45d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java
@@ -210,18 +210,6 @@
       return alternativeFrequencies;
     }
 
-    @Deprecated
-    /** @see #getAlternatives */
-    public List<String> getSuggestions() {
-      return alternatives;
-    }
-
-    @Deprecated
-    /** @see #getAlternativeFrequencies */
-    public List<Integer> getSuggestionFrequencies() {
-      return alternativeFrequencies;
-    }
-
   }
 
   public class Collation {
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java
index a4bcca0..9f33d6f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java
@@ -17,21 +17,6 @@
 
 package org.apache.solr.client.solrj.util;
 
-import java.io.IOException;
-import java.io.StringWriter;
-import java.io.Writer;
-import java.net.URLEncoder;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TimeZone;
-import java.nio.ByteBuffer;
-
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
@@ -43,6 +28,18 @@
 import org.apache.solr.common.util.DateUtil;
 import org.apache.solr.common.util.XML;
 
+import java.io.IOException;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.net.URLEncoder;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+
 
 /**
  *
@@ -187,44 +184,6 @@
   //---------------------------------------------------------------------------------------
 
   /**
-   * @deprecated Use {@link org.apache.solr.common.util.DateUtil#DEFAULT_DATE_FORMATS}
-   */
-  @Deprecated
-  public static final Collection<String> fmts = DateUtil.DEFAULT_DATE_FORMATS;
-
-  /**
-   * Returns a formatter that can be use by the current thread if needed to
-   * convert Date objects to the Internal representation.
-   *
-   * @deprecated Use {@link org.apache.solr.common.util.DateUtil#parseDate(String)}
-   */
-  @Deprecated
-  public static Date parseDate( String d ) throws ParseException
-  {
-    return DateUtil.parseDate(d);
-  }
-
-  /**
-   * Returns a formatter that can be use by the current thread if needed to
-   * convert Date objects to the Internal representation.
-   *
-   * @deprecated use {@link org.apache.solr.common.util.DateUtil#getThreadLocalDateFormat()}
-   */
-  @Deprecated
-  public static DateFormat getThreadLocalDateFormat() {
-
-    return DateUtil.getThreadLocalDateFormat();
-  }
-
-  /**
-   * @deprecated Use {@link org.apache.solr.common.util.DateUtil#UTC}.
-   */
-  @Deprecated
-  public static TimeZone UTC = DateUtil.UTC;
-
-
-
-  /**
    * See: {@link org.apache.lucene.queryparser.classic queryparser syntax} 
    * for more information on Escaping Special Characters
    */
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
index be18d65..1b14360 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
@@ -17,6 +17,12 @@
  * limitations under the License.
  */
 
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+import org.noggit.JSONWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -26,12 +32,6 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.noggit.JSONWriter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Immutable state of the cloud. Normally you can get the state by using
  * {@link ZkStateReader#getClusterState()}.
@@ -45,18 +45,6 @@
   private final Map<String, CollectionRef> collectionStates;
   private Set<String> liveNodes;
 
-
-  /**
-   * Use this constr when ClusterState is meant for publication.
-   * 
-   * hashCode and equals will only depend on liveNodes and not clusterStateVersion.
-   */
-  @Deprecated
-  public ClusterState(Set<String> liveNodes,
-      Map<String, DocCollection> collectionStates) {
-    this(null, liveNodes, collectionStates);
-  }
-
   /**
   * Use this constructor when ClusterState is meant for consumption.
    */
@@ -177,6 +165,9 @@
     return coll;
   }
 
+  public CollectionRef getCollectionRef(String coll) {
+    return  collectionStates.get(coll);
+  }
 
   public DocCollection getCollectionOrNull(String coll) {
     CollectionRef ref = collectionStates.get(coll);
@@ -325,16 +316,6 @@
 
   @Override
   public void write(JSONWriter jsonWriter) {
-    if (collectionStates.size() == 1) {
-      CollectionRef ref = collectionStates.values().iterator().next();
-      DocCollection docCollection = ref.get();
-      if (docCollection.getStateFormat() > 1) {
-        jsonWriter.write(Collections.singletonMap(docCollection.getName(), docCollection));
-        // serializing a single DocCollection that is persisted outside of clusterstate.json
-        return;
-      }
-    }
-
     LinkedHashMap<String , DocCollection> map = new LinkedHashMap<>();
     for (Entry<String, CollectionRef> e : collectionStates.entrySet()) {
       // using this class check to avoid fetching from ZK in case of lazily loaded collection
@@ -409,6 +390,8 @@
       return coll;
     }
 
+    public boolean isLazilyLoaded() { return false; }
+
   }
 
 }
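
The new isLazilyLoaded() hook lets callers such as getDocCollection above decide whether get() is cheap (state already in memory) or will trigger a ZooKeeper fetch worth guarding with a lock. A hedged sketch of a lazy reference, mirroring the anonymous subclass ZkStateReader installs; fetchFromZk is a placeholder for getCollectionLive, and the constructor is assumed accessible as it is to ZkStateReader:

    import org.apache.solr.common.cloud.ClusterState;
    import org.apache.solr.common.cloud.DocCollection;

    class LazyRefDemo {
      // placeholder for ZkStateReader.getCollectionLive(...)
      static DocCollection fetchFromZk(String name) { return null; }

      static ClusterState.CollectionRef lazyRef(final String name) {
        return new ClusterState.CollectionRef(null) {
          @Override
          public DocCollection get() { return fetchFromZk(name); } // fetch on demand
          @Override
          public boolean isLazilyLoaded() { return true; } // get() is expensive
        };
      }
    }
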
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ImplicitDocRouter.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ImplicitDocRouter.java
index 1fd82e1..8e421bd 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ImplicitDocRouter.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ImplicitDocRouter.java
@@ -31,9 +31,9 @@
 /** This document router is for custom sharding
  */
 public class ImplicitDocRouter extends DocRouter {
+
   public static final String NAME = "implicit";
-//  @Deprecated
-//  public static final String DEFAULT_SHARD_PARAM = "_shard_";
+
   private static Logger log = LoggerFactory
       .getLogger(ImplicitDocRouter.class);
 
@@ -49,7 +49,6 @@
       }
       if(shard == null) {
         Object o = sdoc.getFieldValue(_ROUTE_);
-        if (o == null) o = sdoc.getFieldValue("_shard_");//deprecated . for backcompat remove later
         if (o != null) {
           shard = o.toString();
         }
@@ -58,7 +57,6 @@
 
     if (shard == null) {
       shard = params.get(_ROUTE_);
-      if(shard == null) shard =params.get("_shard_"); //deperecated for back compat
     }
 
     if (shard != null) {
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/SaslZkACLProvider.java b/solr/solrj/src/java/org/apache/solr/common/cloud/SaslZkACLProvider.java
new file mode 100644
index 0000000..86ab25f
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/SaslZkACLProvider.java
@@ -0,0 +1,49 @@
+package org.apache.solr.common.cloud;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+
+/**
+ * ZkACLProvider that gives all permissions for the user specified in System
+ * property "solr.authorization.superuser" (default: "solr") when using sasl,
+ * and gives read permissions for anyone else.  Designed for a setup where
+ * configurations have already been set up and will not be modified, or
+ * where configuration changes are controlled via Solr APIs.
+ */
+public class SaslZkACLProvider extends DefaultZkACLProvider {
+
+  private static String superUser = System.getProperty("solr.authorization.superuser", "solr");
+
+  @Override
+  protected List<ACL> createGlobalACLsToAdd() {
+    List<ACL> result = new ArrayList<ACL>();
+    result.add(new ACL(ZooDefs.Perms.ALL, new Id("sasl", superUser)));
+    result.add(new ACL(ZooDefs.Perms.READ, ZooDefs.Ids.ANYONE_ID_UNSAFE));
+
+    if (result.isEmpty()) {
+      result = super.createGlobalACLsToAdd();
+    }
+    return result;
+  }
+}
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
index 9b82e14..f5f33a1 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/SolrZkClient.java
@@ -26,7 +26,6 @@
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.atomic.AtomicLong;
 
 import javax.xml.transform.OutputKeys;
 import javax.xml.transform.Source;
@@ -40,8 +39,8 @@
 import org.apache.solr.common.StringUtils;
 import org.apache.solr.common.cloud.ZkClientConnectionStrategy.ZkUpdate;
 import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
-import org.apache.zookeeper.AsyncCallback;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
@@ -62,9 +61,6 @@
  *
  */
 public class SolrZkClient implements Closeable {
-  // These should *only* be used for debugging or monitoring purposes
-  public static final AtomicLong numOpens = new AtomicLong();
-  public static final AtomicLong numCloses = new AtomicLong();
   
   static final String NEWL = System.getProperty("line.separator");
 
@@ -183,7 +179,7 @@
       }
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
-    numOpens.incrementAndGet();
+    assert ObjectReleaseTracker.track(this);
     if (zkACLProvider == null) {
       this.zkACLProvider = createZkACLProvider();
     } else {
@@ -644,7 +640,7 @@
       connManager.close();
       closeCallbackExecutor();
     }
-    numCloses.incrementAndGet();
+    assert ObjectReleaseTracker.release(this);
   }
 
   public boolean isClosed() {
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index 01ae265..33c1734 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -273,6 +273,7 @@
     if (collection.getZNodeVersion() < version) {
       log.debug("server older than client {}<{}", collection.getZNodeVersion(), version);
       DocCollection nu = getCollectionLive(this, coll);
+      if (nu == null) return null;
       if (nu.getZNodeVersion() > collection.getZNodeVersion()) {
         updateWatchedCollection(nu);
         collection = nu;
@@ -454,9 +455,11 @@
       synchronized (this) {
         if (watchedCollections.contains(s)) {
           DocCollection live = getCollectionLive(this, s);
-          watchedCollectionStates.put(s, live);
-          // if it is a watched collection, add too
-          result.put(s, new ClusterState.CollectionRef(live));
+          if (live != null) {
+            watchedCollectionStates.put(s, live);
+            // if it is a watched collection, add too
+            result.put(s, new ClusterState.CollectionRef(live));
+          }
         } else {
           // if it is not collection, then just create a reference which can fetch
           // the collection object just in time from ZK
@@ -468,6 +471,9 @@
             public DocCollection get() {
               return getCollectionLive(ZkStateReader.this, collName);
             }
+
+            @Override
+            public boolean isLazilyLoaded() { return true; }
           });
         }
       }
@@ -527,7 +533,10 @@
       }
       synchronized (ZkStateReader.this) {
         for (String watchedCollection : watchedCollections) {
-          updateWatchedCollection(getCollectionLive(this, watchedCollection));
+          DocCollection live = getCollectionLive(this, watchedCollection);
+          if (live != null) {
+            updateWatchedCollection(live);
+          }
         }
       }
 
@@ -585,7 +594,11 @@
 
             synchronized (ZkStateReader.this) {
               for (String watchedCollection : watchedCollections) {
-                updateWatchedCollection(getCollectionLive(ZkStateReader.this, watchedCollection));
+                DocCollection live = getCollectionLive(ZkStateReader.this, watchedCollection);
+                assert live != null;
+                if (live != null) {
+                  updateWatchedCollection(live);
+                }
               }
             }
           }
@@ -878,7 +891,10 @@
       };
       zkClient.exists(fullpath, watcher, true);
     }
-    updateWatchedCollection(getCollectionLive(this, coll));
+    DocCollection collection = getCollectionLive(this, coll);
+    if (collection != null) {
+      updateWatchedCollection(collection);
+    }
   }
   
   private void updateWatchedCollection(DocCollection newState) {
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java
index 298e0c1..4c9cc2e 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java
@@ -24,11 +24,11 @@
  */
 public class AppendedSolrParams extends DefaultSolrParams {
 
-  /**
-   * @deprecated (3.6) Use {@link SolrParams#wrapAppended(SolrParams, SolrParams)} instead.
-   */
-  @Deprecated
-  public AppendedSolrParams(SolrParams main, SolrParams extra) {
+  public static AppendedSolrParams wrapAppended(SolrParams params, SolrParams extra) {
+    return new AppendedSolrParams(params, extra);
+  }
+
+  private AppendedSolrParams(SolrParams main, SolrParams extra) {
     super(main, extra);
   }
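
With the constructor private, combining parameter sets goes through the static factories on SolrParams. A small sketch of the appended semantics; the parameter values are examples:

    import org.apache.solr.common.params.ModifiableSolrParams;
    import org.apache.solr.common.params.SolrParams;

    class AppendedParamsDemo {
      static SolrParams combined() {
        ModifiableSolrParams main = new ModifiableSolrParams();
        main.set("fq", "type:book");
        ModifiableSolrParams extra = new ModifiableSolrParams();
        extra.set("fq", "inStock:true");
        // Appended semantics: getParams("fq") returns both values,
        // main's first, instead of extra's being shadowed.
        return SolrParams.wrapAppended(main, extra);
      }
    }
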
 
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java
index 36936e4..1f308cd 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java
@@ -24,14 +24,11 @@
  *
  */
 public class DefaultSolrParams extends SolrParams {
+
   protected final SolrParams params;
   protected final SolrParams defaults;
 
-  /**
-   * @deprecated (3.6) Use {@link SolrParams#wrapDefaults(SolrParams, SolrParams)} instead.
-   */
-  @Deprecated
-  public DefaultSolrParams(SolrParams params, SolrParams defaults) {
+  protected DefaultSolrParams(SolrParams params, SolrParams defaults) {
     assert params != null && defaults != null;
     this.params = params;
     this.defaults = defaults;
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java b/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java
index 0509ea5..0947494 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java
@@ -310,25 +310,6 @@
   }
   
   /**
-   * @deprecated Use {@link FacetRangeOther}
-   */
-  @Deprecated
-  public enum FacetDateOther {
-    BEFORE, AFTER, BETWEEN, ALL, NONE;
-    @Override
-    public String toString() { return super.toString().toLowerCase(Locale.ROOT); }
-    public static FacetDateOther get(String label) {
-      try {
-        return valueOf(label.toUpperCase(Locale.ROOT));
-      } catch (IllegalArgumentException e) {
-        throw new SolrException
-          (SolrException.ErrorCode.BAD_REQUEST,
-           label+" is not a valid type of 'other' range facet information",e);
-      }
-    }
-  }
-  
-  /**
    * An enumeration of the legal values for {@link #FACET_DATE_INCLUDE} and {@link #FACET_RANGE_INCLUDE}
    * <p>
    * <ul>
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/HighlightParams.java b/solr/solrj/src/java/org/apache/solr/common/params/HighlightParams.java
index 49e82ef..ceccd74 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/HighlightParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/HighlightParams.java
@@ -52,6 +52,7 @@
   
   public static final String USE_PHRASE_HIGHLIGHTER = HIGHLIGHT+".usePhraseHighlighter";
   public static final String HIGHLIGHT_MULTI_TERM = HIGHLIGHT+".highlightMultiTerm";
+  public static final String PAYLOADS = HIGHLIGHT+".payloads";
 
   public static final String MERGE_CONTIGUOUS_FRAGMENTS = HIGHLIGHT + ".mergeContiguous";
 
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java
index 3cfce3f..f2a1c37 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/MapSolrParams.java
@@ -19,6 +19,7 @@
 
 import org.apache.solr.common.util.StrUtils;
 
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.Map;
 import java.io.IOException;
@@ -35,13 +36,22 @@
 
   @Override
   public String get(String name) {
-    return map.get(name);
+    Object o = map.get(name);
+    if (o == null) return null;
+    if (o instanceof String) return (String) o;
+    if (o instanceof String[]) {
+      String[] strings = (String[]) o;
+      if (strings.length == 0) return null;
+      return strings[0];
+    }
+    return String.valueOf(o);
   }
 
   @Override
   public String[] getParams(String name) {
-    String val = map.get(name);
-    return val==null ? null : new String[]{val};
+    Object val = map.get(name);
+    if (val instanceof String[]) return (String[]) val;
+    return val==null ? null : new String[]{String.valueOf(val)};
   }
 
   @Override
@@ -59,13 +69,16 @@
 
       for (Map.Entry<String,String> entry : map.entrySet()) {
         String key = entry.getKey();
-        String val = entry.getValue();
-
+        Object val = entry.getValue();
+        if (val instanceof String[]) {
+          String[] strings = (String[]) val;
+          val = StrUtils.join(Arrays.asList(strings), ',');
+        }
         if (!first) sb.append('&');
         first=false;
         sb.append(key);
         sb.append('=');
-        StrUtils.partialURLEncodeVal(sb, val==null ? "" : val);
+        StrUtils.partialURLEncodeVal(sb, val==null ? "" : String.valueOf(val));
       }
     }
     catch (IOException e) {throw new RuntimeException(e);}  // can't happen
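
get(), getParams(), and toString() now tolerate String[] (or any Object) values in the backing map instead of only String. A hedged sketch; the raw-typed map is what lets non-String values in, which is an assumption about how callers populate it:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.common.params.MapSolrParams;

    class MapParamsDemo {
      @SuppressWarnings({"unchecked", "rawtypes"})
      static void demo() {
        Map raw = new HashMap();
        raw.put("q", "*:*");
        raw.put("fl", new String[] {"id", "name"}); // multi-valued entry
        MapSolrParams p = new MapSolrParams(raw);
        p.get("fl");       // -> "id" (first value)
        p.getParams("fl"); // -> ["id", "name"]
      }
    }
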
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
index a17be2a..46a9fc0 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
@@ -45,10 +45,6 @@
 
   /** Should things fail if there is an error? (true/false) */
   public static final String SHARDS_TOLERANT = "shards.tolerant";
-
-  /** Should things fail if there is an error? (true/false) */
-  @Deprecated
-  public static final String SHARD_KEYS = "shard.keys";
   
   /** query purpose for shard requests */
   public static final String SHARDS_PURPOSE = "shards.purpose";
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java
index 10f07b8..36d0df1 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java
@@ -17,17 +17,17 @@
 
 package org.apache.solr.common.params;
 
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.common.util.StrUtils;
+
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.common.util.StrUtils;
-
 /**  SolrParams hold request parameters.
  *
  *
@@ -282,7 +282,6 @@
     }
   }
 
-  @SuppressWarnings({"deprecation"})
   public static SolrParams wrapDefaults(SolrParams params, SolrParams defaults) {
     if (params == null)
       return defaults;
@@ -291,13 +290,12 @@
     return new DefaultSolrParams(params,defaults);
   }
 
-  @SuppressWarnings({"deprecation"})
   public static SolrParams wrapAppended(SolrParams params, SolrParams defaults) {
     if (params == null)
       return defaults;
     if (defaults == null)
       return params;
-    return new AppendedSolrParams(params,defaults);
+    return AppendedSolrParams.wrapAppended(params,defaults);
   }
 
   /** Create a Map&lt;String,String&gt; from a NamedList given no keys are repeated */
diff --git a/solr/core/src/java/org/apache/solr/util/IOUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java
similarity index 93%
rename from solr/core/src/java/org/apache/solr/util/IOUtils.java
rename to solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java
index e7b82ea..cbf1dcb 100644
--- a/solr/core/src/java/org/apache/solr/util/IOUtils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java
@@ -1,8 +1,7 @@
-package org.apache.solr.util;
+package org.apache.solr.common.util;
 
 import java.io.Closeable;
 
-import org.apache.solr.core.HdfsDirectoryFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ObjectReleaseTracker.java b/solr/solrj/src/java/org/apache/solr/common/util/ObjectReleaseTracker.java
new file mode 100644
index 0000000..47ab21a
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/common/util/ObjectReleaseTracker.java
@@ -0,0 +1,62 @@
+package org.apache.solr.common.util;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class ObjectReleaseTracker {
+  public static Map<Object,String> OBJECTS = new ConcurrentHashMap<>();
+  
+  public static boolean track(Object object) {
+    StringWriter sw = new StringWriter();
+    PrintWriter pw = new PrintWriter(sw);
+    new ObjectTrackerException().printStackTrace(pw);
+    OBJECTS.put(object, sw.toString());
+    return true;
+  }
+  
+  public static boolean release(Object object) {
+    OBJECTS.remove(object);
+    return true;
+  }
+  
+  public static boolean clearObjectTrackerAndCheckEmpty() {
+    Set<Entry<Object,String>> entries = OBJECTS.entrySet();
+    boolean empty = entries.isEmpty();
+    if (!empty) {
+      System.err.println("ObjectTracker found objects that were not released!!!");
+    }
+    
+    for (Entry<Object,String> entry : entries) {
+      System.err.println(entry.getValue());
+    }
+    
+    OBJECTS.clear();
+    
+    return empty;
+  }
+  
+  private static class ObjectTrackerException extends RuntimeException {
+    
+  }
+}
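
The tracker replaces the old numOpens/numCloses counters in SolrZkClient: each tracked object stores the stack trace of its creation, so a leaked instance can be traced to its allocation site. The typical wiring, as SolrZkClient does above (sketch):

    import java.io.Closeable;
    import org.apache.solr.common.util.ObjectReleaseTracker;

    class TrackedResource implements Closeable {
      TrackedResource() {
        // assert keeps tracking free in production; tests run with -ea
        assert ObjectReleaseTracker.track(this);
      }

      @Override
      public void close() {
        assert ObjectReleaseTracker.release(this);
      }
    }

A test harness can then call ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty() during teardown and fail the run if anything leaked.
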
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/schema.xml
similarity index 100%
rename from solr/solrj/src/test-files/solrj/solr/shared/conf/schema.xml
rename to solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/schema.xml
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/conf/solrconfig.xml b/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/solrconfig.xml
similarity index 100%
rename from solr/solrj/src/test-files/solrj/solr/shared/conf/solrconfig.xml
rename to solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/solrconfig.xml
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/conf/stopwords-en.txt b/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/stopwords-en.txt
similarity index 100%
rename from solr/solrj/src/test-files/solrj/solr/shared/conf/stopwords-en.txt
rename to solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/stopwords-en.txt
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/conf/stopwords-fr.txt b/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/stopwords-fr.txt
similarity index 100%
rename from solr/solrj/src/test-files/solrj/solr/shared/conf/stopwords-fr.txt
rename to solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/stopwords-fr.txt
diff --git a/solr/solrj/src/test-files/solrj/solr/multicore/core0/core.properties b/solr/solrj/src/test-files/solrj/solr/multicore/core0/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/solrj/src/test-files/solrj/solr/multicore/core0/core.properties
diff --git a/solr/solrj/src/test-files/solrj/solr/multicore/core1/core.properties b/solr/solrj/src/test-files/solrj/solr/multicore/core1/core.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/solr/solrj/src/test-files/solrj/solr/multicore/core1/core.properties
diff --git a/solr/solrj/src/test-files/solrj/solr/multicore/solr.xml b/solr/solrj/src/test-files/solrj/solr/multicore/solr.xml
index 4e27135..bffa48e 100644
--- a/solr/solrj/src/test-files/solrj/solr/multicore/solr.xml
+++ b/solr/solrj/src/test-files/solrj/solr/multicore/solr.xml
@@ -16,25 +16,7 @@
  limitations under the License.
 -->
 
-<!--
- All (relative) paths are relative to the installation path
-  
-  persistent: Save changes made via the API to this file
-  sharedLib: path to a lib directory that will be shared across all cores
--->
-<solr persistent="false">
+<solr>
 
-  <!--
-  adminPath: RequestHandler path to manage cores.  
-    If 'null' (or absent), cores will not be manageable via request handler
-  -->
-  <cores adminPath="/admin/cores" host="${host:}" hostPort="${jetty.port:8983}" hostContext="${hostContext:solr}">
-    <core name="core0" instanceDir="core0" />
-    <core name="core1" instanceDir="core1" />
-    
-    <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
-      <str name="urlScheme">${urlScheme:}</str>
-    </shardHandlerFactory>
-  </cores>
-        
+
 </solr>
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/collection1/core.properties b/solr/solrj/src/test-files/solrj/solr/shared/collection1/core.properties
new file mode 100644
index 0000000..0954556
--- /dev/null
+++ b/solr/solrj/src/test-files/solrj/solr/shared/collection1/core.properties
@@ -0,0 +1 @@
+configSet=shared
\ No newline at end of file
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/core0/core.properties b/solr/solrj/src/test-files/solrj/solr/shared/core0/core.properties
new file mode 100644
index 0000000..afb87fe
--- /dev/null
+++ b/solr/solrj/src/test-files/solrj/solr/shared/core0/core.properties
@@ -0,0 +1,8 @@
+name=core0
+dataDir=${dataDir1}
+collection=${collection:acollection}
+version=3.5
+l10n=EN
+ctlField=core0
+comment=This is a sample with a sys prop ${sysprop:default}
+configSet=shared
\ No newline at end of file
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/core1/core.properties b/solr/solrj/src/test-files/solrj/solr/shared/core1/core.properties
new file mode 100644
index 0000000..341afa5
--- /dev/null
+++ b/solr/solrj/src/test-files/solrj/solr/shared/core1/core.properties
@@ -0,0 +1,6 @@
+dataDir=${dataDir2}
+version=2.4
+l10n=FR
+ctlField=core1
+comment=Ceci est un exemple
+configSet=shared
\ No newline at end of file
diff --git a/solr/solrj/src/test-files/solrj/solr/shared/solr.xml b/solr/solrj/src/test-files/solrj/solr/shared/solr.xml
index a954a3a..7abf439 100644
--- a/solr/solrj/src/test-files/solrj/solr/shared/solr.xml
+++ b/solr/solrj/src/test-files/solrj/solr/shared/solr.xml
@@ -16,20 +16,45 @@
  limitations under the License.
 -->
 
+<solr>
+
+  <str name="shareSchema">${shareSchema:false}</str>
+  <str name="configSetBaseDir">${configSetBaseDir:../configsets}</str>
+  <str name="coreRootDirectory">${coreRootDirectory:.}</str>
+
+  <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+    <str name="urlScheme">${urlScheme:}</str>
+    <int name="socketTimeout">${socketTimeout:90000}</int>
+    <int name="connTimeout">${connTimeout:15000}</int>
+  </shardHandlerFactory>
+
+  <solrcloud>
+    <str name="host">127.0.0.1</str>
+    <int name="hostPort">${hostPort:8983}</int>
+    <str name="hostContext">${hostContext:solr}</str>
+    <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+    <int name="leaderVoteWait">0</int>
+    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
+    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
+  </solrcloud>
+
+</solr>
+
 <!--
  All (relative) paths are relative to the installation path
   
   persistent: Save changes made via the API to this file
   sharedLib: path to a lib directory that will be shared across all cores
--->
+
 <solr persistent="false">
   <property name="version" value="1.3"/>
   <property name="lang" value="english, french"/>
 
-  <!--
+
   adminPath: RequestHandler path to manage cores.  
     If 'null' (or absent), cores will not be manageable via REST
-  -->
+
   <cores adminPath="/admin/cores" defaultCoreName="core0" host="127.0.0.1" hostPort="${hostPort:8983}" hostContext="${hostContext:solr}" zkClientTimeout="8000" genericCoreNodeNames="${genericCoreNodeNames:true}" configSetBaseDir="${configSetBase:configsets}">
     <core name="collection1" instanceDir="." />
     <core name="core0" instanceDir="${theInstanceDir:./}" dataDir="${dataDir1}" collection="${collection:acollection}">
@@ -46,3 +71,4 @@
     </core>
   </cores>
 </solr>
+-->
diff --git a/solr/solrj/src/test-files/solrj/solr/solr.xml b/solr/solrj/src/test-files/solrj/solr/solr.xml
index c5d6bac..8f79966 100644
--- a/solr/solrj/src/test-files/solrj/solr/solr.xml
+++ b/solr/solrj/src/test-files/solrj/solr/solr.xml
@@ -16,24 +16,52 @@
  limitations under the License.
 -->
 
+
 <!--
  All (relative) paths are relative to the installation path
-  
-  persistent: Save changes made via the API to this file
-  sharedLib: path to a lib directory that will be shared across all cores
 -->
+<solr>
+
+  <str name="shareSchema">${shareSchema:false}</str>
+  <str name="configSetBaseDir">${configSetBaseDir:configsets}</str>
+  <str name="coreRootDirectory">${coreRootDirectory:.}</str>
+
+  <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+    <str name="urlScheme">${urlScheme:}</str>
+    <int name="socketTimeout">${socketTimeout:90000}</int>
+    <int name="connTimeout">${connTimeout:15000}</int>
+  </shardHandlerFactory>
+
+  <solrcloud>
+    <str name="host">127.0.0.1</str>
+    <int name="hostPort">${hostPort:8983}</int>
+    <str name="hostContext">${hostContext:solr}</str>
+    <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+    <int name="leaderVoteWait">0</int>
+    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
+    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
+  </solrcloud>
+
+</solr>
+
+
+    <!--
+     All (relative) paths are relative to the installation path
+
+      persistent: Save changes made via the API to this file
+      sharedLib: path to a lib directory that will be shared across all cores
+
 <solr persistent="false">
 
-  <!--
-  adminPath: RequestHandler path to manage cores.  
-    If 'null' (or absent), cores will not be manageable via request handler
-  -->
+
+
   <cores adminPath="/admin/cores" defaultCoreName="collection1" host="127.0.0.1" hostPort="${hostPort:8983}" 
          hostContext="${hostContext:solr}" zkClientTimeout="${solr.zkclienttimeout:30000}" shareSchema="${shareSchema:false}" 
          genericCoreNodeNames="${genericCoreNodeNames:true}" leaderVoteWait="0"
          distribUpdateConnTimeout="${distribUpdateConnTimeout:45000}" distribUpdateSoTimeout="${distribUpdateSoTimeout:340000}">
     <core name="collection1" instanceDir="collection1" shard="${shard:}" collection="${collection:collection1}" />
-    <!--config="${solrconfig:solrconfig.xml}" schema="${schema:schema.xml}"/-->
+
     <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
       <str name="urlScheme">${urlScheme:}</str>
       <int name="socketTimeout">${socketTimeout:90000}</int>
@@ -41,3 +69,4 @@
     </shardHandlerFactory>
   </cores>
 </solr>
+-->
\ No newline at end of file
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/GetByIdTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/GetByIdTest.java
new file mode 100644
index 0000000..f32e7ff
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/GetByIdTest.java
@@ -0,0 +1,117 @@
+package org.apache.solr.client.solrj;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.File;
+import java.util.Arrays;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.solr.SolrJettyTestBase;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.SolrParams;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class GetByIdTest extends SolrJettyTestBase {
+  
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore();
+  }
+  
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    getSolrClient().deleteByQuery("*:*");
+    getSolrClient().add(Arrays.asList(
+        sdoc("id", "1", "term_s", "Microsoft", "term2_s", "MSFT"),
+        sdoc("id", "2", "term_s", "Apple", "term2_s", "AAPL"),
+        sdoc("id", "3", "term_s", "Yahoo", "term2_s", "YHOO")));
+    
+    getSolrClient().commit(true, true);
+  }
+  
+  @Test
+  public void testGetId() throws Exception {
+    SolrDocument rsp = getSolrClient().getById("0");
+    assertNull(rsp);
+    
+    rsp = getSolrClient().getById("1");
+    assertEquals("1", rsp.get("id"));
+    assertEquals("Microsoft", rsp.get("term_s"));
+    assertEquals("MSFT", rsp.get("term2_s"));
+
+    rsp = getSolrClient().getById("2");    
+    assertEquals("2", rsp.get("id"));
+    assertEquals("Apple", rsp.get("term_s"));
+    assertEquals("AAPL", rsp.get("term2_s"));
+  }
+  
+  @Test
+  public void testGetIdWithParams() throws Exception {
+    final SolrParams ID_FL_ONLY = params(CommonParams.FL, "id");
+    
+    SolrDocument rsp = getSolrClient().getById("0", ID_FL_ONLY);
+    assertNull(rsp);
+    
+    rsp = getSolrClient().getById("1", ID_FL_ONLY);
+    assertEquals("1", rsp.get("id"));
+    assertNull("This field should have been removed from the response.", rsp.get("term_s"));
+    assertNull("This field should have been removed from the response.", rsp.get("term2_s"));
+
+    rsp = getSolrClient().getById("2", ID_FL_ONLY);    
+    assertEquals("2", rsp.get("id"));
+    assertNull("This field should have been removed from the response.", rsp.get("term_s"));
+    assertNull("This field should have been removed from the response.", rsp.get("term2_s"));
+  }
+
+  @Test
+  public void testGetIds() throws Exception {
+    SolrDocumentList rsp = getSolrClient().getById(Arrays.asList("0", "1", "2", "3", "4"));
+    assertEquals(3, rsp.getNumFound());
+    assertEquals("1", rsp.get(0).get("id"));
+    assertEquals("Microsoft", rsp.get(0).get("term_s"));
+    assertEquals("MSFT", rsp.get(0).get("term2_s"));
+    
+    assertEquals("2", rsp.get(1).get("id"));
+    assertEquals("Apple", rsp.get(1).get("term_s"));
+    assertEquals("AAPL", rsp.get(1).get("term2_s"));
+    
+    assertEquals("3", rsp.get(2).get("id"));
+    assertEquals("Yahoo", rsp.get(2).get("term_s"));
+    assertEquals("YHOO", rsp.get(2).get("term2_s"));
+  }
+  
+  @Test
+  public void testGetIdsWithParams() throws Exception {
+    SolrDocumentList rsp = getSolrClient().getById(Arrays.asList("0", "1", "2"), params(CommonParams.FL, "id"));
+    assertEquals(2, rsp.getNumFound());
+    
+    assertEquals("1", rsp.get(0).get("id"));
+    assertNull("This field should have been removed from the response.", rsp.get(0).get("term_s"));
+    assertNull("This field should have been removed from the response.", rsp.get(0).get("term2_s"));
+    
+    assertEquals("2", rsp.get(1).get("id"));
+    assertNull("This field should have been removed from the response.", rsp.get(1).get("term_s"));
+    assertNull("This field should have been removed from the response.", rsp.get(1).get("term2_s"));
+  }
+}
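
The new GetByIdTest above exercises the getById convenience methods on SolrClient, which fetch documents by their unique key (real-time get) instead of running a full query. A minimal usage sketch built only from the calls the test makes; the URL is a placeholder:

    import java.util.Arrays;

    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.common.SolrDocument;
    import org.apache.solr.common.SolrDocumentList;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class GetByIdSketch {
      public static void main(String[] args) throws Exception {
        try (HttpSolrClient client =
                 new HttpSolrClient("http://localhost:8983/solr/collection1")) {
          // Single id: returns the document, or null when it does not exist.
          SolrDocument doc = client.getById("1");

          // With extra params, e.g. restricting the returned field list.
          ModifiableSolrParams onlyId = new ModifiableSolrParams();
          onlyId.set("fl", "id");
          SolrDocument idOnly = client.getById("1", onlyId);

          // Multiple ids: ids that do not exist are simply absent.
          SolrDocumentList docs = client.getById(Arrays.asList("1", "2", "3"));
          System.out.println(docs.getNumFound() + " found; " + doc + ", " + idOnly);
        }
      }
    }
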
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java
index b100177..d231a94 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java
@@ -118,7 +118,7 @@
           log.info("Caught benign exception during commit: " + e.getMessage());
         }
         if (!(client instanceof EmbeddedSolrServer)) {
-          client.shutdown();
+          client.close();
         }
 
       } catch (Exception e) {
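
The shutdown-to-close switch here reflects SolrClient implementing java.io.Closeable; the instanceof guard stays because closing an EmbeddedSolrServer would also shut down the CoreContainer it wraps. A minimal sketch of that guard as a helper (the class name is hypothetical):

    import java.io.IOException;

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;

    final class ClientClosingSketch {
      /** Close the client unless it is embedded and shares its CoreContainer. */
      static void closeIfOwned(SolrClient client) throws IOException {
        if (!(client instanceof EmbeddedSolrServer)) {
          client.close();
        }
      }
    }
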
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
index d28b103..9dacb4b 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java
@@ -213,13 +213,13 @@
     if (jetty != null) {
       // check system wide system handler + "/admin/info/system"
       String url = jetty.getBaseUrl().toString();
-      HttpSolrClient adminClient = new HttpSolrClient(url);
-      SolrQuery q = new SolrQuery();
-      q.set("qt", "/admin/info/system");
-      QueryResponse rsp = adminClient.query(q);
-      assertNotNull(rsp.getResponse().get("mode"));
-      assertNotNull(rsp.getResponse().get("lucene"));
-      adminClient.shutdown();
+      try (HttpSolrClient adminClient = new HttpSolrClient(url)) {
+        SolrQuery q = new SolrQuery();
+        q.set("qt", "/admin/info/system");
+        QueryResponse rsp = adminClient.query(q);
+        assertNotNull(rsp.getResponse().get("mode"));
+        assertNotNull(rsp.getResponse().get("lucene"));
+      }
     }
   }
 
@@ -256,7 +256,7 @@
     
     SolrQuery query = new SolrQuery();
     query.setQuery( "*:*" );
-    query.addSortField( "price", SolrQuery.ORDER.asc );
+    query.addSort(new SolrQuery.SortClause("price", SolrQuery.ORDER.asc));
     QueryResponse rsp = client.query( query );
     
     assertEquals(2, rsp.getResults().getNumFound());
@@ -495,7 +495,7 @@
     SolrQuery query = new SolrQuery();
     query.setQuery( "*:*" );
     query.set( CommonParams.FL, "id,price,[docid],[explain style=nl],score,aaa:[value v=aaa],ten:[value v=10 t=int]" );
-    query.addSortField( "price", SolrQuery.ORDER.asc );
+    query.addSort(new SolrQuery.SortClause("price", SolrQuery.ORDER.asc));
     QueryResponse rsp = client.query( query );
     
     SolrDocumentList out = rsp.getResults();
@@ -547,7 +547,7 @@
     if (!(client instanceof EmbeddedSolrServer)) {
       /* Do not close in case of using EmbeddedSolrServer,
        * as that would close the CoreContainer */
-      client.shutdown();
+      client.close();
     }
   }
   
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java
index 0ee63d1..d6bb4ac 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java
@@ -258,12 +258,14 @@
     assertEquals(10, cnt.get());
   }
   
-  protected void assertNumFound(String query, int num)
+  protected QueryResponse assertNumFound(String query, int num)
       throws SolrServerException, IOException {
     QueryResponse rsp = getSolrClient().query(new SolrQuery(query));
     if (num != rsp.getResults().getNumFound()) {
       fail("expected: " + num + " but had: " + rsp.getResults().getNumFound()
           + " :: " + rsp.getResults());
     }
+    return rsp;
+
   }
 }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExceptionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExceptionTest.java
index 31df23f..44234a4 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExceptionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExceptionTest.java
@@ -17,7 +17,7 @@
 
 package org.apache.solr.client.solrj;
 
-import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -34,15 +34,16 @@
     // this is a very simple test and most of the test should be considered verified 
     // if the compiler won't let you by without the try/catch
     boolean gotExpectedError = false;
+    CloseableHttpClient httpClient = null;
     try {
       // switched to a local address to avoid going out on the net, ns lookup issues, etc.
       // set a 1ms timeout to let the connection fail faster.
-      HttpClient httpClient = HttpClientUtil.createClient(null);
+      httpClient = HttpClientUtil.createClient(null);
       HttpClientUtil.setConnectionTimeout(httpClient,  1);
       SolrClient client = new HttpSolrClient("http://[ff01::114]:11235/solr/", httpClient);
       SolrQuery query = new SolrQuery("test123");
       client.query(query);
-      client.shutdown();
+      httpClient.close();
     } catch (SolrServerException sse) {
       gotExpectedError = true;
       /***
@@ -50,6 +51,8 @@
               //If one is using OpenDNS, then you don't get UnknownHostException, instead you get back that the query couldn't execute
               || (sse.getRootCause().getClass() == SolrException.class && ((SolrException) sse.getRootCause()).code() == 302 && sse.getMessage().equals("Error executing query")));
       ***/
+    } finally {
+      if (httpClient != null) HttpClientUtil.close(httpClient);
     }
     assertTrue(gotExpectedError);
   }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
index 800b48f..7b5be04 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrQueryTest.java
@@ -70,14 +70,14 @@
     b = q.removeFacetQuery("a:b");
     Assert.assertEquals(null, q.getFacetQuery());   
     
-    q.addSortField("price", SolrQuery.ORDER.asc);
-    q.addSortField("date", SolrQuery.ORDER.desc);
-    q.addSortField("qty", SolrQuery.ORDER.desc);
-    q.removeSortField("date", SolrQuery.ORDER.desc);
-    Assert.assertEquals(2, q.getSortFields().length);
-    q.removeSortField("price", SolrQuery.ORDER.asc);
-    q.removeSortField("qty", SolrQuery.ORDER.desc);
-    Assert.assertEquals(null, q.getSortFields());
+    q.addSort("price", SolrQuery.ORDER.asc);
+    q.addSort("date", SolrQuery.ORDER.desc);
+    q.addSort("qty", SolrQuery.ORDER.desc);
+    q.removeSort(new SortClause("date", SolrQuery.ORDER.desc));
+    Assert.assertEquals(2, q.getSorts().size());
+    q.removeSort(new SortClause("price", SolrQuery.ORDER.asc));
+    q.removeSort(new SortClause("qty", SolrQuery.ORDER.desc));
+    Assert.assertEquals(0, q.getSorts().size());
     
     q.addHighlightField("hl1");
     q.addHighlightField("hl2");
@@ -103,21 +103,6 @@
     
     // System.out.println(q);
   }
-  
-  /*
-   * Verifies that the old (deprecated) sort methods
-   * allows mix-and-match between the raw field and
-   * the itemized apis.
-   */
-  public void testSortFieldRawStringAndMethods() {
-    SolrQuery q = new SolrQuery("dog");
-    q.set("sort", "price asc,date desc,qty desc");
-    q.removeSortField("date", SolrQuery.ORDER.desc);
-    Assert.assertEquals(2, q.getSortFields().length);
-    q.set("sort", "price asc, date desc, qty desc");
-    q.removeSortField("date", SolrQuery.ORDER.desc);
-    Assert.assertEquals(2, q.getSortFields().length);
-  }
 
   /*
    *  Verifies that you can use removeSortField() twice, which
@@ -125,13 +110,13 @@
    */
   public void testSortFieldRemoveAfterRemove() {
     SolrQuery q = new SolrQuery("dog");
-    q.addSortField("price", SolrQuery.ORDER.asc);
-    q.addSortField("date", SolrQuery.ORDER.desc);
-    q.addSortField("qty", SolrQuery.ORDER.desc);
-    q.removeSortField("date", SolrQuery.ORDER.desc);
-    Assert.assertEquals(2, q.getSortFields().length);
-    q.removeSortField("qty", SolrQuery.ORDER.desc);
-    Assert.assertEquals(1, q.getSortFields().length);
+    q.addSort("price", SolrQuery.ORDER.asc);
+    q.addSort("date", SolrQuery.ORDER.desc);
+    q.addSort("qty", SolrQuery.ORDER.desc);
+    q.removeSort("date");
+    Assert.assertEquals(2, q.getSorts().size());
+    q.removeSort("qty");
+    Assert.assertEquals(1, q.getSorts().size());
   }
 
   /*
@@ -140,9 +125,9 @@
    */
   public void testSortFieldRemoveLast() {
     SolrQuery q = new SolrQuery("dog");
-    q.addSortField("date", SolrQuery.ORDER.desc);
-    q.addSortField("qty", SolrQuery.ORDER.desc);
-    q.removeSortField("qty", SolrQuery.ORDER.desc);
+    q.addSort("date", SolrQuery.ORDER.desc);
+    q.addSort("qty", SolrQuery.ORDER.desc);
+    q.removeSort("qty");
     Assert.assertEquals("date desc", q.getSortField());
   }
 
@@ -276,9 +261,9 @@
 
   public void testFacetSortLegacy() {
     SolrQuery q = new SolrQuery("dog");
-    assertTrue("expected default value to be true", q.getFacetSort());
-    q.setFacetSort(false);
-    assertFalse("expected set value to be false", q.getFacetSort());
+    assertEquals("expected default value to be SORT_COUNT", FacetParams.FACET_SORT_COUNT, q.getFacetSortString());
+    q.setFacetSort(FacetParams.FACET_SORT_INDEX);
+    assertEquals("expected set value to be SORT_INDEX", FacetParams.FACET_SORT_INDEX, q.getFacetSortString());
   }
 
   public void testFacetNumericRange() {
@@ -343,7 +328,7 @@
       assertEquals("foo", q.setFacetPrefix("foo").get( FacetParams.FACET_PREFIX, null ) );
       assertEquals("foo", q.setFacetPrefix("a", "foo").getFieldParam( "a", FacetParams.FACET_PREFIX, null ) );
 
-      assertEquals( Boolean.TRUE, q.setMissing(Boolean.TRUE.toString()).getBool( FacetParams.FACET_MISSING ) );
+      assertEquals( Boolean.TRUE, q.setFacetMissing(Boolean.TRUE).getBool( FacetParams.FACET_MISSING ) );
       assertEquals( Boolean.FALSE, q.setFacetMissing( Boolean.FALSE ).getBool( FacetParams.FACET_MISSING ) );      
       assertEquals( "true", q.setParam( "xxx", true ).getParams( "xxx" )[0] );
 
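
The SolrQueryTest changes above track the removal of the deprecated addSortField/removeSortField/getSortFields API in favor of the SortClause-based methods, and of the legacy boolean facet sort in favor of the FacetParams string constants. A minimal sketch of the replacement sort API, using only calls that appear in the updated test:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.SolrQuery.SortClause;

    public class SortClauseSketch {
      public static void main(String[] args) {
        SolrQuery q = new SolrQuery("*:*");
        // addSort replaces addSortField; both overloads are equivalent here.
        q.addSort("price", SolrQuery.ORDER.asc);
        q.addSort(new SortClause("date", SolrQuery.ORDER.desc));
        // removeSort replaces removeSortField; it takes a field name or a SortClause.
        q.removeSort("date");
        // getSorts replaces getSortFields and yields an empty list, not null,
        // when nothing is left.
        System.out.println(q.getSorts().size()); // 1
        System.out.println(q.getSortField());    // price asc
      }
    }
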
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java
index 49879e7..618ce0c 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrSchemalessExampleTest.java
@@ -25,6 +25,8 @@
 import org.apache.solr.client.solrj.impl.BinaryRequestWriter;
 import org.apache.solr.client.solrj.impl.BinaryResponseParser;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
 import org.apache.solr.util.ExternalPaths;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -34,7 +36,11 @@
 import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.OutputStreamWriter;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
 import java.util.Properties;
+import java.util.Set;
 
 public class SolrSchemalessExampleTest extends SolrExampleTestsBase {
   private static Logger log = LoggerFactory.getLogger(SolrSchemalessExampleTest.class);
@@ -83,6 +89,46 @@
     assertNumFound("*:*", 2);
   }
 
+  @Test
+  public void testFieldMutating() throws Exception {
+    HttpSolrClient client = (HttpSolrClient) getSolrClient();
+    client.deleteByQuery("*:*");
+    client.commit();
+    assertNumFound("*:*", 0); // make sure the delete took effect
+    // docs whose field names contain spaces or punctuation that must be mutated
+    String json = "{\"name one\": \"name\"} " +
+        "{\"name  two\" : \"name\"}" +
+        "{\"first-second\" : \"name\"}" +
+        "{\"x+y\" : \"name\"}" +
+        "{\"p%q\" : \"name\"}" +
+        "{\"p.q\" : \"name\"}" +
+        "{\"a&b\" : \"name\"}"
+        ;
+    HttpClient httpClient = client.getHttpClient();
+    HttpPost post = new HttpPost(client.getBaseURL() + "/update/json/docs");
+    post.setHeader("Content-Type", "application/json");
+    post.setEntity(new InputStreamEntity(new ByteArrayInputStream(json.getBytes("UTF-8")), -1));
+    HttpResponse response = httpClient.execute(post);
+    assertEquals(200, response.getStatusLine().getStatusCode());
+    client.commit();
+    List<String> expected = Arrays.asList(
+        "name_one",
+        "name__two",
+        "first-second",
+        "a_b",
+        "p_q",
+        "p.q",
+        "x_y");
+    Set<String> set = new HashSet<>();
+    QueryResponse rsp = assertNumFound("*:*", expected.size());
+    for (SolrDocument doc : rsp.getResults()) set.addAll(doc.getFieldNames());
+    for (String s : expected) {
+      assertTrue(s + " not created " + rsp, set.contains(s));
+    }
+
+  }
+
+
 
   @Override
   public SolrClient createNewSolrClient() {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/StartSolrJetty.java b/solr/solrj/src/test/org/apache/solr/client/solrj/StartSolrJetty.java
index 2d8d565..3c0b923 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/StartSolrJetty.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/StartSolrJetty.java
@@ -18,8 +18,9 @@
 package org.apache.solr.client.solrj;
 
 import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConnectionFactory;
 import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.bio.SocketConnector;
+import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.webapp.WebAppContext;
 
 /**
@@ -32,9 +33,9 @@
     //System.setProperty("solr.solr.home", "../../../example/solr");
 
     Server server = new Server();
-    SocketConnector connector = new SocketConnector();
+    ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory());
     // Set some timeout options to make debugging easier.
-    connector.setMaxIdleTime(1000 * 60 * 60);
+    connector.setIdleTimeout(1000 * 60 * 60);
     connector.setSoLingerTime(-1);
     connector.setPort(8983);
     server.setConnectors(new Connector[] { connector });
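
StartSolrJetty and JettyWebappTest (below) are updated for Jetty 9, where the blocking SocketConnector no longer exists: a connector is now constructed from the Server plus a connection factory, and setMaxIdleTime becomes setIdleTimeout. A minimal sketch of the new bootstrap, mirroring the values in the test:

    import org.eclipse.jetty.server.Connector;
    import org.eclipse.jetty.server.HttpConnectionFactory;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;

    public class Jetty9ConnectorSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server();
        ServerConnector connector =
            new ServerConnector(server, new HttpConnectionFactory());
        connector.setIdleTimeout(1000 * 60 * 60); // setMaxIdleTime in Jetty 8
        connector.setPort(8983);
        server.setConnectors(new Connector[] { connector });
        server.start();
        server.join();
      }
    }
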
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java b/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java
index dd538b9..444adf6 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/TestLBHttpSolrClient.java
@@ -20,7 +20,7 @@
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import junit.framework.Assert;
 import org.apache.commons.io.FileUtils;
-import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.QuickPatchThreadsFilter;
@@ -41,6 +41,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -61,7 +62,7 @@
   private static final Logger log = LoggerFactory.getLogger(TestLBHttpSolrClient.class);
 
   SolrInstance[] solr = new SolrInstance[3];
-  HttpClient httpClient;
+  CloseableHttpClient httpClient;
 
   // TODO: fix this test to not require FSDirectory
   static String savedFactory;
@@ -104,16 +105,13 @@
       doc.addField("name", solrInstance.name);
       docs.add(doc);
     }
-    HttpSolrClient client = new HttpSolrClient(solrInstance.getUrl(), httpClient);
     SolrResponseBase resp;
-    try {
+    try (HttpSolrClient client = new HttpSolrClient(solrInstance.getUrl(), httpClient)) {
       resp = client.add(docs);
       assertEquals(0, resp.getStatus());
       resp = client.commit();
-    } finally {
-      client.shutdown();
+      assertEquals(0, resp.getStatus());
     }
-    assertEquals(0, resp.getStatus());
   }
 
   @Override
@@ -123,7 +121,7 @@
         aSolr.tearDown();
       }
     }
-    httpClient.getConnectionManager().shutdown();
+    httpClient.close();
     super.tearDown();
   }
 
@@ -205,23 +203,26 @@
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT, 250);
     params.set(HttpClientUtil.PROP_SO_TIMEOUT, 250);
-    HttpClient myHttpClient = HttpClientUtil.createClient(params);
-
-    LBHttpSolrClient client = new LBHttpSolrClient(myHttpClient, s);
-    client.setAliveCheckInterval(500);
-
-    // Kill a server and test again
-    solr[1].jetty.stop();
-    solr[1].jetty = null;
-
-    // query the servers
-    for (String value : s)
-      client.query(new SolrQuery("*:*"));
-
-    // Start the killed server once again
-    solr[1].startJetty();
-    // Wait for the alive check to complete
-    waitForServer(30000, client, 3, "solr1");
+    CloseableHttpClient myHttpClient = HttpClientUtil.createClient(params);
+    try {
+      LBHttpSolrClient client = new LBHttpSolrClient(myHttpClient, s);
+      client.setAliveCheckInterval(500);
+  
+      // Kill a server and test again
+      solr[1].jetty.stop();
+      solr[1].jetty = null;
+  
+      // query the servers
+      for (String value : s)
+        client.query(new SolrQuery("*:*"));
+  
+      // Start the killed server once again
+      solr[1].startJetty();
+      // Wait for the alive check to complete
+      waitForServer(30000, client, 3, "solr1");
+    } finally {
+      myHttpClient.close();
+    }
   }
   
   // wait maximum ms for serverName to come back up
@@ -263,7 +264,7 @@
     }
 
     public String getUrl() {
-      return buildUrl(port, "/solr");
+      return buildUrl(port, "/solr/collection1");
     }
 
     public String getSchemaFile() {
@@ -298,6 +299,7 @@
       FileUtils.copyFile(SolrTestCaseJ4.getFile(getSolrConfigFile()), f);
       f = new File(confDir, "schema.xml");
       FileUtils.copyFile(SolrTestCaseJ4.getFile(getSchemaFile()), f);
+      Files.createFile(homeDir.toPath().resolve("collection1/core.properties"));
     }
 
     public void tearDown() throws Exception {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java
index 36750d1..4fb6230 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java
@@ -28,8 +28,9 @@
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.util.ExternalPaths;
 import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConnectionFactory;
 import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.bio.SocketConnector;
+import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.server.session.HashSessionIdManager;
 import org.eclipse.jetty.webapp.WebAppContext;
 import org.junit.Rule;
@@ -71,8 +72,8 @@
     server.setSessionIdManager(new HashSessionIdManager(new Random(random().nextLong())));
     new WebAppContext(server, path, context );
 
-    SocketConnector connector = new SocketConnector();
-    connector.setMaxIdleTime(1000 * 60 * 60);
+    ServerConnector connector = new ServerConnector(server, new HttpConnectionFactory());
+    connector.setIdleTimeout(1000 * 60 * 60);
     connector.setSoLingerTime(-1);
     connector.setPort(0);
     server.setConnectors(new Connector[]{connector});
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleStreamingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleStreamingTest.java
index efe07e3..88a608b 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleStreamingTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/SolrExampleStreamingTest.java
@@ -76,30 +76,30 @@
   public void testWaitOptions() throws Exception {
     // SOLR-3903
     final List<Throwable> failures = new ArrayList<>();
-    ConcurrentUpdateSolrClient concurrentClient = new ConcurrentUpdateSolrClient
+    try (ConcurrentUpdateSolrClient concurrentClient = new ConcurrentUpdateSolrClient
       (jetty.getBaseUrl().toString() + "/collection1", 2, 2) {
         @Override
         public void handleError(Throwable ex) {
           failures.add(ex);
         }
-      };
-      
-    int docId = 42;
-    for (UpdateRequest.ACTION action : EnumSet.allOf(UpdateRequest.ACTION.class)) {
-      for (boolean waitSearch : Arrays.asList(true, false)) {
-        for (boolean waitFlush : Arrays.asList(true, false)) {
-          UpdateRequest updateRequest = new UpdateRequest();
-          SolrInputDocument document = new SolrInputDocument();
-          document.addField("id", docId++ );
-          updateRequest.add(document);
-          updateRequest.setAction(action, waitSearch, waitFlush);
-          concurrentClient.request(updateRequest);
+      }) {
+
+      int docId = 42;
+      for (UpdateRequest.ACTION action : EnumSet.allOf(UpdateRequest.ACTION.class)) {
+        for (boolean waitSearch : Arrays.asList(true, false)) {
+          for (boolean waitFlush : Arrays.asList(true, false)) {
+            UpdateRequest updateRequest = new UpdateRequest();
+            SolrInputDocument document = new SolrInputDocument();
+            document.addField("id", docId++);
+            updateRequest.add(document);
+            updateRequest.setAction(action, waitSearch, waitFlush);
+            concurrentClient.request(updateRequest);
+          }
         }
       }
+      concurrentClient.commit();
+      concurrentClient.blockUntilFinished();
     }
-    concurrentClient.commit();
-    concurrentClient.blockUntilFinished();
-    concurrentClient.shutdown();
 
     if (0 != failures.size()) {
       assertEquals(failures.size() + " Unexpected Exception, starting with...", 
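
The rewrite above moves the anonymous ConcurrentUpdateSolrClient subclass into the try-with-resources header, so the client is closed even when an update throws. A minimal standalone sketch of the same pattern; URL, queue size, and thread count are placeholders:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.solr.client.solrj.impl.ConcurrentUpdateSolrClient;

    public class ErrorCollectingClientSketch {
      public static void main(String[] args) throws Exception {
        final List<Throwable> failures = new ArrayList<>();
        try (ConcurrentUpdateSolrClient client = new ConcurrentUpdateSolrClient(
                "http://localhost:8983/solr/collection1", 2, 2) {
              @Override
              public void handleError(Throwable ex) {
                failures.add(ex); // collect async errors instead of only logging
              }
            }) {
          client.commit();
          client.blockUntilFinished();
        }
        System.out.println("async failures: " + failures.size());
      }
    }
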
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java
index 5d17d78..a9a5e97 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestEmbeddedSolrServer.java
@@ -17,12 +17,8 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
+import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
 import junit.framework.Assert;
-
 import org.apache.solr.core.SolrCore;
 import org.junit.Rule;
 import org.junit.rules.RuleChain;
@@ -30,7 +26,10 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 public class TestEmbeddedSolrServer extends AbstractEmbeddedSolrServerTestCase {
 
@@ -55,9 +54,9 @@
     Assert.assertEquals(cores, ((EmbeddedSolrServer)getSolrCore1()).getCoreContainer());
   }
   
-  public void testShutdown() {
+  public void testClose() throws IOException {
     
-    EmbeddedSolrServer solrServer = (EmbeddedSolrServer)getSolrCore0();
+    EmbeddedSolrServer solrServer = (EmbeddedSolrServer) getSolrCore0();
     
     Assert.assertEquals(3, cores.getCores().size());
     List<SolrCore> solrCores = new ArrayList<>();
@@ -66,7 +65,7 @@
       solrCores.add(solrCore);
     }
     
-    solrServer.shutdown();
+    solrServer.close();
     
     Assert.assertEquals(0, cores.getCores().size());
     
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java
index 684dec8..3b94377 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java
@@ -19,16 +19,14 @@
 
 import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.request.AbstractUpdateRequest.ACTION;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.CoreAdminResponse;
 import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.core.SolrXMLCoresLocator;
-import org.apache.solr.util.TestHarness;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.RuleChain;
@@ -67,9 +65,6 @@
   @Test
   public void testProperties() throws Exception {
 
-    SolrXMLCoresLocator.NonPersistingLocator locator
-        = (SolrXMLCoresLocator.NonPersistingLocator) cores.getCoresLocator();
-
     UpdateRequest up = new UpdateRequest();
     up.setAction(ACTION.COMMIT, true, true);
     up.deleteByQuery("*:*");
@@ -139,33 +134,6 @@
     long after = mcr.getStartTime(name).getTime();
     assertTrue("should have more recent time: " + after + "," + before, after > before);
 
-    TestHarness.validateXPath(locator.xml,
-        "/solr/cores[@defaultCoreName='core0']",
-        "/solr/cores[@host='127.0.0.1']",
-        "/solr/cores[@hostPort='${hostPort:8983}']",
-        "/solr/cores[@zkClientTimeout='8000']",
-        "/solr/cores[@hostContext='${hostContext:solr}']",
-        "/solr/cores[@genericCoreNodeNames='${genericCoreNodeNames:true}']"
-        );
-    
-    CoreAdminRequest.renameCore(name, "renamed_core", coreadmin);
-
-    TestHarness.validateXPath(locator.xml,
-        "/solr/cores/core[@name='renamed_core']",
-        "/solr/cores/core[@instanceDir='${theInstanceDir:./}']",
-        "/solr/cores/core[@collection='${collection:acollection}']"
-        );
-    
-    coreadmin = getRenamedSolrAdmin();
-    File dataDir = new File(tempDir,"data3");
-    File tlogDir = new File(tempDir,"tlog3");
-
-    CoreAdminRequest.createCore("newCore", SOLR_HOME.getAbsolutePath(),
-        coreadmin, null, null, dataDir.getAbsolutePath(),
-        tlogDir.getAbsolutePath());
-
-    TestHarness.validateXPath(locator.xml, "/solr/cores/core[@name='collection1' and @instanceDir='.']");
-
   }
 
 }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java
index 1a5aed3..34dbfa2 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java
@@ -20,8 +20,8 @@
 import org.apache.http.Header;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest;
@@ -158,17 +158,16 @@
   
   @Test
   public void testTimeout() throws Exception {
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() +
-                                               "/slow/foo");
+
     SolrQuery q = new SolrQuery("*:*");
-    client.setSoTimeout(2000);
-    try {
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/slow/foo")) {
+      client.setSoTimeout(2000);
       QueryResponse response = client.query(q, METHOD.GET);
       fail("No exception thrown.");
     } catch (SolrServerException e) {
       assertTrue(e.getMessage().contains("Timeout"));
     }
-    client.shutdown();
+
   }
   
   /**
@@ -181,9 +180,7 @@
     assertEquals(status + " didn't generate an UNKNOWN error code, someone modified the list of valid ErrorCode's w/o changing this test to work a different way",
                  ErrorCode.UNKNOWN, ErrorCode.getErrorCode(status));
 
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() +
-                                               "/debug/foo");
-    try {
+    try ( HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) {
       DebugServlet.setErrorCode(status);
       try {
         SolrQuery q = new SolrQuery("foo");
@@ -194,285 +191,300 @@
         assertEquals("Unexpected exception status code", status, e.code());
       }
     } finally {
-      client.shutdown();
       DebugServlet.clear();
     }
   }
 
   @Test
-  public void testQuery(){
+  public void testQuery() throws IOException {
     DebugServlet.clear();
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo");
-    SolrQuery q = new SolrQuery("foo");
-    q.setParam("a", "\u1234");
-    try {
-      client.query(q, METHOD.GET);
-    } catch (Throwable t) {}
-    
-    //default method
-    assertEquals("get", DebugServlet.lastMethod);
-    //agent
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    //default wt
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    //default version
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    //agent
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    //keepalive
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
-    //content-type
-    assertEquals(null, DebugServlet.headers.get("Content-Type"));
-    //param encoding
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) {
+      SolrQuery q = new SolrQuery("foo");
+      q.setParam("a", "\u1234");
+      try {
+        client.query(q, METHOD.GET);
+      } catch (Throwable t) {
+      }
 
-    //POST
-    DebugServlet.clear();
-    try {
-      client.query(q, METHOD.POST);
-    } catch (Throwable t) {}
-    assertEquals("post", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
-    assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
+      //default method
+      assertEquals("get", DebugServlet.lastMethod);
+      //agent
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      //default wt
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      //default version
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      //agent
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      //keepalive
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+      //content-type
+      assertEquals(null, DebugServlet.headers.get("Content-Type"));
+      //param encoding
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
 
-    //PUT
-    DebugServlet.clear();
-    try {
-      client.query(q, METHOD.PUT);
-    } catch (Throwable t) {}
-    assertEquals("put", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
-    assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
+      //POST
+      DebugServlet.clear();
+      try {
+        client.query(q, METHOD.POST);
+      } catch (Throwable t) {
+      }
+      assertEquals("post", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+      assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
 
-    //XML/GET
-    client.setParser(new XMLResponseParser());
-    DebugServlet.clear();
-    try {
-      client.query(q, METHOD.GET);
-    } catch (Throwable t) {}
-    assertEquals("get", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+      //PUT
+      DebugServlet.clear();
+      try {
+        client.query(q, METHOD.PUT);
+      } catch (Throwable t) {
+      }
+      assertEquals("put", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+      assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
 
-    //XML/POST
-    client.setParser(new XMLResponseParser());
-    DebugServlet.clear();
-    try {
-      client.query(q, METHOD.POST);
-    } catch (Throwable t) {}
-    assertEquals("post", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
-    assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
+      //XML/GET
+      client.setParser(new XMLResponseParser());
+      DebugServlet.clear();
+      try {
+        client.query(q, METHOD.GET);
+      } catch (Throwable t) {
+      }
+      assertEquals("get", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
 
-    client.setParser(new XMLResponseParser());
-    DebugServlet.clear();
-    try {
-      client.query(q, METHOD.PUT);
-    } catch (Throwable t) {}
-    assertEquals("put", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
-    assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
-    client.shutdown();
+      //XML/POST
+      client.setParser(new XMLResponseParser());
+      DebugServlet.clear();
+      try {
+        client.query(q, METHOD.POST);
+      } catch (Throwable t) {
+      }
+      assertEquals("post", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+      assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
+
+      client.setParser(new XMLResponseParser());
+      DebugServlet.clear();
+      try {
+        client.query(q, METHOD.PUT);
+      } catch (Throwable t) {
+      }
+      assertEquals("put", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+      assertEquals("application/x-www-form-urlencoded; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
+    }
+
   }
 
   @Test
-  public void testDelete(){
+  public void testDelete() throws IOException {
     DebugServlet.clear();
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo");
-    try {
-      client.deleteById("id");
-    } catch (Throwable t) {}
-    
-    //default method
-    assertEquals("post", DebugServlet.lastMethod);
-    //agent
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    //default wt
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    //default version
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    //agent
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    //keepalive
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) {
+      try {
+        client.deleteById("id");
+      } catch (Throwable t) {
+      }
 
-    //XML
-    client.setParser(new XMLResponseParser());
-    try {
-      client.deleteByQuery("*:*");
-    } catch (Throwable t) {}
-    
-    assertEquals("post", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
-    client.shutdown();
+      //default method
+      assertEquals("post", DebugServlet.lastMethod);
+      //agent
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      //default wt
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      //default version
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      //agent
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      //keepalive
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+
+      //XML
+      client.setParser(new XMLResponseParser());
+      try {
+        client.deleteByQuery("*:*");
+      } catch (Throwable t) {
+      }
+
+      assertEquals("post", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals("keep-alive", DebugServlet.headers.get("Connection"));
+    }
+
   }
   
   @Test
-  public void testUpdate(){
+  public void testUpdate() throws IOException {
     DebugServlet.clear();
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo");
-    UpdateRequest req = new UpdateRequest();
-    req.add(new SolrInputDocument());
-    req.setParam("a", "\u1234");
-    try {
-      client.request(req);
-    } catch (Throwable t) {}
-    
-    //default method
-    assertEquals("post", DebugServlet.lastMethod);
-    //agent
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    //default wt
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    //default version
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    //content type
-    assertEquals("application/xml; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
-    //parameter encoding
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) {
+      UpdateRequest req = new UpdateRequest();
+      req.add(new SolrInputDocument());
+      req.setParam("a", "\u1234");
+      try {
+        client.request(req);
+      } catch (Throwable t) {
+      }
 
-    //XML response
-    client.setParser(new XMLResponseParser());
-    try {
-      client.request(req);
-    } catch (Throwable t) {}
-    assertEquals("post", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals("application/xml; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
-    
-    //javabin request
-    client.setParser(new BinaryResponseParser());
-    client.setRequestWriter(new BinaryRequestWriter());
-    DebugServlet.clear();
-    try {
-      client.request(req);
-    } catch (Throwable t) {}
-    assertEquals("post", DebugServlet.lastMethod);
-    assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
-    assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
-    assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
-    assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
-    assertEquals("application/javabin", DebugServlet.headers.get("Content-Type"));
-    assertEquals(1, DebugServlet.parameters.get("a").length);
-    assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
-    client.shutdown();
+      //default method
+      assertEquals("post", DebugServlet.lastMethod);
+      //agent
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      //default wt
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      //default version
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      //content type
+      assertEquals("application/xml; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
+      //parameter encoding
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+
+      //XML response
+      client.setParser(new XMLResponseParser());
+      try {
+        client.request(req);
+      } catch (Throwable t) {
+      }
+      assertEquals("post", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("xml", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals("application/xml; charset=UTF-8", DebugServlet.headers.get("Content-Type"));
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+
+      //javabin request
+      client.setParser(new BinaryResponseParser());
+      client.setRequestWriter(new BinaryRequestWriter());
+      DebugServlet.clear();
+      try {
+        client.request(req);
+      } catch (Throwable t) {
+      }
+      assertEquals("post", DebugServlet.lastMethod);
+      assertEquals("Solr[" + HttpSolrClient.class.getName() + "] 1.0", DebugServlet.headers.get("User-Agent"));
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.WT).length);
+      assertEquals("javabin", DebugServlet.parameters.get(CommonParams.WT)[0]);
+      assertEquals(1, DebugServlet.parameters.get(CommonParams.VERSION).length);
+      assertEquals(client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+      assertEquals("application/javabin", DebugServlet.headers.get("Content-Type"));
+      assertEquals(1, DebugServlet.parameters.get("a").length);
+      assertEquals("\u1234", DebugServlet.parameters.get("a")[0]);
+    }
+
   }
   
   @Test
   public void testRedirect() throws Exception {
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/redirect/foo");
-    SolrQuery q = new SolrQuery("*:*");
-    // default = false
-    try {
-      QueryResponse response = client.query(q);
-      fail("Should have thrown an exception.");
-    } catch (SolrServerException e) {
-      assertTrue(e.getMessage().contains("redirect"));
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/redirect/foo")) {
+      SolrQuery q = new SolrQuery("*:*");
+      // default = false
+      try {
+        QueryResponse response = client.query(q);
+        fail("Should have thrown an exception.");
+      } catch (SolrServerException e) {
+        assertTrue(e.getMessage().contains("redirect"));
+      }
+      client.setFollowRedirects(true);
+      try {
+        QueryResponse response = client.query(q);
+      } catch (Throwable t) {
+        fail("Exception was thrown:" + t);
+      }
+      //And back again:
+      client.setFollowRedirects(false);
+      try {
+        QueryResponse response = client.query(q);
+        fail("Should have thrown an exception.");
+      } catch (SolrServerException e) {
+        assertTrue(e.getMessage().contains("redirect"));
+      }
     }
-    client.setFollowRedirects(true);
-    try {
-      QueryResponse response = client.query(q);
-    } catch (Throwable t) {
-      fail("Exception was thrown:" + t);
-    }
-    //And back again:
-    client.setFollowRedirects(false);
-    try {
-      QueryResponse response = client.query(q);
-      fail("Should have thrown an exception.");
-    } catch (SolrServerException e) {
-      assertTrue(e.getMessage().contains("redirect"));
-    }
-    client.shutdown();
+
   }
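The recurring change in this test class, and throughout the patch, is replacing explicit shutdown() calls with try-with-resources over clients that are now Closeable. A minimal sketch of the pattern (the base URL here is hypothetical):

    try (HttpSolrClient client = new HttpSolrClient("http://127.0.0.1:8983/solr/collection1")) {
      // close() runs automatically, even if query() or an assertion throws
      QueryResponse rsp = client.query(new SolrQuery("*:*"));
      assertEquals(0, rsp.getStatus());
    }

Besides being shorter, this closes the leak the old code had whenever a failed assertion skipped past client.shutdown().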
   
   @Test
   public void testCompression() throws Exception {
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo");
-    SolrQuery q = new SolrQuery("*:*");
-    
-    // verify request header gets set
-    DebugServlet.clear();
-    try {
-      client.query(q);
-    } catch (Throwable t) {}
-    assertNull(DebugServlet.headers.get("Accept-Encoding"));
-    client.setAllowCompression(true);
-    try {
-      client.query(q);
-    } catch (Throwable t) {}
-    assertNotNull(DebugServlet.headers.get("Accept-Encoding"));
-    client.setAllowCompression(false);
-    try {
-      client.query(q);
-    } catch (Throwable t) {}
-    assertNull(DebugServlet.headers.get("Accept-Encoding"));
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) {
+      SolrQuery q = new SolrQuery("*:*");
+      
+      // verify request header gets set
+      DebugServlet.clear();
+      try {
+        client.query(q);
+      } catch (Throwable t) {}
+      assertNull(DebugServlet.headers.get("Accept-Encoding"));
+      client.setAllowCompression(true);
+      try {
+        client.query(q);
+      } catch (Throwable t) {}
+      assertNotNull(DebugServlet.headers.get("Accept-Encoding"));
+      client.setAllowCompression(false);
+      try {
+        client.query(q);
+      } catch (Throwable t) {}
+      assertNull(DebugServlet.headers.get("Accept-Encoding"));
+    }
     
     // verify server compresses output
     HttpGet get = new HttpGet(jetty.getBaseUrl().toString() + "/collection1" +
                               "/select?q=foo&wt=xml");
     get.setHeader("Accept-Encoding", "gzip");
-    HttpClient httpclient = HttpClientUtil.createClient(null);
+    CloseableHttpClient httpclient = HttpClientUtil.createClient(null);
     HttpEntity entity = null;
     try {
       HttpResponse response = httpclient.execute(get);
@@ -484,46 +496,50 @@
       if(entity!=null) {
         entity.getContent().close();
       }
-      httpclient.getConnectionManager().shutdown();
+      httpclient.close();
     }
     
     // verify compressed response can be handled
-    client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/collection1");
-    client.setAllowCompression(true);
-    q = new SolrQuery("foo");
-    QueryResponse response = client.query(q);
-    assertEquals(0, response.getStatus());
-    client.shutdown();
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/collection1")) {
+      client.setAllowCompression(true);
+      SolrQuery q = new SolrQuery("foo");
+      QueryResponse response = client.query(q);
+      assertEquals(0, response.getStatus());
+    }
   }
   
   @Test
-  public void testSetParametersExternalClient(){
-    HttpClient httpClient = HttpClientUtil.createClient(null);
-    HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString(),
-                                               httpClient);
-    try {
-      solrClient.setMaxTotalConnections(1);
-      fail("Operation should not succeed.");
-    } catch (UnsupportedOperationException e) {}
-    try {
-      solrClient.setDefaultMaxConnectionsPerHost(1);
-      fail("Operation should not succeed.");
-    } catch (UnsupportedOperationException e) {}
-    solrClient.shutdown();
-    httpClient.getConnectionManager().shutdown();
+  public void testSetParametersExternalClient() throws IOException {
+
+    try (CloseableHttpClient httpClient = HttpClientUtil.createClient(null);
+         HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString(), httpClient)) {
+
+      try {
+        solrClient.setMaxTotalConnections(1);
+        fail("Operation should not succeed.");
+      } catch (UnsupportedOperationException e) {}
+      try {
+        solrClient.setDefaultMaxConnectionsPerHost(1);
+        fail("Operation should not succeed.");
+      } catch (UnsupportedOperationException e) {}
+
+    }
   }
 
   @Test
   public void testGetRawStream() throws SolrServerException, IOException{
-    HttpClient client = HttpClientUtil.createClient(null);
-    HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString() + "/collection1",
-                                               client, null);
-    QueryRequest req = new QueryRequest();
-    NamedList response = solrClient.request(req);
-    InputStream stream = (InputStream)response.get("stream");
-    assertNotNull(stream);
-    stream.close();
-    client.getConnectionManager().shutdown();
+    CloseableHttpClient client = HttpClientUtil.createClient(null);
+    try {
+      HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString() + "/collection1",
+                                                 client, null);
+      QueryRequest req = new QueryRequest();
+      NamedList response = solrClient.request(req);
+      InputStream stream = (InputStream)response.get("stream");
+      assertNotNull(stream);
+      stream.close();
+    } finally {
+      client.close();
+    }
   }
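testGetRawStream, like testSetParametersExternalClient above, follows the ownership rule this patch settles on: a SolrClient closes only the resources it created itself, so a caller-supplied HttpClient must be closed by the caller. A sketch of that split, with a hypothetical URL:

    CloseableHttpClient http = HttpClientUtil.createClient(null);
    try (HttpSolrClient solr = new HttpSolrClient("http://127.0.0.1:8983/solr/collection1", http)) {
      solr.query(new SolrQuery("*:*"));
    } finally {
      http.close(); // caller-owned; closing the SolrClient does not close it
    }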
 
   /**
@@ -597,55 +613,55 @@
 
   @Test
   public void testQueryString() throws Exception {
-    HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() +
-                                               "/debug/foo");
 
-    // test without request query params
-    DebugServlet.clear();
-    client.setQueryParams(setOf("serverOnly"));
-    UpdateRequest req = new UpdateRequest();
-    setReqParamsOf(req, "serverOnly", "notServer");
-    try {
-      client.request(req);
-    } catch (Throwable t) {}
-    verifyServletState(client, req);
-
-    // test without server query params
-    DebugServlet.clear();
-    client.setQueryParams(setOf());
-    req = new UpdateRequest();
-    req.setQueryParams(setOf("requestOnly"));
-    setReqParamsOf(req, "requestOnly", "notRequest");
-    try {
-      client.request(req);
-    } catch (Throwable t) {}
-    verifyServletState(client, req);
-
-    // test with both request and server query params
-    DebugServlet.clear();
-    req = new UpdateRequest();
-    client.setQueryParams(setOf("serverOnly", "both"));
-    req.setQueryParams(setOf("requestOnly", "both"));
-    setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
-     try {
-      client.request(req);
-    } catch (Throwable t) {}
-    verifyServletState(client, req);
-
-    // test with both request and server query params with single stream
-    DebugServlet.clear();
-    req = new UpdateRequest();
-    req.add(new SolrInputDocument());
-    client.setQueryParams(setOf("serverOnly", "both"));
-    req.setQueryParams(setOf("requestOnly", "both"));
-    setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
-     try {
-      client.request(req);
-    } catch (Throwable t) {}
-    // NOTE: single stream requests send all the params
-    // as part of the query string.  So add "neither" to the request
-    // so it passes the verification step.
-    req.setQueryParams(setOf("requestOnly", "both", "neither"));
-    verifyServletState(client, req);
+    try (HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo")) {
+      // test without request query params
+      DebugServlet.clear();
+      client.setQueryParams(setOf("serverOnly"));
+      UpdateRequest req = new UpdateRequest();
+      setReqParamsOf(req, "serverOnly", "notServer");
+      try {
+        client.request(req);
+      } catch (Throwable t) {}
+      verifyServletState(client, req);
+  
+      // test without server query params
+      DebugServlet.clear();
+      client.setQueryParams(setOf());
+      req = new UpdateRequest();
+      req.setQueryParams(setOf("requestOnly"));
+      setReqParamsOf(req, "requestOnly", "notRequest");
+      try {
+        client.request(req);
+      } catch (Throwable t) {}
+      verifyServletState(client, req);
+  
+      // test with both request and server query params
+      DebugServlet.clear();
+      req = new UpdateRequest();
+      client.setQueryParams(setOf("serverOnly", "both"));
+      req.setQueryParams(setOf("requestOnly", "both"));
+      setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
+      try {
+        client.request(req);
+      } catch (Throwable t) {}
+      verifyServletState(client, req);
+  
+      // test with both request and server query params with single stream
+      DebugServlet.clear();
+      req = new UpdateRequest();
+      req.add(new SolrInputDocument());
+      client.setQueryParams(setOf("serverOnly", "both"));
+      req.setQueryParams(setOf("requestOnly", "both"));
+      setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
+      try {
+        client.request(req);
+      } catch (Throwable t) {}
+      // NOTE: single stream requests send all the params
+      // as part of the query string.  So add "neither" to the request
+      // so it passes the verification step.
+      req.setQueryParams(setOf("requestOnly", "both", "neither"));
+      verifyServletState(client, req);
+    }
   }
 }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientMultiConstructorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientMultiConstructorTest.java
index 47d769c..6b0ea23 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientMultiConstructorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientMultiConstructorTest.java
@@ -4,6 +4,7 @@
 import org.apache.lucene.util.TestUtil;
 import org.junit.Test;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.LinkedHashSet;
@@ -35,7 +36,7 @@
   Collection<String> hosts;
 
   @Test
-  public void testWithChroot() {
+  public void testWithChroot() throws IOException {
     boolean setOrList = random().nextBoolean();
     int numOfZKServers = TestUtil.nextInt(random(), 1, 5);
     boolean withChroot = random().nextBoolean();
@@ -43,7 +44,6 @@
     final String chroot = "/mychroot";
 
     StringBuilder sb = new StringBuilder();
-    CloudSolrClient client;
 
     if(setOrList) {
       /*
@@ -62,15 +62,16 @@
       if(i<numOfZKServers -1) sb.append(",");
     }
 
-    if(withChroot) {
+    String clientChroot = null;
+    if (withChroot) {
       sb.append(chroot);
-      client = new CloudSolrClient(hosts, "/mychroot");
-    } else {
-      client = new CloudSolrClient(hosts, null);
+      clientChroot = "/mychroot";
     }
 
-    assertEquals(sb.toString(), client.getZkHost());
-    client.shutdown();
+    try (CloudSolrClient client = new CloudSolrClient(hosts, clientChroot)) {
+      assertEquals(sb.toString(), client.getZkHost());
+    }
+
   }
   
   @Test(expected = IllegalArgumentException.class)
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
index ecad354..fb31ff6 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
@@ -20,8 +20,9 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
@@ -44,16 +45,14 @@
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.zookeeper.KeeperException;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.MalformedURLException;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -95,34 +94,69 @@
     return SOLR_HOME;
   }
   
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     // we expect this type of exception as shards go up and down...
     //ignoreException(".*");
     
     System.setProperty("numShards", Integer.toString(sliceCount));
   }
   
-  @Override
-  @After
-  public void tearDown() throws Exception {
-    super.tearDown();
-    resetExceptionIgnores();
-  }
-  
   public CloudSolrClientTest() {
     super();
     sliceCount = 2;
-    shardCount = 3;
+    fixShardCount(3);
   }
 
-  @Override
-  public void doTest() throws Exception {
+  @Test
+  public void test() throws Exception {
     allTests();
     stateVersionParamTest();
     customHttpClientTest();
+    testOverwriteOption();
+  }
+
+  private void testOverwriteOption() throws Exception {
+    String collectionName = "overwriteCollection";
+    createCollection(collectionName, controlClientCloud, 1, 1);
+    waitForRecoveriesToFinish(collectionName, false);
+    try (CloudSolrClient cloudClient = createCloudClient(collectionName)) {
+      SolrInputDocument doc1 = new SolrInputDocument();
+      doc1.addField(id, "0");
+      doc1.addField("a_t", "hello1");
+      SolrInputDocument doc2 = new SolrInputDocument();
+      doc2.addField(id, "0");
+      doc2.addField("a_t", "hello2");
+      
+      UpdateRequest request = new UpdateRequest();
+      request.add(doc1);
+      request.add(doc2);
+      request.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false);
+      NamedList<Object> response = cloudClient.request(request);
+      QueryResponse resp = cloudClient.query(new SolrQuery("*:*"));
+      
+      assertEquals("There should be one document because overwrite=true", 1, resp.getResults().getNumFound());
+      
+      doc1 = new SolrInputDocument();
+      doc1.addField(id, "1");
+      doc1.addField("a_t", "hello1");
+      doc2 = new SolrInputDocument();
+      doc2.addField(id, "1");
+      doc2.addField("a_t", "hello2");
+      
+      request = new UpdateRequest();
+      // overwrite=false
+      request.add(doc1, false);
+      request.add(doc2, false);
+      request.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, false);
+      response = cloudClient.request(request);
+      
+      resp = cloudClient.query(new SolrQuery("*:*"));
+
+      assertEquals("There should be 3 documents because there should be two id=1 docs due to overwrite=false", 3, resp.getResults().getNumFound());
+    }
   }
 
   private void allTests() throws Exception {
@@ -175,10 +209,11 @@
       params.add("q", "id:" + id);
       params.add("distrib", "false");
       QueryRequest queryRequest = new QueryRequest(params);
-      HttpSolrClient solrClient = new HttpSolrClient(url);
-      QueryResponse queryResponse = queryRequest.process(solrClient);
-      SolrDocumentList docList = queryResponse.getResults();
-      assertTrue(docList.getNumFound() == 1);
+      try (HttpSolrClient solrClient = new HttpSolrClient(url)) {
+        QueryResponse queryResponse = queryRequest.process(solrClient);
+        SolrDocumentList docList = queryResponse.getResults();
+        assertTrue(docList.getNumFound() == 1);
+      }
     }
     
     // Test the deleteById routing for UpdateRequest
@@ -196,10 +231,7 @@
     assertTrue(docs.getNumFound() == 0);
     
     // Test Multi-Threaded routed updates for UpdateRequest
-    
-    CloudSolrClient threadedClient = null;
-    try {
-      threadedClient = new CloudSolrClient(zkServer.getZkAddress());
+    try (CloudSolrClient threadedClient = new CloudSolrClient(zkServer.getZkAddress())) {
       threadedClient.setParallelUpdates(true);
       threadedClient.setDefaultCollection(collectionName);
       response = threadedClient.request(request);
@@ -218,13 +250,12 @@
         params.add("q", "id:" + id);
         params.add("distrib", "false");
         QueryRequest queryRequest = new QueryRequest(params);
-        HttpSolrClient solrClient = new HttpSolrClient(url);
-        QueryResponse queryResponse = queryRequest.process(solrClient);
-        SolrDocumentList docList = queryResponse.getResults();
-        assertTrue(docList.getNumFound() == 1);
+        try (HttpSolrClient solrClient = new HttpSolrClient(url)) {
+          QueryResponse queryResponse = queryRequest.process(solrClient);
+          SolrDocumentList docList = queryResponse.getResults();
+          assertTrue(docList.getNumFound() == 1);
+        }
       }
-    } finally {
-      threadedClient.shutdown();
     }
 
     // Test that queries with _route_ params are routed by the client
@@ -315,22 +346,25 @@
 
     controlClient.commit();
     cloudClient.commit();
-    cloudClient.shutdown();
+    cloudClient.close();
   }
 
   private Long getNumRequests(String baseUrl, String collectionName) throws
       SolrServerException, IOException {
-    HttpSolrClient client = new HttpSolrClient(baseUrl + "/"+ collectionName);
-    client.setConnectionTimeout(15000);
-    client.setSoTimeout(60000);
-    ModifiableSolrParams params = new ModifiableSolrParams();
-    params.set("qt", "/admin/mbeans");
-    params.set("stats", "true");
-    params.set("key", "standard");
-    params.set("cat", "QUERYHANDLER");
-    // use generic request to avoid extra processing of queries
-    QueryRequest req = new QueryRequest(params);
-    NamedList<Object> resp = client.request(req);
+
+    NamedList<Object> resp;
+    try (HttpSolrClient client = new HttpSolrClient(baseUrl + "/"+ collectionName)) {
+      client.setConnectionTimeout(15000);
+      client.setSoTimeout(60000);
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.set("qt", "/admin/mbeans");
+      params.set("stats", "true");
+      params.set("key", "standard");
+      params.set("cat", "QUERYHANDLER");
+      // use generic request to avoid extra processing of queries
+      QueryRequest req = new QueryRequest(params);
+      resp = client.request(req);
+    }
     return (Long) resp.findRecursive("solr-mbeans", "QUERYHANDLER",
         "standard", "stats", "requests");
   }
@@ -342,37 +376,38 @@
   }
 
   private void stateVersionParamTest() throws Exception {
-    CloudSolrClient client = createCloudClient(null);
-    try {
+
+    try (CloudSolrClient client = createCloudClient(null)) {
       String collectionName = "checkStateVerCol";
       createCollection(collectionName, client, 2, 2);
       waitForRecoveriesToFinish(collectionName, false);
       DocCollection coll = client.getZkStateReader().getClusterState().getCollection(collectionName);
       Replica r = coll.getSlices().iterator().next().getReplicas().iterator().next();
 
-      HttpSolrClient solrClient = new HttpSolrClient(r.getStr(ZkStateReader.BASE_URL_PROP) + "/"+collectionName);
-
-
       SolrQuery q = new SolrQuery().setQuery("*:*");
-
-      log.info("should work query, result {}", solrClient.query(q));
-      //no problem
-      q.setParam(CloudSolrClient.STATE_VERSION, collectionName + ":" + coll.getZNodeVersion());
-      log.info("2nd query , result {}", solrClient.query(q));
-      //no error yet good
-
-      q.setParam(CloudSolrClient.STATE_VERSION, collectionName+":"+ (coll.getZNodeVersion() -1)); //an older version expect error
-
       HttpSolrClient.RemoteSolrException sse = null;
-      try {
-        solrClient.query(q);
-        log.info("expected query error");
-      } catch (HttpSolrClient.RemoteSolrException e) {
-        sse = e;
+
+      try (HttpSolrClient solrClient = new HttpSolrClient(r.getStr(ZkStateReader.BASE_URL_PROP) + "/"+collectionName)) {
+
+        log.info("should work query, result {}", solrClient.query(q));
+        //no problem
+        q.setParam(CloudSolrClient.STATE_VERSION, collectionName + ":" + coll.getZNodeVersion());
+        log.info("2nd query , result {}", solrClient.query(q));
+        //no error yet good
+
+        q.setParam(CloudSolrClient.STATE_VERSION, collectionName + ":" + (coll.getZNodeVersion() - 1)); //an older version expect error
+
+        try {
+          solrClient.query(q);
+          log.info("expected query error");
+        } catch (HttpSolrClient.RemoteSolrException e) {
+          sse = e;
+        }
+
+        assertNotNull(sse);
+        assertEquals(" Error code should be ", sse.code(), SolrException.ErrorCode.INVALID_STATE.code);
+
       }
-      solrClient.shutdown();
-      assertNotNull(sse);
-      assertEquals(" Error code should be ", sse.code(), SolrException.ErrorCode.INVALID_STATE.code);
 
      // now send the request to another node that does not serve the collection
 
@@ -392,68 +427,56 @@
       }
       log.info("thenode which does not serve this collection{} ",theNode);
       assertNotNull(theNode);
-      solrClient = new HttpSolrClient(theNode + "/"+collectionName);
 
-      q.setParam(CloudSolrClient.STATE_VERSION, collectionName+":"+coll.getZNodeVersion());
+      try (SolrClient solrClient = new HttpSolrClient(theNode + "/"+collectionName)) {
 
-      try {
-        solrClient.query(q);
-        log.info("error was expected");
-      } catch (HttpSolrClient.RemoteSolrException e) {
-        sse = e;
+        q.setParam(CloudSolrClient.STATE_VERSION, collectionName + ":" + coll.getZNodeVersion());
+        try {
+          solrClient.query(q);
+          log.info("error was expected");
+        } catch (HttpSolrClient.RemoteSolrException e) {
+          sse = e;
+        }
+        assertNotNull(sse);
+        assertEquals(" Error code should be ", sse.code(), SolrException.ErrorCode.INVALID_STATE.code);
       }
-      solrClient.shutdown();
-      assertNotNull(sse);
-      assertEquals(" Error code should be ",  sse.code() , SolrException.ErrorCode.INVALID_STATE.code);
-    } finally {
-      client.shutdown();
     }
 
   }
 
-  public void testShutdown() throws MalformedURLException {
-    CloudSolrClient client = new CloudSolrClient("[ff01::114]:33332");
-    try {
+  public void testShutdown() throws IOException {
+    try (CloudSolrClient client = new CloudSolrClient("[ff01::114]:33332")) {
       client.setZkConnectTimeout(100);
       client.connect();
       fail("Expected exception");
     } catch (SolrException e) {
       assertTrue(e.getCause() instanceof TimeoutException);
-    } finally {
-      client.shutdown();
     }
   }
 
-  public void testWrongZkChrootTest() throws MalformedURLException {
-    CloudSolrClient client = null;
-    try {
-      client = new CloudSolrClient(zkServer.getZkAddress() + "/xyz/foo");
+  public void testWrongZkChrootTest() throws IOException {
+    try (CloudSolrClient client = new CloudSolrClient(zkServer.getZkAddress() + "/xyz/foo")) {
       client.setDefaultCollection(DEFAULT_COLLECTION);
       client.setZkClientTimeout(1000 * 60);
       client.connect();
       fail("Expected exception");
     } catch(SolrException e) {
       assertTrue(e.getCause() instanceof KeeperException);
-    } finally {
-      client.shutdown();
     }
     // see SOLR-6146 - this test will fail by virtue of the zkClient tracking performed
     // in the afterClass method of the base class
   }
 
-  public void customHttpClientTest() {
-    CloudSolrClient solrClient = null;
+  public void customHttpClientTest() throws IOException {
+
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set(HttpClientUtil.PROP_SO_TIMEOUT, 1000);
-    HttpClient client = null;
 
-    try {
-      client = HttpClientUtil.createClient(params);
-      solrClient = new CloudSolrClient(zkServer.getZkAddress(), client);
+    try (CloseableHttpClient client = HttpClientUtil.createClient(params);
+         CloudSolrClient solrClient = new CloudSolrClient(zkServer.getZkAddress(), client)) {
+
       assertTrue(solrClient.getLbClient().getHttpClient() == client);
-    } finally {
-      solrClient.shutdown();
-      client.getConnectionManager().shutdown();
+
     }
   }
 }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ExternalHttpClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ExternalHttpClientTest.java
index 99f7394..4a918f0 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ExternalHttpClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ExternalHttpClientTest.java
@@ -25,7 +25,6 @@
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.response.QueryResponse;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -53,25 +52,21 @@
    */
   @Test
   public void testTimeoutWithExternalClient() throws Exception {
+
     HttpClientBuilder builder = HttpClientBuilder.create();
     RequestConfig config = RequestConfig.custom().setSocketTimeout(2000).build();
     builder.setDefaultRequestConfig(config);
-    HttpSolrClient solrClient = null;
-    try (CloseableHttpClient httpClient = builder.build()) {
-      solrClient = new HttpSolrClient(jetty.getBaseUrl().toString() +
-          "/slow/foo", httpClient);
+
+    try (CloseableHttpClient httpClient = builder.build();
+         HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString() + "/slow/foo", httpClient)) {
 
       SolrQuery q = new SolrQuery("*:*");
       try {
-        QueryResponse response = solrClient.query(q, SolrRequest.METHOD.GET);
+        solrClient.query(q, SolrRequest.METHOD.GET);
         fail("No exception thrown.");
       } catch (SolrServerException e) {
         assertTrue(e.getMessage().contains("Timeout"));
       }
-    } finally {
-      if (solrClient != null) {
-        solrClient.shutdown();
-      }
     }
   }
 }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClientUtilTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClientUtilTest.java
index 927453b..190a330 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClientUtilTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClientUtilTest.java
@@ -19,6 +19,7 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
+import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.http.auth.AuthScope;
@@ -28,6 +29,7 @@
 import org.apache.http.conn.ssl.BrowserCompatHostnameVerifier;
 import org.apache.http.conn.ssl.SSLSocketFactory;
 import org.apache.http.conn.ssl.X509HostnameVerifier;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.http.impl.conn.PoolingClientConnectionManager;
 import org.apache.http.params.HttpConnectionParams;
@@ -39,9 +41,9 @@
 public class HttpClientUtilTest {
 
   @Test
-  public void testNoParamsSucceeds() {
-    HttpClient clien = HttpClientUtil.createClient(null);
-    clien.getConnectionManager().shutdown();
+  public void testNoParamsSucceeds() throws IOException {
+    CloseableHttpClient client = HttpClientUtil.createClient(null);
+    client.close();
   }
 
   @Test
@@ -57,26 +59,29 @@
     params.set(HttpClientUtil.PROP_SO_TIMEOUT, 42345);
     params.set(HttpClientUtil.PROP_USE_RETRY, false);
     DefaultHttpClient client = (DefaultHttpClient) HttpClientUtil.createClient(params);
-    assertEquals(12345, HttpConnectionParams.getConnectionTimeout(client.getParams()));
-    assertEquals(PoolingClientConnectionManager.class, client.getConnectionManager().getClass());
-    assertEquals(22345, ((PoolingClientConnectionManager)client.getConnectionManager()).getMaxTotal());
-    assertEquals(32345, ((PoolingClientConnectionManager)client.getConnectionManager()).getDefaultMaxPerRoute());
-    assertEquals(42345, HttpConnectionParams.getSoTimeout(client.getParams()));
-    assertEquals(HttpClientUtil.NO_RETRY, client.getHttpRequestRetryHandler());
-    assertEquals("pass", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getPassword());
-    assertEquals("user", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getUserPrincipal().getName());
-    assertEquals(true, client.getParams().getParameter(ClientPNames.HANDLE_REDIRECTS));
-    client.getConnectionManager().shutdown();
+    try {
+      assertEquals(12345, HttpConnectionParams.getConnectionTimeout(client.getParams()));
+      assertEquals(PoolingClientConnectionManager.class, client.getConnectionManager().getClass());
+      assertEquals(22345, ((PoolingClientConnectionManager)client.getConnectionManager()).getMaxTotal());
+      assertEquals(32345, ((PoolingClientConnectionManager)client.getConnectionManager()).getDefaultMaxPerRoute());
+      assertEquals(42345, HttpConnectionParams.getSoTimeout(client.getParams()));
+      assertEquals(HttpClientUtil.NO_RETRY, client.getHttpRequestRetryHandler());
+      assertEquals("pass", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getPassword());
+      assertEquals("user", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getUserPrincipal().getName());
+      assertEquals(true, client.getParams().getParameter(ClientPNames.HANDLE_REDIRECTS));
+    } finally {
+      client.close();
+    }
   }
   
   @Test
-  public void testReplaceConfigurer(){
+  public void testReplaceConfigurer() throws IOException {
     
     try {
     final AtomicInteger counter = new AtomicInteger();
     HttpClientConfigurer custom = new HttpClientConfigurer(){
       @Override
-      protected void configure(DefaultHttpClient httpClient, SolrParams config) {
+      public void configure(DefaultHttpClient httpClient, SolrParams config) {
         super.configure(httpClient, config);
         counter.set(config.getInt("custom-param", -1));
       }
@@ -87,7 +92,7 @@
     
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("custom-param", 5);
-    HttpClientUtil.createClient(params).getConnectionManager().shutdown();
+    HttpClientUtil.createClient(params).close();
     assertEquals(5, counter.get());
     } finally {
       //restore default configurer
@@ -98,26 +103,36 @@
   
   @Test
   @SuppressWarnings("deprecation")
-  public void testSSLSystemProperties() {
+  public void testSSLSystemProperties() throws IOException {
+    CloseableHttpClient client = HttpClientUtil.createClient(null);
     try {
       SSLTestConfig.setSSLSystemProperties();
       assertNotNull("HTTPS scheme could not be created using the javax.net.ssl.* system properties.", 
-          HttpClientUtil.createClient(null).getConnectionManager().getSchemeRegistry().get("https"));
+          client.getConnectionManager().getSchemeRegistry().get("https"));
       
       System.clearProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME);
-      assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
+      client.close();
+      client = HttpClientUtil.createClient(null);
+      assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(client).getClass());
       
       System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, "true");
-      assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
+      client.close();
+      client = HttpClientUtil.createClient(null);
+      assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(client).getClass());
       
       System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, "");
-      assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
+      client.close();
+      client = HttpClientUtil.createClient(null);
+      assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(client).getClass());
       
       System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, "false");
-      assertEquals(AllowAllHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
+      client.close();
+      client = HttpClientUtil.createClient(null);
+      assertEquals(AllowAllHostnameVerifier.class, getHostnameVerifier(client).getClass());
     } finally {
       SSLTestConfig.clearSSLSystemProperties();
       System.clearProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME);
+      client.close();
     }
   }
   
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientTest.java
index 969b3a8..8d546f0 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttpSolrClientTest.java
@@ -3,11 +3,12 @@
  */
 package org.apache.solr.client.solrj.impl;
 
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.solr.client.solrj.ResponseParser;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.junit.Test;
 
-import java.net.MalformedURLException;
+import java.io.IOException;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
@@ -38,19 +39,22 @@
    * Test method for {@link LBHttpSolrClient#LBHttpSolrClient(org.apache.http.client.HttpClient, org.apache.solr.client.solrj.ResponseParser, java.lang.String[])}.
    * 
    * Validate that the parser passed in is used in the <code>HttpSolrClient</code> instances created.
-   * 
-   * @throws MalformedURLException If URL is invalid, no URL passed, so won't happen.
    */
   @Test
-  public void testLBHttpSolrClientHttpClientResponseParserStringArray() throws MalformedURLException {
-    LBHttpSolrClient testClient = new LBHttpSolrClient(HttpClientUtil.createClient(new ModifiableSolrParams()), (ResponseParser) null);
-    HttpSolrClient httpSolrClient = testClient.makeSolrClient("http://127.0.0.1:8080");
-    assertNull("Generated server should have null parser.", httpSolrClient.getParser());
+  public void testLBHttpSolrClientHttpClientResponseParserStringArray() throws IOException {
+
+    try (CloseableHttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
+         LBHttpSolrClient testClient = new LBHttpSolrClient(httpClient, (ResponseParser) null);
+         HttpSolrClient httpSolrClient = testClient.makeSolrClient("http://127.0.0.1:8080")) {
+      assertNull("Generated server should have null parser.", httpSolrClient.getParser());
+    }
 
     ResponseParser parser = new BinaryResponseParser();
-    testClient = new LBHttpSolrClient(HttpClientUtil.createClient(new ModifiableSolrParams()), parser);
-    httpSolrClient = testClient.makeSolrClient("http://127.0.0.1:8080");
-    assertEquals("Invalid parser passed to generated server.", parser, httpSolrClient.getParser());
+    try (CloseableHttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
+         LBHttpSolrClient testClient = new LBHttpSolrClient(httpClient, parser);
+         HttpSolrClient httpSolrClient = testClient.makeSolrClient("http://127.0.0.1:8080")) {
+      assertEquals("Invalid parser passed to generated server.", parser, httpSolrClient.getParser());
+    }
   }
   
 }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
index e187e63..ac71b99 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
@@ -95,44 +95,46 @@
   @Test
   public void testCustomUlogDir() throws Exception {
     
-    SolrClient client = getSolrAdmin();
-    
-    File dataDir = createTempDir("data").toFile();
-    
-    File newCoreInstanceDir = createTempDir("instance").toFile();
-    
-    File instanceDir = new File(cores.getSolrHome());
-    FileUtils.copyDirectory(instanceDir, new File(newCoreInstanceDir,
-        "newcore"));
+    try (SolrClient client = getSolrAdmin()) {
 
-    CoreAdminRequest.Create req = new CoreAdminRequest.Create();
-    req.setCoreName("newcore");
-    req.setInstanceDir(newCoreInstanceDir.getAbsolutePath() + File.separator + "newcore");
-    req.setDataDir(dataDir.getAbsolutePath());
-    req.setUlogDir(new File(dataDir, "ulog").getAbsolutePath());
+      File dataDir = createTempDir("data").toFile();
 
-    // These should be the inverse of defaults.
-    req.setIsLoadOnStartup(false);
-    req.setIsTransient(true);
-    req.process(client);
+      File newCoreInstanceDir = createTempDir("instance").toFile();
 
-    // Show that the newly-created core has values for load on startup and transient different than defaults due to the
-    // above.
-    File logDir;
-    try (SolrCore coreProveIt = cores.getCore("collection1");
-         SolrCore core = cores.getCore("newcore")) {
+      File instanceDir = new File(cores.getSolrHome());
+      FileUtils.copyDirectory(instanceDir, new File(newCoreInstanceDir,
+          "newcore"));
 
-      assertTrue(core.getCoreDescriptor().isTransient());
-      assertFalse(coreProveIt.getCoreDescriptor().isTransient());
+      CoreAdminRequest.Create req = new CoreAdminRequest.Create();
+      req.setCoreName("newcore");
+      req.setInstanceDir(newCoreInstanceDir.getAbsolutePath() + File.separator + "newcore");
+      req.setDataDir(dataDir.getAbsolutePath());
+      req.setUlogDir(new File(dataDir, "ulog").getAbsolutePath());
+      req.setConfigSet("shared");
 
-      assertFalse(core.getCoreDescriptor().isLoadOnStartup());
-      assertTrue(coreProveIt.getCoreDescriptor().isLoadOnStartup());
+      // These should be the inverse of defaults.
+      req.setIsLoadOnStartup(false);
+      req.setIsTransient(true);
+      req.process(client);
 
-      logDir = new File(core.getUpdateHandler().getUpdateLog().getLogDir());
+      // Show that the newly-created core has values for load on startup and transient different than defaults due to the
+      // above.
+      File logDir;
+      try (SolrCore coreProveIt = cores.getCore("collection1");
+           SolrCore core = cores.getCore("newcore")) {
+
+        assertTrue(core.getCoreDescriptor().isTransient());
+        assertFalse(coreProveIt.getCoreDescriptor().isTransient());
+
+        assertFalse(core.getCoreDescriptor().isLoadOnStartup());
+        assertTrue(coreProveIt.getCoreDescriptor().isLoadOnStartup());
+
+        logDir = new File(core.getUpdateHandler().getUpdateLog().getLogDir());
+      }
+
+      assertEquals(new File(dataDir, "ulog" + File.separator + "tlog").getAbsolutePath(), logDir.getAbsolutePath());
+
     }
-
-    assertEquals(new File(dataDir, "ulog" + File.separator + "tlog").getAbsolutePath(), logDir.getAbsolutePath());
-    client.shutdown();
     
   }
   
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java
index 3f2b216..93ce201 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NoOpResponseParserTest.java
@@ -73,15 +73,16 @@
    */
   @Test
   public void testQueryParse() throws Exception {
-    HttpSolrClient client = (HttpSolrClient) createNewSolrClient();
-    SolrQuery query = new SolrQuery("id:1234");
-    QueryRequest req = new QueryRequest(query);
-    client.setParser(new NoOpResponseParser());
-    NamedList<Object> resp = client.request(req);
-    String responseString = (String) resp.get("response");
 
-    assertResponse(responseString);
-    client.shutdown();
+    try (HttpSolrClient client = (HttpSolrClient) createNewSolrClient()) {
+      SolrQuery query = new SolrQuery("id:1234");
+      QueryRequest req = new QueryRequest(query);
+      client.setParser(new NoOpResponseParser());
+      NamedList<Object> resp = client.request(req);
+      String responseString = (String) resp.get("response");
+      assertResponse(responseString);
+    }
+
   }
 
   private void assertResponse(String responseString) throws IOException {
diff --git a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java
index 244649b8..f8751db 100644
--- a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java
+++ b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java
@@ -39,12 +39,20 @@
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.Rule;
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -53,6 +61,7 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 import java.util.Random;
 import java.util.Set;
 import java.util.SortedMap;
@@ -61,6 +70,20 @@
 /**
  * Helper base class for distributed search test cases
  *
+ * By default, every test in a sub-class is executed repeatedly, with the
+ * number of shards set to 1, 2, ... DEFAULT_MAX_SHARD_COUNT in turn.
+ *
+ * In general, it's preferable to annotate the tests in sub-classes with
+ * {@literal @}ShardsFixed(num = N) or {@literal @}ShardsRepeat(min = M, max = N)
+ * to indicate whether the test should be called once, with a fixed number of shards,
+ * or called repeatedly with the number of shards ranging from M to N
+ * (a usage sketch follows this hunk).
+ *
+ * In some cases though, if the number of shards has to be fixed but the number
+ * itself is dynamic, or if it has to be set as a default for all sub-classes
+ * of a sub-class, a fixShardCount(N) method is available; it is identical to
+ * {@literal @}ShardsFixed(num = N) for all tests without annotations in that class
+ * hierarchy. Ideally this method should be retired in favour of better annotations.
+ *
  * @since solr 1.5
  */
 public abstract class BaseDistributedSearchTestCase extends SolrTestCaseJ4 {
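For orientation, a minimal sketch of how a sub-class would use the new shard-count annotations; the class and test names are hypothetical, while the annotation signatures and fixShardCount(int) are the ones introduced below:

    public class ExampleDistribTest extends BaseDistributedSearchTestCase {

      @Test
      @ShardsFixed(num = 3)
      public void testRunsOnceWithThreeShards() throws Exception {
        // executed once, with exactly 3 shards
      }

      @Test
      @ShardsRepeat(min = 1, max = 2)
      public void testRepeatsPerShardCount() throws Exception {
        // executed twice: once with 1 shard, once with 2
      }

      @Test
      public void testUnannotated() throws Exception {
        // no annotation: repeats for 1..DEFAULT_MAX_SHARD_COUNT shards,
        // unless fixShardCount(N) was called, e.g. in the constructor
      }
    }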
@@ -172,16 +195,19 @@
                                      "[ff01::213]:33332" + context};
   }
 
-  protected int shardCount = 4;      // the actual number of solr cores that will be created in the cluster
+  private final static int DEFAULT_MAX_SHARD_COUNT = 3;
 
-  /**
-   * Sub classes can set this flag in their constructor to true if they
-   * want to fix the number of shards to 'shardCount'
-   *
-   * The default is false which means that test will be executed with
-   * 1, 2, 3, ....shardCount number of shards repeatedly
-   */
-  protected boolean fixShardCount = false;
+  private int shardCount = -1;      // the actual number of solr cores that will be created in the cluster
+  public int getShardCount() {
+    return shardCount;
+  }
+
+  private boolean isShardCountFixed = false;
+
+  public void fixShardCount(int count) {
+    isShardCountFixed = true;
+    shardCount = count;
+  }
 
   protected JettySolrRunner controlJetty;
   protected List<SolrClient> clients = new ArrayList<>();
@@ -243,13 +269,6 @@
 
   public static RandVal rdate = new RandDate();
 
-  /**
-   * Perform the actual tests here
-   *
-   * @throws Exception on error
-   */
-  public abstract void doTest() throws Exception;
-
   public static String[] fieldNames = new String[]{"n_ti1", "n_f1", "n_tf1", "n_d1", "n_td1", "n_l1", "n_tl1", "n_dt1", "n_tdt1"};
   public static RandVal[] randVals = new RandVal[]{rint, rfloat, rfloat, rdouble, rdouble, rlong, rlong, rdate, rdate};
 
@@ -268,42 +287,50 @@
   public String getSolrHome() {
     return SolrTestCaseJ4.TEST_HOME();
   }
-  
-  @Override
-  public void setUp() throws Exception {
+
+  private boolean distribSetUpCalled = false;
+  public void distribSetUp() throws Exception {
+    distribSetUpCalled = true;
     SolrTestCaseJ4.resetExceptionIgnores();  // ignore anything with ignore_exception in it
-    super.setUp();
     System.setProperty("solr.test.sys.prop1", "propone");
     System.setProperty("solr.test.sys.prop2", "proptwo");
     testDir = createTempDir().toFile();
   }
 
-  @Override
-  public void tearDown() throws Exception {
+  private boolean distribTearDownCalled = false;
+  public void distribTearDown() throws Exception {
+    distribTearDownCalled = true;
     destroyServers();
-    super.tearDown();
   }
 
   protected JettySolrRunner createControlJetty() throws Exception {
+    writeCoreProperties(testDir.toPath().resolve("control/cores"), DEFAULT_TEST_CORENAME);
+    System.setProperty("coreRootDirectory", testDir.toPath().resolve("control").toString());
     JettySolrRunner jetty = createJetty(new File(getSolrHome()), testDir + "/control/data", null, getSolrConfigFile(), getSchemaFile());
     return jetty;
   }
-  
-  protected void createServers(int numShards) throws Exception {
-    controlJetty = createControlJetty();
 
+  protected void createServers(int numShards) throws Exception {
+
+    System.setProperty("configSetBaseDir", getSolrHome());
+
+    controlJetty = createControlJetty();
     controlClient = createNewSolrClient(controlJetty.getLocalPort());
 
     shardsArr = new String[numShards];
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < numShards; i++) {
       if (sb.length() > 0) sb.append(',');
+      String shardname = "shard" + i;
+      Path coresPath = testDir.toPath().resolve(shardname).resolve("cores");
+      writeCoreProperties(coresPath, DEFAULT_TEST_CORENAME);
+      System.setProperty("coreRootDirectory", testDir.toPath().resolve(shardname).toString());
       JettySolrRunner j = createJetty(new File(getSolrHome()),
           testDir + "/shard" + i + "/data", null, getSolrConfigFile(),
           getSchemaFile());
       jettys.add(j);
       clients.add(createNewSolrClient(j.getLocalPort()));
-      String shardStr = buildUrl(j.getLocalPort());
+      String shardStr = buildUrl(j.getLocalPort()) + "/" + DEFAULT_TEST_CORENAME;
       shardsArr[i] = shardStr;
       sb.append(shardStr);
     }
@@ -340,10 +367,10 @@
   }
 
   protected void destroyServers() throws Exception {
-    controlJetty.stop();
-    ((HttpSolrClient) controlClient).shutdown();
+    if (controlJetty != null) controlJetty.stop();
+    if (controlClient != null)  controlClient.close();
     for (JettySolrRunner jetty : jettys) jetty.stop();
-    for (SolrClient client : clients) ((HttpSolrClient) client).shutdown();
+    for (SolrClient client : clients) client.close();
     clients.clear();
     jettys.clear();
   }
@@ -389,7 +416,7 @@
   protected SolrClient createNewSolrClient(int port) {
     try {
       // setup the client...
-      HttpSolrClient client = new HttpSolrClient(buildUrl(port));
+      HttpSolrClient client = new HttpSolrClient(buildUrl(port) + "/" + DEFAULT_TEST_CORENAME);
       client.setConnectionTimeout(DEFAULT_CONNECTION_TIMEOUT);
       client.setSoTimeout(90000);
       client.setDefaultMaxConnectionsPerHost(100);
@@ -860,23 +887,107 @@
     compareSolrResponses(a, b);
   }
 
-  @Test
-  public void testDistribSearch() throws Exception {
-    if (fixShardCount) {
-      createServers(shardCount);
-      RandVal.uniqueValues = new HashSet(); //reset random values
-      doTest();
-      destroyServers();
-    } else {
-      for (int nServers = 1; nServers < shardCount; nServers++) {
-        createServers(nServers);
+  @Retention(RetentionPolicy.RUNTIME)
+  @Target(ElementType.METHOD)
+  public @interface ShardsRepeat {
+    public abstract int min() default 1;
+    public abstract int max() default DEFAULT_MAX_SHARD_COUNT;
+  }
+
+  @Retention(RetentionPolicy.RUNTIME)
+  @Target(ElementType.METHOD)
+  public @interface ShardsFixed {
+    public abstract int num();
+  }
+
+  public class ShardsRepeatRule implements TestRule {
+
+    private abstract class ShardsStatement extends Statement {
+      abstract protected void callStatement() throws Throwable;
+
+      @Override
+      public void evaluate() throws Throwable {
+        distribSetUp();
+        if (! distribSetUpCalled) {
+          Assert.fail("One of the overrides of distribSetUp does not propagate the call.");
+        }
+        try {
+          callStatement();
+        } finally {
+          distribTearDown();
+          if (! distribTearDownCalled) {
+            Assert.fail("One of the overrides of distribTearDown does not propagate the call.");
+          }
+        }
+      }
+    }
+
+    private class ShardsFixedStatement extends ShardsStatement {
+
+      private final int numShards;
+      private final Statement statement;
+
+      private ShardsFixedStatement(int numShards, Statement statement) {
+        this.numShards = numShards;
+        this.statement = statement;
+      }
+
+      @Override
+      public void callStatement() throws Throwable {
+        fixShardCount(numShards);
+        createServers(numShards);
         RandVal.uniqueValues = new HashSet(); //reset random values
-        doTest();
+        statement.evaluate();
         destroyServers();
       }
     }
+
+    private class ShardsRepeatStatement extends ShardsStatement {
+
+      private final int min;
+      private final int max;
+      private final Statement statement;
+
+      private ShardsRepeatStatement(int min, int max, Statement statement) {
+        this.min = min;
+        this.max = max;
+        this.statement = statement;
+      }
+
+      @Override
+      public void callStatement() throws Throwable {
+        for (shardCount = min; shardCount <= max; shardCount++) {
+          createServers(shardCount);
+          RandVal.uniqueValues = new HashSet(); //reset random values
+          statement.evaluate();
+          destroyServers();
+        }
+      }
+    }
+
+    @Override
+    public Statement apply(Statement statement, Description description) {
+      ShardsFixed fixed = description.getAnnotation(ShardsFixed.class);
+      ShardsRepeat repeat = description.getAnnotation(ShardsRepeat.class);
+      if (fixed != null && repeat != null) {
+        throw new RuntimeException("ShardsFixed and ShardsRepeat annotations can't coexist");
+      }
+      else if (fixed != null) {
+        return new ShardsFixedStatement(fixed.num(), statement);
+      }
+      else if (repeat != null) {
+        return new ShardsRepeatStatement(repeat.min(), repeat.max(), statement);
+      }
+      else {
+        return (isShardCountFixed ? new ShardsFixedStatement(shardCount, statement) :
+          new ShardsRepeatStatement(1, DEFAULT_MAX_SHARD_COUNT, statement));
+      }
+    }
   }
 
+  @Rule
+  public ShardsRepeatRule repeatRule = new ShardsRepeatRule();
+
   public static Object[] getRandFields(String[] fields, RandVal[] randVals) {
     Object[] o = new Object[fields.length * 2];
     for (int i = 0; i < fields.length; i++) {
@@ -931,6 +1042,20 @@
     if (solrxml != null) {
       FileUtils.copyFile(new File(getSolrHome(), solrxml), new File(jettyHome, "solr.xml"));
     }
+
+    Properties coreProperties = new Properties();
+    coreProperties.setProperty("name", "collection1");
+    coreProperties.setProperty("shard", "${shard:}");
+    coreProperties.setProperty("collection", "${collection:collection1}");
+    coreProperties.setProperty("config", "${solrconfig:solrconfig.xml}");
+    coreProperties.setProperty("schema", "${schema:schema.xml}");
+    coreProperties.setProperty("coreNodeName", "${coreNodeName:}");
+
+    writeCoreProperties(jettyHome.toPath().resolve("cores/collection1"), coreProperties, "collection1");
+
+    // The properties above replace the old solr.xml core declaration:
+    //   <core name="collection1" instanceDir="collection1" shard="${shard:}"
+    //         collection="${collection:collection1}" config="${solrconfig:solrconfig.xml}" schema="${schema:schema.xml}"
+    //         coreNodeName="${coreNodeName:}"/>
   }
 
 }
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java b/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java
index a33a938..d91c38f 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrJettyTestBase.java
@@ -32,6 +32,7 @@
 
 import java.io.File;
 import java.io.OutputStreamWriter;
+import java.nio.file.Path;
 import java.util.Properties;
 import java.util.SortedMap;
 
@@ -57,6 +58,19 @@
     // creates the data dir
     initCore(null, null, solrHome);
 
+    Path coresDir = createTempDir().resolve("cores");
+
+    System.setProperty("coreRootDirectory", coresDir.toString());
+    System.setProperty("configSetBaseDir", solrHome);
+
+    Properties props = new Properties();
+    props.setProperty("name", DEFAULT_TEST_CORENAME);
+    props.setProperty("configSet", "collection1");
+    props.setProperty("config", "${solrconfig:solrconfig.xml}");
+    props.setProperty("schema", "${schema:schema.xml}");
+
+    writeCoreProperties(coresDir.resolve("core"), props, "RestTestBase");
+
     ignoreException("maxWarmingSearchers");
 
     context = context==null ? "/solr" : context;
@@ -85,7 +99,7 @@
       jetty.stop();
       jetty = null;
     }
-    if (client != null) client.shutdown();
+    if (client != null) client.close();
     client = null;
   }
 
@@ -120,7 +134,7 @@
         throw new RuntimeException( ex );
       }
     } else {
-      return new EmbeddedSolrServer( h.getCoreContainer(), "" );
+      return new EmbeddedSolrServer( h.getCoreContainer(), "collection1" );
     }
   }
 
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 6f8edc7..373bb4e 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -40,19 +40,18 @@
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.SolrInputField;
-import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.common.util.XML;
 import org.apache.solr.core.ConfigSolr;
-import org.apache.solr.core.ConfigSolrXmlOld;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
-import org.apache.solr.handler.JsonUpdateRequestHandler;
+import org.apache.solr.handler.UpdateRequestHandler;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
@@ -81,9 +80,11 @@
 import javax.xml.xpath.XPathExpressionException;
 import java.io.File;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.Reader;
 import java.io.StringReader;
 import java.io.StringWriter;
+import java.io.Writer;
 import java.lang.annotation.Documented;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Inherited;
@@ -91,6 +92,8 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 import java.net.URL;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -123,9 +126,32 @@
 })
 @SuppressSysoutChecks(bugUrl = "Solr dumps tons of logs to console.")
 public abstract class SolrTestCaseJ4 extends LuceneTestCase {
-  private static String coreName = ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME;
+
+  public static final String DEFAULT_TEST_CORENAME = "collection1";
+
+  private static String coreName = DEFAULT_TEST_CORENAME;
+
   public static int DEFAULT_CONNECTION_TIMEOUT = 60000;  // default socket connection timeout in ms
 
+  protected void writeCoreProperties(Path coreDirectory, String corename) throws IOException {
+    Properties props = new Properties();
+    props.setProperty("name", corename);
+    props.setProperty("configSet", "collection1");
+    props.setProperty("config", "${solrconfig:solrconfig.xml}");
+    props.setProperty("schema", "${schema:schema.xml}");
+
+    writeCoreProperties(coreDirectory, props, this.getTestName());
+  }
+
+  public static void writeCoreProperties(Path coreDirectory, Properties properties, String testname) throws IOException {
+    log.info("Writing core.properties file to {}", coreDirectory);
+    Files.createDirectories(coreDirectory);
+    try (Writer writer =
+             new OutputStreamWriter(Files.newOutputStream(coreDirectory.resolve("core.properties")), Charset.forName("UTF-8"))) {
+      properties.store(writer, testname);
+    }
+  }
+
   /**
    * Annotation for test classes that want to disable SSL
    */
@@ -165,7 +191,6 @@
     System.setProperty("solr.clustering.enabled", "false");
     setupLogging();
     startTrackingSearchers();
-    startTrackingZkClients();
     ignoreException("ignore_exception");
     newRandomConfig();
     
@@ -185,9 +210,9 @@
       deleteCore();
       resetExceptionIgnores();
       endTrackingSearchers();
-      endTrackingZkClients();
+      assertTrue("Some resources were not closed, shutdown, or released.", ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
       resetFactory();
-      coreName = ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME;
+      coreName = DEFAULT_TEST_CORENAME;
     } finally {
       initCoreDataDir = null;
       System.clearProperty("zookeeper.forceSync");
@@ -289,7 +314,7 @@
     FileUtils.write(tmpFile, xmlStr, IOUtils.UTF_8);
 
     SolrResourceLoader loader = new SolrResourceLoader(solrHome.getAbsolutePath());
-    h = new TestHarness(loader, ConfigSolr.fromFile(loader, new File(solrHome, "solr.xml")));
+    h = new TestHarness(ConfigSolr.fromFile(loader, new File(solrHome, "solr.xml")));
     lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
   }
   
@@ -410,12 +435,6 @@
       numOpens = numCloses = 0;
     }
   }
-  static long zkClientNumOpens;
-  static long zkClientNumCloses;
-  public static void startTrackingZkClients() {
-    zkClientNumOpens = SolrZkClient.numOpens.get();
-    zkClientNumCloses = SolrZkClient.numCloses.get();
-  }
 
   public static void endTrackingSearchers() {
      long endNumOpens = SolrIndexSearcher.numOpens.get();
@@ -449,20 +468,6 @@
      }
   }
   
-  public static void endTrackingZkClients() {
-    long endNumOpens = SolrZkClient.numOpens.get();
-    long endNumCloses = SolrZkClient.numCloses.get();
-
-    SolrZkClient.numOpens.getAndSet(0);
-    SolrZkClient.numCloses.getAndSet(0);
-
-    if (endNumOpens-zkClientNumOpens != endNumCloses-zkClientNumCloses) {
-      String msg = "ERROR: SolrZkClient opens=" + (endNumOpens-zkClientNumOpens) + " closes=" + (endNumCloses-zkClientNumCloses);
-      log.error(msg);
-      fail(msg);
-    }
- }
-  
   /** Causes an exception matching the regex pattern to not be logged. */
   public static void ignoreException(String pattern) {
     if (SolrException.ignorePatterns == null)
@@ -591,6 +596,19 @@
     return h.getCoreContainer();
   }
 
+  public static CoreContainer createCoreContainer(ConfigSolr config) {
+    testSolrHome = config.getSolrResourceLoader().getInstanceDir();
+    h = new TestHarness(config);
+    lrf = h.getRequestFactory("standard", 0, 20, CommonParams.VERSION, "2.2");
+    return h.getCoreContainer();
+  }
+
+  public static CoreContainer createCoreContainer(String coreName, String dataDir, String solrConfig, String schema) {
+    CoreContainer cc = createCoreContainer(new TestHarness.TestConfigSolr(coreName, dataDir, solrConfig, schema));
+    h.coreName = coreName;
+    return cc;
+  }
+
   public static CoreContainer createDefaultCoreContainer(String solrHome) {
     testSolrHome = checkNotNull(solrHome);
     h = new TestHarness("collection1", initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml");
@@ -1044,20 +1062,6 @@
     @Override
     public String toString() { return xml; }
   }
-
-  /**
-   * @see IOUtils#rm(Path...)
-   */
-  @Deprecated()
-  public static boolean recurseDelete(File f) {
-    try {
-      IOUtils.rm(f.toPath());
-      return true;
-    } catch (IOException e) {
-      System.err.println(e.toString());
-      return false;
-    }
-  }
   
   public void clearIndex() {
     assertU(delQ("*:*"));
@@ -1077,7 +1081,7 @@
     DirectSolrConnection connection = new DirectSolrConnection(core);
     SolrRequestHandler handler = core.getRequestHandler("/update/json");
     if (handler == null) {
-      handler = new JsonUpdateRequestHandler();
+      handler = new UpdateRequestHandler();
       handler.init(null);
     }
     return connection.request(handler, args, json);
@@ -1799,7 +1803,7 @@
     copyMinConf(dstRoot, null);
   }
 
-  // Creates a minimal conf dir. Optionally adding in a core.properties file from the string passed in
+  // Creates a minimal conf dir, always adding a core.properties file built from the string passed in;
   // the string to write to the core.properties file may be null in which case nothing is done with it.
   // propertiesContent may be an empty string, which will actually work.
   public static void copyMinConf(File dstRoot, String propertiesContent) throws IOException {
@@ -1808,6 +1812,7 @@
     if (! dstRoot.exists()) {
       assertTrue("Failed to make subdirectory ", dstRoot.mkdirs());
     }
+    Files.createFile(dstRoot.toPath().resolve("core.properties"));
     if (propertiesContent != null) {
       FileUtils.writeStringToFile(new File(dstRoot, "core.properties"), propertiesContent, Charsets.UTF_8.toString());
     }
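
The endTrackingZkClients counter pair above gives way to the generic ObjectReleaseTracker, which the suite now drains exactly once via the clearObjectTrackerAndCheckEmpty() assertion. A minimal sketch of the tracking convention, assuming track/release registration methods (only clearObjectTrackerAndCheckEmpty is visible in this patch):

    import org.apache.solr.common.util.ObjectReleaseTracker;

    // Hypothetical tracked resource: registers itself on construction and
    // deregisters on close(). Anything still registered when the suite ends
    // trips the new assertTrue(..., clearObjectTrackerAndCheckEmpty()).
    class TrackedResource implements AutoCloseable {
      TrackedResource() {
        ObjectReleaseTracker.track(this);     // assumed registration call
      }

      @Override
      public void close() {
        ObjectReleaseTracker.release(this);   // assumed deregistration call
      }
    }
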
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
index 1098ca5..a7c33f0 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
@@ -33,8 +33,6 @@
 import org.apache.solr.core.MockDirectoryFactory;
 import org.apache.solr.servlet.SolrDispatchFilter;
 import org.apache.zookeeper.KeeperException;
-import org.junit.After;
-import org.junit.Before;
 import org.junit.BeforeClass;
 
 public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTestCase {
@@ -54,11 +52,9 @@
     //useFactory(null);
   }
 
-
-  @Before
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     
     String zkDir = testDir.getAbsolutePath() + File.separator
     + "zookeeper/server1/data";
@@ -212,8 +208,7 @@
   }
   
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     if (DEBUG) {
       printLayout();
     }
@@ -228,7 +223,7 @@
     System.clearProperty(MockDirectoryFactory.SOLR_TESTS_ALLOW_READING_FILES_STILL_OPEN_FOR_WRITE);
     
     resetExceptionIgnores();
-    super.tearDown();
+    super.distribTearDown();
     zkServer.shutdown();
   }
   
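The @Before/@After pair becomes distribSetUp/distribTearDown, lifecycle hooks that BaseDistributedSearchTestCase invokes itself instead of leaving to JUnit annotations. Subclasses follow the same override pattern; a sketch with a hypothetical class name:

    public class MyZkDistribTest extends AbstractDistribZkTestBase {
      @Override
      public void distribSetUp() throws Exception {
        super.distribSetUp();      // formerly @Before setUp() + super.setUp()
        // per-test initialization goes here
      }

      @Override
      public void distribTearDown() throws Exception {
        // per-test cleanup goes here
        super.distribTearDown();   // formerly @After tearDown() + super.tearDown()
      }
    }
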
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index aba22bd..1174737 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -55,9 +55,7 @@
 import org.apache.solr.servlet.SolrDispatchFilter;
 import org.apache.solr.update.DirectUpdateHandler2;
 import org.apache.zookeeper.CreateMode;
-import org.junit.After;
 import org.junit.AfterClass;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.noggit.CharArr;
 import org.noggit.JSONWriter;
@@ -95,18 +93,18 @@
 @Slow
 public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTestBase {
   static Logger log = LoggerFactory.getLogger(AbstractFullDistribZkTestBase.class);
-  
+
   @BeforeClass
   public static void beforeFullSolrCloudTest() {
     // shorten the log output more for this test type
     if (formatter != null) formatter.setShorterFormat();
   }
-  
+
   public static final String SHARD1 = "shard1";
   public static final String SHARD2 = "shard2";
-  
+
   protected boolean printLayoutOnTearDown = false;
-  
+
   String t1 = "a_t";
   String i1 = "a_i1";
   String tlong = "other_tl1";
@@ -122,14 +120,14 @@
   protected Map<String,List<CloudJettyRunner>> shardToJetty = new HashMap<>();
   private AtomicInteger jettyIntCntr = new AtomicInteger(0);
   protected ChaosMonkey chaosMonkey;
-  
+
   protected Map<String,CloudJettyRunner> shardToLeaderJetty = new HashMap<>();
   private boolean cloudInit;
   protected boolean checkCreatedVsState;
   protected boolean useJettyDataDir = true;
 
   protected Map<URI,SocketProxy> proxies = new HashMap<URI,SocketProxy>();
-  
+
   public static class CloudJettyRunner {
     public JettySolrRunner jetty;
     public String nodeName;
@@ -160,19 +158,19 @@
       return "CloudJettyRunner [url=" + url + "]";
     }
   }
-  
+
   static class CloudSolrServerClient {
     SolrClient solrClient;
     String shardName;
     int port;
     public ZkNodeProps info;
-    
+
     public CloudSolrServerClient() {}
     
     public CloudSolrServerClient(SolrClient client) {
       this.solrClient = client;
     }
-    
+
     @Override
     public int hashCode() {
       final int prime = 31;
@@ -180,7 +178,7 @@
       result = prime * result + ((solrClient == null) ? 0 : solrClient.hashCode());
       return result;
     }
-    
+
     @Override
     public boolean equals(Object obj) {
       if (this == obj) return true;
@@ -192,69 +190,67 @@
       } else if (!solrClient.equals(other.solrClient)) return false;
       return true;
     }
-    
+
   }
-  
-  @Before
+
   @Override
-  public void setUp() throws Exception {
-    super.setUp();
+  public void distribSetUp() throws Exception {
+    super.distribSetUp();
     // ignoreException(".*");
     if (sliceCount > 0) {
       System.setProperty("numShards", Integer.toString(sliceCount));
     } else {
       System.clearProperty("numShards");
     }
-    
+
     if (isSSLMode()) {
       System.clearProperty("urlScheme");
       ZkStateReader zkStateReader = new ZkStateReader(zkServer.getZkAddress(),
           AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT);
       try {
         zkStateReader.getZkClient().create(ZkStateReader.CLUSTER_PROPS,
-          ZkStateReader.toJSON(Collections.singletonMap("urlScheme","https")), 
+          ZkStateReader.toJSON(Collections.singletonMap("urlScheme","https")),
           CreateMode.PERSISTENT, true);
       } finally {
         zkStateReader.close();
       }
     }
   }
-  
+
   @BeforeClass
   public static void beforeClass() {
     System.setProperty("solrcloud.update.delay", "0");
   }
-  
+
   @AfterClass
   public static void afterClass() throws Exception {
     System.clearProperty("solrcloud.update.delay");
     System.clearProperty("genericCoreNodeNames");
   }
-  
+
   public AbstractFullDistribZkTestBase() {
-    fixShardCount = true;
-    
-    shardCount = 4;
     sliceCount = 2;
+    fixShardCount(4);
+
     // TODO: for now, turn off stress because it uses regular clients, and we
     // need the cloud client because we kill servers
     stress = 0;
-    
+
     useExplicitNodeNames = random().nextBoolean();
   }
-  
+
   protected String getDataDir(String dataDir) throws IOException {
     return dataDir;
   }
-  
+
   protected void initCloud() throws Exception {
     assert(cloudInit == false);
     cloudInit = true;
     cloudClient = createCloudClient(DEFAULT_COLLECTION);
     cloudClient.connect();
-    
+
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
-    
+
     chaosMonkey = new ChaosMonkey(zkServer, zkStateReader, DEFAULT_COLLECTION,
         shardToJetty, shardToLeaderJetty);
   }
@@ -265,23 +261,26 @@
     if (defaultCollection != null) client.setDefaultCollection(defaultCollection);
     client.getLbClient().getHttpClient().getParams()
         .setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 30000);
+    client.getLbClient().getHttpClient().getParams()
+        .setParameter(CoreConnectionPNames.SO_TIMEOUT, 60000);
     return client;
   }
-  
+
   @Override
   protected void createServers(int numServers) throws Exception {
-    
+
     System.setProperty("collection", "control_collection");
 
     // we want hashes by default for the control, so set to 1 shard as opposed to leaving unset
     String oldNumShards = System.getProperty(ZkStateReader.NUM_SHARDS_PROP);
     System.setProperty(ZkStateReader.NUM_SHARDS_PROP, "1");
-    
+
     try {
-      
-      File controlJettyDir = createTempDir().toFile();
+
+      File controlJettyDir = createTempDir("control").toFile();
       setupJettySolrHome(controlJettyDir);
-      
+
+      System.setProperty("coreRootDirectory", controlJettyDir.toPath().resolve("cores").toString());
       controlJetty = createJetty(controlJettyDir, useJettyDataDir ? getDataDir(testDir
           + "/control/data") : null); // don't pass shard name... let it default to
                                // "shard1"
@@ -302,7 +301,7 @@
                                           // cloudClient
         return;
       }
-      
+
     } finally {
       System.clearProperty("collection");
       if (oldNumShards != null) {
@@ -314,9 +313,9 @@
 
 
     initCloud();
-    
+
     createJettys(numServers, checkCreatedVsState).size();
-    
+
     int cnt = getTotalReplicas(DEFAULT_COLLECTION);
     if (cnt > 0) {
       waitForCollection(cloudClient.getZkStateReader(), DEFAULT_COLLECTION, sliceCount);
@@ -345,21 +344,17 @@
       Thread.sleep(500);
     }
   }
-  
+
   protected List<JettySolrRunner> createJettys(int numJettys) throws Exception {
     return createJettys(numJettys, false);
   }
 
-  protected int defaultStateFormat = 1 + random().nextInt(2);
+  protected String defaultStateFormat = String.valueOf(1 + random().nextInt(2));
 
-  protected int getStateFormat()  {
+  protected String getStateFormat()  {
     String stateFormat = System.getProperty("tests.solr.stateFormat", null);
     if (stateFormat != null)  {
-      if ("2".equals(stateFormat)) {
-        return defaultStateFormat = 2;
-      } else if ("1".equals(stateFormat))  {
-        return defaultStateFormat = 1;
-      }
+      defaultStateFormat = stateFormat;
     }
     return defaultStateFormat; // random
   }
@@ -375,7 +370,7 @@
     List<SolrClient> clients = new ArrayList<>();
     StringBuilder sb = new StringBuilder();
 
-    if (getStateFormat() == 2) {
+    if ("2".equals(getStateFormat())) {
       log.info("Creating collection1 with stateFormat=2");
       SolrZkClient zkClient = new SolrZkClient(zkServer.getZkAddress(),
           AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT);
@@ -391,33 +386,34 @@
       if (sb.length() > 0) sb.append(',');
       int cnt = this.jettyIntCntr.incrementAndGet();
 
-      File jettyDir = createTempDir().toFile();
+      File jettyDir = createTempDir("shard-" + i).toFile();
 
       jettyDir.mkdirs();
       setupJettySolrHome(jettyDir);
       log.info("create jetty " + i);
+      System.setProperty("coreRootDirectory", jettyDir.toPath().resolve("cores").toString());
       JettySolrRunner j = createJetty(jettyDir, useJettyDataDir ? getDataDir(testDir + "/jetty"
           + cnt) : null, null, "solrconfig.xml", null);
       jettys.add(j);
       SolrClient client = createNewSolrClient(j.getLocalPort());
       clients.add(client);
     }
-  
+
     this.jettys.addAll(jettys);
     this.clients.addAll(clients);
-    
+
     int numShards = getTotalReplicas(DEFAULT_COLLECTION);
     if (checkCreatedVsState) {
       // now wait until we see that the number of shards in the cluster state
       // matches what we expect
       int retries = 0;
-      while (numShards != shardCount) {
+      while (numShards != getShardCount()) {
         numShards = getTotalReplicas(DEFAULT_COLLECTION);
-        if (numShards == shardCount) break;
+        if (numShards == getShardCount()) break;
         if (retries++ == 60) {
           printLayoutOnTearDown = true;
           fail("Shards in the state does not match what we set:" + numShards
-              + " vs " + shardCount);
+              + " vs " + getShardCount());
         }
         Thread.sleep(500);
       }
@@ -432,7 +428,7 @@
     if (numShards > 0) {
       updateMappingsFromZk(this.jettys, this.clients);
     }
-    
+
     // build the shard string
     for (int i = 1; i <= numJettys / 2; i++) {
       JettySolrRunner j = this.jettys.get(i);
@@ -442,7 +438,7 @@
       sb.append("|").append(buildUrl(j2.getLocalPort()));
     }
     shards = sb.toString();
-    
+
     return jettys;
   }
 
@@ -494,30 +490,30 @@
     }
     return cnt;
   }
-  
+
   public JettySolrRunner createJetty(String dataDir, String ulogDir, String shardList,
       String solrConfigOverride) throws Exception {
-    
+
     JettySolrRunner jetty = new JettySolrRunner(getSolrHome(), context, 0,
         solrConfigOverride, null, false, getExtraServlets(), sslConfig, getExtraRequestFilters());
     jetty.setShards(shardList);
     jetty.setDataDir(getDataDir(dataDir));
     jetty.start();
-    
+
     return jetty;
   }
-  
+
   public JettySolrRunner createJetty(File solrHome, String dataDir, String shardList, String solrConfigOverride, String schemaOverride) throws Exception {
     // randomly test a relative solr.home path
     if (random().nextBoolean()) {
       solrHome = getRelativeSolrHomePath(solrHome);
     }
-    
+
     JettySolrRunner jetty = new JettySolrRunner(solrHome.getPath(), context, 0, solrConfigOverride, schemaOverride, false, getExtraServlets(), sslConfig, getExtraRequestFilters());
     jetty.setShards(shardList);
     jetty.setDataDir(getDataDir(dataDir));
     jetty.start();
-    
+
     return jetty;
   }
 
@@ -598,7 +594,7 @@
   private File getRelativeSolrHomePath(File solrHome) {
     String path = SolrResourceLoader.normalizeDir(new File(".").getAbsolutePath());
     String base = new File(solrHome.getPath()).getAbsolutePath();
-    
+
     if (base.startsWith(".")) {
       base = base.replaceFirst("\\.", new File(".").getName());
     }
@@ -606,14 +602,14 @@
     if (path.endsWith(File.separator + ".")) {
       path = path.substring(0, path.length() - 2);
     }
-    
+
     int splits = path.split("\\" + File.separator).length;
-    
+
     StringBuilder p = new StringBuilder();
     for (int i = 0; i < splits - 2; i++) {
       p.append("..").append(File.separator);
-    }   
-    
+    }
+
     String prefix = FilenameUtils.getPrefix(path);
     if (base.startsWith(prefix)) {
       base = base.substring(prefix.length());
@@ -632,7 +628,7 @@
     zkStateReader.updateClusterState(true);
     cloudJettys.clear();
     shardToJetty.clear();
-    
+
     ClusterState clusterState = zkStateReader.getClusterState();
     DocCollection coll = clusterState.getCollection(DEFAULT_COLLECTION);
 
@@ -645,28 +641,28 @@
         for (Replica replica : slice.getReplicas()) {
           int port = new URI(((HttpSolrClient) client).getBaseURL())
               .getPort();
-          
+
           if (replica.getStr(ZkStateReader.BASE_URL_PROP).contains(":" + port)) {
             CloudSolrServerClient csc = new CloudSolrServerClient();
             csc.solrClient = client;
             csc.port = port;
             csc.shardName = replica.getStr(ZkStateReader.NODE_NAME_PROP);
             csc.info = replica;
-            
+
             theClients .add(csc);
-            
+
             break nextClient;
           }
         }
       }
     }
- 
+
     for (JettySolrRunner jetty : jettys) {
       int port = jetty.getLocalPort();
       if (port == -1) {
         throw new RuntimeException("Cannot find the port for jetty");
       }
-      
+
       nextJetty:
       for (Slice slice : coll.getSlices()) {
         Set<Entry<String,Replica>> entries = slice.getReplicasMap().entrySet();
@@ -696,7 +692,7 @@
         }
       }
     }
-    
+
     // # of jetties may not match replicas in shard here, because we don't map
     // jetties that are not running - every shard should have at least one
     // running jetty though
@@ -706,13 +702,13 @@
       if (!allowOverSharding) {
         assertNotNull("Test setup problem: We found no jetties for shard: "
             + slice.getName() + " just:" + shardToJetty.keySet(), jetties);
-        
+
         assertEquals("slice:" + slice.getName(), slice.getReplicas().size(),
             jetties.size());
       }
     }
   }
-  
+
   private CloudSolrServerClient findClientByPort(int port, List<CloudSolrServerClient> theClients) {
     for (CloudSolrServerClient client : theClients) {
       if (client.port == port) {
@@ -724,7 +720,7 @@
 
   @Override
   protected void setDistributedParams(ModifiableSolrParams params) {
-    
+
     if (r.nextBoolean()) {
       // don't set shards, let that be figured out from the cloud state
     } else {
@@ -737,19 +733,19 @@
       params.set("shards", sb.toString());
     }
   }
-  
+
   @Override
   protected void indexDoc(SolrInputDocument doc) throws IOException,
       SolrServerException {
-    
+
     UpdateRequest req = new UpdateRequest();
     req.add(doc);
     req.setParam("CONTROL", "TRUE");
     req.process(controlClient);
-    
+
     // if we wanted to randomly pick a client - but sometimes they may be
     // down...
-    
+
     // boolean pick = random.nextBoolean();
     //
     // int which = (doc.getField(id).toString().hashCode() & 0x7fffffff) %
@@ -762,13 +758,13 @@
     //
     // HttpSolrServer client = (HttpSolrServer)
     // clients.get(which);
-    
+
     UpdateRequest ureq = new UpdateRequest();
     ureq.add(doc);
     // ureq.setParam(UpdateParams.UPDATE_CHAIN, DISTRIB_UPDATE_CHAIN);
     ureq.process(cloudClient);
   }
-  
+
   @Override
   protected void index_specific(int serverNumber, Object... fields)
       throws Exception {
@@ -780,7 +776,7 @@
     
     HttpSolrClient client = (HttpSolrClient) clients
         .get(serverNumber);
-    
+
     UpdateRequest ureq = new UpdateRequest();
     ureq.add(doc);
     // ureq.setParam("update.chain", DISTRIB_UPDATE_CHAIN);
@@ -793,16 +789,16 @@
     for (int i = 0; i < fields.length; i += 2) {
       doc.addField((String) (fields[i]), fields[i + 1]);
     }
-    
+
     UpdateRequest ureq = new UpdateRequest();
     ureq.add(doc);
     // ureq.setParam("update.chain", DISTRIB_UPDATE_CHAIN);
     ureq.process(client);
-    
+
     // add to control second in case adding to shards fails
     controlClient.add(doc);
   }
-  
+
   @Override
   protected void del(String q) throws Exception {
     controlClient.deleteByQuery(q);
@@ -816,25 +812,25 @@
     }
      ***/
   }// serial commit...
-  
+
   protected void waitForRecoveriesToFinish(boolean verbose)
       throws Exception {
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     super.waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, verbose);
   }
-  
+
   protected void waitForRecoveriesToFinish(String collection, boolean verbose)
       throws Exception {
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     super.waitForRecoveriesToFinish(collection, zkStateReader, verbose);
   }
-  
+
   protected void waitForRecoveriesToFinish(boolean verbose, int timeoutSeconds)
       throws Exception {
     ZkStateReader zkStateReader = cloudClient.getZkStateReader();
     super.waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, verbose, true, timeoutSeconds);
   }
-  
+
   protected void checkQueries() throws Exception {
 
     handle.put("_version_", SKIPVAL);
@@ -858,7 +854,7 @@
       query("q", "*:*", "sort", f + " desc");
       query("q", "*:*", "sort", f + " asc");
     }
-    
+
     // these queries should be exactly ordered and scores should exactly match
     query("q", "*:*", "sort", i1 + " desc");
     query("q", "*:*", "sort", i1 + " asc");
@@ -875,30 +871,30 @@
     handle.remove("maxScore");
     query("q", "{!func}" + i1, "fl", "*,score"); // even scores should match
                                                  // exactly here
-    
+
     handle.put("highlighting", UNORDERED);
     handle.put("response", UNORDERED);
-    
+
     handle.put("maxScore", SKIPVAL);
     query("q", "quick");
     query("q", "all", "fl", "id", "start", "0");
     query("q", "all", "fl", "foofoofoo", "start", "0"); // no fields in returned
                                                         // docs
     query("q", "all", "fl", "id", "start", "100");
-    
+
     handle.put("score", SKIPVAL);
     query("q", "quick", "fl", "*,score");
     query("q", "all", "fl", "*,score", "start", "1");
     query("q", "all", "fl", "*,score", "start", "100");
-    
+
     query("q", "now their fox sat had put", "fl", "*,score", "hl", "true",
         "hl.fl", t1);
-    
+
     query("q", "now their fox sat had put", "fl", "foofoofoo", "hl", "true",
         "hl.fl", t1);
-    
+
     query("q", "matchesnothing", "fl", "*,score");
-    
+
     query("q", "*:*", "rows", 100, "facet", "true", "facet.field", t1);
     query("q", "*:*", "rows", 100, "facet", "true", "facet.field", t1,
         "facet.limit", -1, "facet.sort", "count");
@@ -916,11 +912,11 @@
         "facet.offset", 1);
     query("q", "*:*", "rows", 100, "facet", "true", "facet.field", t1,
         "facet.mincount", 2);
-    
+
     // test faceting multiple things at once
     query("q", "*:*", "rows", 100, "facet", "true", "facet.query", "quick",
         "facet.query", "all", "facet.query", "*:*", "facet.field", t1);
-    
+
     // test filter tagging, facet exclusion, and naming (multi-select facet
     // support)
     query("q", "*:*", "rows", 100, "facet", "true", "facet.query",
@@ -933,16 +929,16 @@
         "{!ex=t1}SubjectTerms_mfacet", "fq",
         "{!tag=t1}SubjectTerms_mfacet:(test 1)", "facet.limit", "10",
         "facet.mincount", "1");
-    
+
     // test field that is valid in schema but missing in all shards
     query("q", "*:*", "rows", 100, "facet", "true", "facet.field",
         missingField, "facet.mincount", 2);
     // test field that is valid in schema and missing in some shards
     query("q", "*:*", "rows", 100, "facet", "true", "facet.field", oddField,
         "facet.mincount", 2);
-    
+
     query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", i1);
-    
+
     // Try to get better coverage for refinement queries by turning off over
     // requesting.
     // This makes it much more likely that we may not get the top facet values
@@ -956,15 +952,15 @@
         "{!key='a b/c \\' \\} foo'}" + t1, "facet.limit", 5,
         "facet.shard.limit", 5);
     handle.remove("facet_fields");
-    
+
     query("q", "*:*", "sort", "n_tl1 desc");
-    
+
     // index the same document to two shards and make sure things
     // don't blow up.
     // assumes first n clients are first n shards
     if (clients.size() >= 2) {
       index(id, 100, i1, 107, t1, "oh no, a duplicate!");
-      for (int i = 0; i < shardCount; i++) {
+      for (int i = 0; i < getShardCount(); i++) {
         index_specific(i, id, 100, i1, 107, t1, "oh no, a duplicate!");
       }
       commit();
@@ -973,7 +969,7 @@
       query("q", "*:*", "rows", 100);
     }
   }
-  
+
   protected void indexAbunchOfDocs() throws Exception {
     indexr(id, 2, i1, 50, t1, "to come to the aid of their country.");
     indexr(id, 3, i1,  2, t1, "how now brown cow");
@@ -993,7 +989,7 @@
         "Great works are performed, not by strength, but by perseverance.");
     indexr(id, 13, i1, 232, t1, "no eggs on wall, lesson learned",
         oddField, "odd man out");
-    
+
     indexr(id, 14, "SubjectTerms_mfacet", new String[] {"mathematical models",
         "mathematical analysis"});
     indexr(id, 15, "SubjectTerms_mfacet", new String[] {"test 1", "test 2",
@@ -1005,19 +1001,19 @@
       vals[i] = "test " + i;
     }
     indexr(id, 17, "SubjectTerms_mfacet", vals);
-    
+
     for (int i = 100; i < 150; i++) {
       indexr(id, i);
     }
   }
-  
+
   /**
-   * Executes a query against each live and active replica of the specified shard 
+   * Executes a query against each live and active replica of the specified shard
   * and asserts that the results are identical.
    *
    * @see #queryAndCompare
    */
-  public QueryResponse queryAndCompareReplicas(SolrParams params, String shard) 
+  public QueryResponse queryAndCompareReplicas(SolrParams params, String shard)
     throws Exception {
 
     ArrayList<SolrClient> shardClients = new ArrayList<>(7);
@@ -1042,9 +1038,9 @@
 
   /**
    * For each Shard, executes a query against each live and active replica of that shard
-   * and asserts that the results are identical for each replica of the same shard.  
-   * Because results are not compared between replicas of different shards, this method 
-   * should be safe for comparing the results of any query, even if it contains 
+   * and asserts that the results are identical for each replica of the same shard.
+   * Because results are not compared between replicas of different shards, this method
+   * should be safe for comparing the results of any query, even if it contains
    * "distrib=false", because the replicas should all be identical.
    *
    * @see AbstractFullDistribZkTestBase#queryAndCompareReplicas(SolrParams, String)
@@ -1058,25 +1054,25 @@
     }
   }
 
-  /** 
-   * Returns a non-null string if replicas within the same shard do not have a 
-   * consistent number of documents. 
+  /**
+   * Checks whether replicas within the same shard have a consistent number of
+   * documents, logging any differences found rather than returning them.
    */
   protected void checkShardConsistency(String shard) throws Exception {
     checkShardConsistency(shard, false, false);
   }
 
-  /** 
-   * Returns a non-null string if replicas within the same shard do not have a 
+  /**
+   * Returns a non-null string if replicas within the same shard do not have a
    * consistent number of documents.
-   * If expectFailure==false, the exact differences found will be logged since 
+   * If expectFailure==false, the exact differences found will be logged since
    * this would be an unexpected failure.
-   * verbose causes extra debugging into to be displayed, even if everything is 
+   * verbose causes extra debugging info to be displayed, even if everything is
    * consistent.
    */
   protected String checkShardConsistency(String shard, boolean expectFailure, boolean verbose)
       throws Exception {
-    
+
     List<CloudJettyRunner> solrJetties = shardToJetty.get(shard);
     if (solrJetties == null) {
       throw new RuntimeException("shard not found:" + shard + " keys:"
@@ -1112,7 +1108,7 @@
             + e.getMessage() + "\n");
         continue;
       }
-      
+
       boolean live = false;
       String nodeName = props.getStr(ZkStateReader.NODE_NAME_PROP);
       if (zkStateReader.getClusterState().liveNodesContain(nodeName)) {
@@ -1149,19 +1145,19 @@
       }
     }
     return failMessage;
-    
+
   }
-  
+
   public void showCounts() {
     Set<String> theShards = shardToJetty.keySet();
-    
+
     for (String shard : theShards) {
       List<CloudJettyRunner> solrJetties = shardToJetty.get(shard);
-      
+
       for (CloudJettyRunner cjetty : solrJetties) {
         ZkNodeProps props = cjetty.info;
         System.err.println("PROPS:" + props);
-        
+
         try {
           SolrParams query = params("q", "*:*", "rows", "0", "distrib",
               "false", "tests", "checkShardConsistency"); // "tests" is just a
@@ -1187,11 +1183,11 @@
           live = true;
         }
         System.err.println(" live:" + live);
-        
+
       }
     }
   }
-  
+
   protected void randomlyEnableAutoSoftCommit() {
     if (r.nextBoolean()) {
       enableAutoSoftCommit(1000);
@@ -1199,7 +1195,7 @@
       log.info("Not turning on auto soft commit");
     }
   }
-  
+
   protected void enableAutoSoftCommit(int time) {
     log.info("Turning on auto soft commit: " + time);
     for (List<CloudJettyRunner> jettyList : shardToJetty.values()) {
@@ -1228,7 +1224,7 @@
       throws Exception {
     checkShardConsistency(checkVsControl, verbose, null, null);
   }
-  
+
   /* Checks shard consistency and optionally checks against the control shard.
    * The test will be failed if differences are found.
    */
@@ -1236,7 +1232,7 @@
       throws Exception {
 
     updateMappingsFromZk(jettys, clients, true);
-    
+
     Set<String> theShards = shardToJetty.keySet();
     String failMessage = null;
     for (String shard : theShards) {
@@ -1245,7 +1241,7 @@
         failMessage = shardFailMessage;
       }
     }
-    
+
     if (failMessage != null) {
       fail(failMessage);
     }
@@ -1260,7 +1256,7 @@
     SolrDocumentList cloudDocList = cloudClient.query(q).getResults();
     long cloudClientDocs = cloudDocList.getNumFound();
 
-    
+
     // now check that the right # are on each shard
     theShards = shardToJetty.keySet();
     int cnt = 0;
@@ -1314,7 +1310,7 @@
     }
     return null;
   }
-  
+
   protected void assertDocCounts(boolean verbose) throws Exception {
     // TODO: as we create the clients, we should build a map from shard to
     // node/client
@@ -1324,7 +1320,7 @@
         + "\n\n");
     long controlCount = controlClient.query(new SolrQuery("*:*")).getResults()
         .getNumFound();
-    
+
     // do some really inefficient mapping...
     ZkStateReader zk = new ZkStateReader(zkServer.getZkAddress(), 10000,
         AbstractZkTestCase.TIMEOUT);
@@ -1337,12 +1333,12 @@
     } finally {
       zk.close();
     }
-    
+
     if (slices == null) {
       throw new RuntimeException("Could not find collection "
           + DEFAULT_COLLECTION + " in " + clusterState.getCollections());
     }
-    
+
     for (CloudJettyRunner cjetty : cloudJettys) {
       CloudSolrServerClient client = cjetty.client;
       for (Map.Entry<String,Slice> slice : slices.entrySet()) {
@@ -1368,7 +1364,7 @@
         query.set("distrib", false);
         count = client.solrClient.query(query).getResults().getNumFound();
       }
-      
+
       if (verbose) System.err.println("client docs:" + count + "\n\n");
     }
     if (verbose) System.err.println("control docs:"
@@ -1378,39 +1374,39 @@
     assertEquals("Doc Counts do not add up", controlCount,
         cloudClient.query(query).getResults().getNumFound());
   }
-  
+
   @Override
   protected QueryResponse queryServer(ModifiableSolrParams params)
       throws SolrServerException {
-    
+
     if (r.nextBoolean()) params.set("collection", DEFAULT_COLLECTION);
-    
+
     QueryResponse rsp = cloudClient.query(params);
     return rsp;
   }
-  
+
   static abstract class StopableThread extends Thread {
     public StopableThread(String name) {
       super(name);
     }
     public abstract void safeStop();
   }
-  
+
   class StopableSearchThread extends StopableThread {
     private volatile boolean stop = false;
     protected final AtomicInteger queryFails = new AtomicInteger();
     private String[] QUERIES = new String[] {"to come","their country","aid","co*"};
-    
+
     public StopableSearchThread() {
       super("StopableSearchThread");
       setDaemon(true);
     }
-    
+
     @Override
     public void run() {
       Random random = random();
       int numSearches = 0;
-      
+
       while (true && !stop) {
         numSearches++;
         try {
@@ -1431,43 +1427,43 @@
           Thread.currentThread().interrupt();
         }
       }
-      
+
       System.err.println("num searches done:" + numSearches + " with " + queryFails + " fails");
     }
-    
+
     @Override
     public void safeStop() {
       stop = true;
     }
-    
+
     public int getFails() {
       return queryFails.get();
     }
-    
+
   };
-  
+
   public void waitForThingsToLevelOut(int waitForRecTimeSeconds) throws Exception {
     log.info("Wait for recoveries to finish - wait " + waitForRecTimeSeconds + " for each attempt");
     int cnt = 0;
     boolean retry = false;
     do {
       waitForRecoveriesToFinish(VERBOSE, waitForRecTimeSeconds);
-      
+
       try {
         commit();
       } catch (Throwable t) {
         t.printStackTrace();
         // we don't care if this commit fails on some nodes
       }
-      
+
       updateMappingsFromZk(jettys, clients);
-      
+
       Set<String> theShards = shardToJetty.keySet();
       String failMessage = null;
       for (String shard : theShards) {
         failMessage = checkShardConsistency(shard, true, false);
       }
-      
+
       if (failMessage != null) {
         log.info("shard inconsistency - waiting ...");
         retry = true;
@@ -1475,12 +1471,12 @@
         retry = false;
       }
       cnt++;
-      if (cnt > 20) break;
+      if (cnt > 30) break;
       Thread.sleep(2000);
     } while (retry);
   }
-  
-  
+
+
   public void waitForNoShardInconsistency() throws Exception {
     log.info("Wait for no shard inconsistency");
     int cnt = 0;
@@ -1492,15 +1488,20 @@
         t.printStackTrace();
         // we don't care if this commit fails on some nodes
       }
-      
+
       updateMappingsFromZk(jettys, clients);
-      
+
       Set<String> theShards = shardToJetty.keySet();
       String failMessage = null;
       for (String shard : theShards) {
-        failMessage = checkShardConsistency(shard, true, false);
+        try {
+          failMessage = checkShardConsistency(shard, true, true);
+        } catch (Exception e) {
+          // we might hit a node we just stopped
+          failMessage = "hit exception: " + e.getMessage();
+        }
       }
-      
+
       if (failMessage != null) {
         log.info("shard inconsistency - waiting ...");
         retry = true;
@@ -1508,7 +1509,7 @@
         retry = false;
       }
       cnt++;
-      if (cnt > 20) break;
+      if (cnt > 40) break;
       Thread.sleep(2000);
     } while (retry);
   }
@@ -1526,42 +1527,41 @@
   }
 
   @Override
-  @After
-  public void tearDown() throws Exception {
+  public void distribTearDown() throws Exception {
     if (VERBOSE || printLayoutOnTearDown) {
       super.printLayout();
     }
     if (commondCloudSolrClient != null) {
-      commondCloudSolrClient.shutdown();
+      commondCloudSolrClient.close();
     }
     if (controlClient != null) {
-      ((HttpSolrClient) controlClient).shutdown();
+      controlClient.close();
     }
     if (cloudClient != null) {
-      cloudClient.shutdown();
+      cloudClient.close();
     }
     if (controlClientCloud != null) {
-      controlClientCloud.shutdown();
+      controlClientCloud.close();
     }
-    super.tearDown();
-    
+    super.distribTearDown();
+
     System.clearProperty("zkHost");
     System.clearProperty("numShards");
 
-    // close socket proxies after super.tearDown
+    // close socket proxies after super.distribTearDown
     if (!proxies.isEmpty()) {
       for (SocketProxy proxy : proxies.values()) {
         proxy.close();
       }
     }
   }
-  
+
   @Override
   protected void commit() throws Exception {
     controlClient.commit();
     cloudClient.commit();
   }
-  
+
   @Override
   protected void destroyServers() throws Exception {
     if (controlJetty != null) {
@@ -1574,10 +1574,9 @@
         log.error("", e);
       }
     }
-    clients.clear();
-    jettys.clear();
+    super.destroyServers();
   }
-  
+
   protected CollectionAdminResponse createCollection(String collectionName, int numShards, int replicationFactor, int maxShardsPerNode) throws SolrServerException, IOException {
     return createCollection(null, collectionName, numShards, replicationFactor, maxShardsPerNode, null, null);
   }
@@ -1615,9 +1614,9 @@
       collectionInfos.put(collectionName, list);
     }
     params.set("name", collectionName);
-    if (getStateFormat() == 2) {
-      log.info("Creating collection with stateFormat=2: " + collectionName);
-      params.set(DocCollection.STATE_FORMAT, "2");
+    if ("1".equals(getStateFormat()) ) {
+      log.info("Creating collection with stateFormat=1: " + collectionName);
+      params.set(DocCollection.STATE_FORMAT, "1");
     }
     SolrRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
@@ -1625,11 +1624,8 @@
     CollectionAdminResponse res = new CollectionAdminResponse();
     if (client == null) {
       final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(clientIndex));
-      SolrClient adminClient = createNewSolrClient("", baseUrl);
-      try {
+      try (SolrClient adminClient = createNewSolrClient("", baseUrl)) {
         res.setResponse(adminClient.request(request));
-      } finally {
-        if (adminClient != null) adminClient.shutdown();
       }
     } else {
       res.setResponse(client.request(request));
@@ -1649,7 +1645,7 @@
         MAX_SHARDS_PER_NODE, maxShardsPerNode),
         client);
   }
-  
+
   protected CollectionAdminResponse createCollection(Map<String, List<Integer>> collectionInfos,
                                                      String collectionName, int numShards, int replicationFactor, int maxShardsPerNode, SolrClient client, String createNodeSetStr, String configName) throws SolrServerException, IOException {
 
@@ -1698,7 +1694,7 @@
         0, client.getBaseURL().length()
             - DEFAULT_COLLECTION.length() - 1);
   }
-  
+
   protected SolrInputDocument getDoc(Object... fields) throws Exception {
     SolrInputDocument doc = new SolrInputDocument();
     addFields(doc, fields);
@@ -1712,7 +1708,7 @@
     // The Math.min thing is here, because we expect replication-factor to be reduced if there are not enough live nodes to spread all shards of a collection over different nodes
     int expectedShardsPerSlice = numShardsNumReplicaList.get(1);
     int expectedTotalShards = expectedSlices * expectedShardsPerSlice;
-    
+
 //      Map<String,DocCollection> collections = clusterState
 //          .getCollectionStates();
       if (clusterState.hasCollection(collectionName)) {
@@ -1731,14 +1727,14 @@
         totalShards += slices.get(sliceName).getReplicas().size();
       }
       if (totalShards != expectedTotalShards) {
-        return "Found new collection " + collectionName + " with correct number of slices, but mismatch on number of shards. Expected: " + expectedTotalShards + ", actual: " + totalShards; 
+        return "Found new collection " + collectionName + " with correct number of slices, but mismatch on number of shards. Expected: " + expectedTotalShards + ", actual: " + totalShards;
         }
       return null;
     } else {
       return "Could not find new collection " + collectionName;
     }
   }
-  
+
   protected void checkForCollection(String collectionName,
       List<Integer> numShardsNumReplicaList,
       List<String> nodesAllowedToRunShards) throws Exception {
@@ -1776,7 +1772,7 @@
     }
     return commondCloudSolrClient;
   }
-  
+
   public static String getUrlFromZk(ClusterState clusterState, String collection) {
     Map<String,Slice> slices = clusterState.getCollection(collection).getSlicesMap();
 
@@ -1856,12 +1852,11 @@
         .getBaseURL();
     baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
 
-    HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
-    baseClient.setConnectionTimeout(15000);
-    baseClient.setSoTimeout(60000 * 5);
-    NamedList r = baseClient.request(request);
-    baseClient.shutdown();
-    return r;
+    try (HttpSolrClient baseClient = new HttpSolrClient(baseUrl)) {
+      baseClient.setConnectionTimeout(15000);
+      baseClient.setSoTimeout(60000 * 5);
+      return baseClient.request(request);
+    }
   }
 
   protected void createCollection(String collName,
@@ -1881,9 +1876,9 @@
 
   protected List<Replica> ensureAllReplicasAreActive(String testCollectionName, String shardId, int shards, int rf, int maxWaitSecs) throws Exception {
     long startMs = System.currentTimeMillis();
-    
+
     Map<String,Replica> notLeaders = new HashMap<String,Replica>();
-    
+
     ZkStateReader zkr = cloudClient.getZkStateReader();
     zkr.updateClusterState(true); // force the state to be fresh
 
@@ -1898,19 +1893,20 @@
       // refresh state every 2 secs
       if (waitMs % 2000 == 0)
         cloudClient.getZkStateReader().updateClusterState(true);
-      
+
       cs = cloudClient.getZkStateReader().getClusterState();
       assertNotNull(cs);
       Slice shard = cs.getSlice(testCollectionName, shardId);
       assertNotNull("No Slice for "+shardId, shard);
       allReplicasUp = true; // assume true
       Collection<Replica> replicas = shard.getReplicas();
-      assertTrue(replicas.size() == rf);
+      assertTrue("Did not find correct number of replicas. Expected: " + rf + ", found: " + replicas.size(), replicas.size() == rf);
+      
       leader = shard.getLeader();
       assertNotNull(leader);
       log.info("Found "+replicas.size()+" replicas and leader on "+
         leader.getNodeName()+" for "+shardId+" in "+testCollectionName);
-      
+
       // ensure all replicas are "active" and identify the non-leader replica
       for (Replica replica : replicas) {
         String replicaState = replica.getStr(ZkStateReader.STATE_PROP);
@@ -1918,11 +1914,11 @@
           log.info("Replica " + replica.getName() + " is currently " + replicaState);
           allReplicasUp = false;
         }
-        
-        if (!leader.equals(replica)) 
+
+        if (!leader.equals(replica))
           notLeaders.put(replica.getName(), replica);
       }
-      
+
       if (!allReplicasUp) {
         try {
           Thread.sleep(500L);
@@ -1930,22 +1926,22 @@
         waitMs += 500L;
       }
     } // end while
-    
-    if (!allReplicasUp) 
+
+    if (!allReplicasUp)
       fail("Didn't see all replicas for shard "+shardId+" in "+testCollectionName+
           " come up within " + maxWaitMs + " ms! ClusterState: " + printClusterStateInfo());
-    
-    if (notLeaders.isEmpty()) 
+
+    if (notLeaders.isEmpty())
       fail("Didn't isolate any replicas that are not the leader! ClusterState: " + printClusterStateInfo());
-    
+
     long diffMs = (System.currentTimeMillis() - startMs);
     log.info("Took " + diffMs + " ms to see all replicas become active.");
-    
+
     List<Replica> replicas = new ArrayList<Replica>();
     replicas.addAll(notLeaders.values());
     return replicas;
-  }  
-  
+  }
+
   protected String printClusterStateInfo() throws Exception {
     return printClusterStateInfo(null);
   }
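
Several hunks above replace SolrClient.shutdown() with close(), which also allows short-lived clients to use try-with-resources, as in the admin-request change near the end of this file. The same pattern, pulled out as a standalone sketch (helper class name hypothetical):

    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.common.util.NamedList;

    class AdminRequestHelper {
      // close() is Closeable-style now, so the request/shutdown pair collapses
      // into one block that releases the client on every exit path.
      static NamedList sendAdminRequest(String baseUrl, SolrRequest request) throws Exception {
        try (HttpSolrClient baseClient = new HttpSolrClient(baseUrl)) {
          baseClient.setConnectionTimeout(15000);
          baseClient.setSoTimeout(60000 * 5);
          return baseClient.request(request);
        }
      }
    }
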
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractZkTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractZkTestCase.java
index 2535e70..382d3d4 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractZkTestCase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractZkTestCase.java
@@ -17,10 +17,6 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.util.HashMap;
-import java.util.Map;
-
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkNodeProps;
@@ -31,6 +27,10 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * Base test class for ZooKeeper tests.
  */
@@ -74,7 +74,7 @@
     
     buildZooKeeper(zkServer.getZkHost(), zkServer.getZkAddress(), SOLRHOME,
         "solrconfig.xml", "schema.xml");
-    
+
     initCore("solrconfig.xml", "schema.xml");
   }
 
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java
index 428e2b7..1931ed2 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java
@@ -218,9 +218,11 @@
       if (filter != null) {
         CoreContainer cores = ((SolrDispatchFilter) filter).getCores();
         if (cores != null) {
-          int zklocalport = ((InetSocketAddress) cores.getZkController()
-              .getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
-          IpTables.blockPort(zklocalport);
+          if (cores.isZooKeeperAware()) {
+            int zklocalport = ((InetSocketAddress) cores.getZkController()
+                .getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
+            IpTables.blockPort(zklocalport);
+          }
         }
       }
     }
@@ -591,9 +593,11 @@
       if (filter != null) {
         CoreContainer cores = ((SolrDispatchFilter) filter).getCores();
         if (cores != null) {
-          int zklocalport = ((InetSocketAddress) cores.getZkController()
-              .getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
-          IpTables.unblockPort(zklocalport);
+          if (cores.isZooKeeperAware()) {
+            int zklocalport = ((InetSocketAddress) cores.getZkController()
+                .getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
+            IpTables.unblockPort(zklocalport);
+          }
         }
       }
     }
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
index 33fe036..6e87aea 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
@@ -17,16 +17,9 @@
  * limitations under the License.
  */
 
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedMap;
-
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.embedded.SSLConfig;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.common.cloud.SolrZkClient;
@@ -39,6 +32,14 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.SortedMap;
+
 public class MiniSolrCloudCluster {
   
   private static Logger log = LoggerFactory.getLogger(MiniSolrCloudCluster.class);
@@ -60,6 +61,23 @@
   public MiniSolrCloudCluster(int numServers, String hostContext, File baseDir, File solrXml,
       SortedMap<ServletHolder, String> extraServlets,
       SortedMap<Class, String> extraRequestFilters) throws Exception {
+    this(numServers, hostContext, baseDir, solrXml, extraServlets, extraRequestFilters, null);
+  }
+
+  /**
+   * "Mini" SolrCloud cluster to be used for testing
+   * @param numServers number of Solr servers to start
+   * @param hostContext context path of Solr servers used by Jetty
+   * @param baseDir base directory that the mini cluster should be run from
+   * @param solrXml solr.xml file to be uploaded to ZooKeeper
+   * @param extraServlets extra servlets to be started by Jetty
+   * @param extraRequestFilters extra filters to be started by Jetty
+   * @param sslConfig SSL configuration
+   */
+  public MiniSolrCloudCluster(int numServers, String hostContext, File baseDir, File solrXml,
+      SortedMap<ServletHolder, String> extraServlets,
+      SortedMap<Class, String> extraRequestFilters,
+      SSLConfig sslConfig) throws Exception {
     testDir = baseDir;
 
     String zkDir = testDir.getAbsolutePath() + File.separator
@@ -78,7 +96,11 @@
 
     jettys = new LinkedList<JettySolrRunner>();
     for (int i = 0; i < numServers; ++i) {
-      startJettySolrRunner(hostContext, extraServlets, extraRequestFilters);
+      if (sslConfig == null) {
+        startJettySolrRunner(hostContext, extraServlets, extraRequestFilters);
+      } else {
+        startJettySolrRunner(hostContext, extraServlets, extraRequestFilters, sslConfig);
+      }
     }
     
     solrClient = buildSolrClient();
@@ -108,9 +130,23 @@
   public JettySolrRunner startJettySolrRunner(String hostContext,
       SortedMap<ServletHolder, String> extraServlets,
       SortedMap<Class, String> extraRequestFilters) throws Exception {
+    return startJettySolrRunner(hostContext, extraServlets, extraRequestFilters, null);
+  }
+
+  /**
+   * Start a new Solr instance
+   * @param hostContext context path of Solr servers used by Jetty
+   * @param extraServlets extra servlets to be started by Jetty
+   * @param extraRequestFilters extra filters to be started by Jetty
+   * @param sslConfig SSL configuration
+   * @return new Solr instance
+   */
+  public JettySolrRunner startJettySolrRunner(String hostContext,
+      SortedMap<ServletHolder, String> extraServlets,
+      SortedMap<Class, String> extraRequestFilters, SSLConfig sslConfig) throws Exception {
     String context = getHostContextSuitableForServletContext(hostContext);
-    JettySolrRunner jetty = new JettySolrRunner(testDir.getAbsolutePath(), context, 0, null, null,
-      true, extraServlets, null, extraRequestFilters);
+    JettySolrRunner jetty = new JettySolrRunner(testDir.getAbsolutePath(), context,
+      0, null, null, true, extraServlets, sslConfig, extraRequestFilters);
     jetty.start();
     jettys.add(jetty);
     return jetty;
@@ -160,7 +196,7 @@
    */
   public void shutdown() throws Exception {
     try {
-      solrClient.shutdown();
+      solrClient.close();
       for (int i = jettys.size() - 1; i >= 0; --i) {
         stopJettySolrRunner(i);
       }
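
The new constructor and startJettySolrRunner overloads thread an optional SSLConfig through to every JettySolrRunner. A hedged usage sketch (SSLConfig construction elided; passing null for the extra servlet/filter maps is an assumption of this sketch):

    import java.io.File;

    import org.apache.solr.client.solrj.embedded.SSLConfig;
    import org.apache.solr.cloud.MiniSolrCloudCluster;

    class SslClusterFactory {
      // A non-null sslConfig routes each node through the SSL-aware
      // startJettySolrRunner overload; null keeps the original plain-HTTP path.
      static MiniSolrCloudCluster startSslCluster(File baseDir, File solrXml,
                                                  SSLConfig sslConfig) throws Exception {
        return new MiniSolrCloudCluster(2, "/solr", baseDir, solrXml, null, null, sslConfig);
      }
    }

shutdown() then closes the shared CloudSolrClient via close() before stopping the runners, per the final hunk above.
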
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
index c4747c9..63c4299 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/ZkTestServer.java
@@ -287,6 +287,11 @@
     public void runFromConfig(ServerConfig config) throws IOException {
       log.info("Starting server");
       try {
+        // ZooKeeper maintains a static collection of AuthenticationProviders, so
+        // we make sure the SASL provider is loaded so that it can be used in
+        // subsequent tests.
+        System.setProperty("zookeeper.authProvider.1",
+          "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
         // Note that this thread isn't going to be doing anything else,
         // so rather than spawning another thread, we will just call
         // run() in this thread.
@@ -419,7 +424,7 @@
 
   public void run() throws InterruptedException {
     log.info("STARTING ZK TEST SERVER");
-    // we don't call super.setUp
+    // we don't call super.distribSetUp
     zooThread = new Thread() {
       
       @Override
diff --git a/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java b/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java
index 37633ba..d76b709 100644
--- a/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java
+++ b/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java
@@ -25,7 +25,6 @@
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.NRTCachingDirectory;
 import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
 import org.apache.lucene.store.TrackingDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -86,9 +85,6 @@
     if (dir instanceof NRTCachingDirectory) {
       cdir = ((NRTCachingDirectory)dir).getDelegate();
     }
-    if (cdir instanceof RateLimitedDirectoryWrapper) {
-      cdir = ((RateLimitedDirectoryWrapper)dir).getDelegate();
-    }
     if (cdir instanceof TrackingDirectoryWrapper) {
       cdir = ((TrackingDirectoryWrapper)dir).getDelegate();
     }
diff --git a/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java b/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
index 9e5efa39..1c3bbcd 100644
--- a/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
+++ b/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
@@ -25,7 +25,6 @@
 import org.apache.lucene.store.LockFactory;
 import org.apache.lucene.store.MockDirectoryWrapper;
 import org.apache.lucene.store.NRTCachingDirectory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
 import org.apache.lucene.store.TrackingDirectoryWrapper;
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -69,9 +68,6 @@
     if (dir instanceof NRTCachingDirectory) {
       cdir = ((NRTCachingDirectory)dir).getDelegate();
     }
-    if (cdir instanceof RateLimitedDirectoryWrapper) {
-      cdir = ((RateLimitedDirectoryWrapper)dir).getDelegate();
-    }
     if (cdir instanceof TrackingDirectoryWrapper) {
       cdir = ((TrackingDirectoryWrapper)dir).getDelegate();
     }
diff --git a/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java b/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
new file mode 100644
index 0000000..af370a9
--- /dev/null
+++ b/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
@@ -0,0 +1,51 @@
+package org.apache.solr.util;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.CoreDescriptor;
+import org.apache.solr.core.CoresLocator;
+
+public abstract class ReadOnlyCoresLocator implements CoresLocator {
+
+  @Override
+  public void create(CoreContainer cc, CoreDescriptor... coreDescriptors) {
+    // no-op
+  }
+
+  @Override
+  public void persist(CoreContainer cc, CoreDescriptor... coreDescriptors) {
+    // no-op
+  }
+
+  @Override
+  public void delete(CoreContainer cc, CoreDescriptor... coreDescriptors) {
+    // no-op
+  }
+
+  @Override
+  public void rename(CoreContainer cc, CoreDescriptor oldCD, CoreDescriptor newCD) {
+    // no-op
+  }
+
+  @Override
+  public void swap(CoreContainer cc, CoreDescriptor cd1, CoreDescriptor cd2) {
+    // no-op
+  }
+
+}
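
Concrete subclasses only need to implement discover(); the TestCoresLocator added to TestHarness below is one example. A minimal standalone sketch (the class name is hypothetical):

    import java.util.List;
    import com.google.common.collect.ImmutableList;

    public class SingleCoreLocator extends ReadOnlyCoresLocator {
      private final String coreName;

      public SingleCoreLocator(String coreName) {
        this.coreName = coreName;
      }

      @Override
      public List<CoreDescriptor> discover(CoreContainer cc) {
        // name and instanceDir both set to coreName, as TestCoresLocator does
        return ImmutableList.of(new CoreDescriptor(cc, coreName, coreName));
      }
    }
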
diff --git a/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java b/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java
index 13f4857..09227b9 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/RestTestBase.java
@@ -16,12 +16,6 @@
  * limitations under the License.
  */
 
-import java.io.IOException;
-import java.util.Map;
-import java.util.SortedMap;
-
-import javax.xml.xpath.XPathExpressionException;
-
 import org.apache.solr.JSONTestUtil;
 import org.apache.solr.SolrJettyTestBase;
 import org.apache.solr.common.SolrException;
@@ -29,18 +23,25 @@
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.servlet.SolrRequestParsers;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.junit.AfterClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.SAXException;
 
-import org.junit.AfterClass;
+import javax.xml.xpath.XPathExpressionException;
+import java.io.IOException;
+import java.util.Map;
+import java.util.SortedMap;
 
 abstract public class RestTestBase extends SolrJettyTestBase {
   private static final Logger log = LoggerFactory.getLogger(RestTestBase.class);
   protected static RestTestHarness restTestHarness;
 
   @AfterClass
-  public static void cleanUpHarness() {
+  public static void cleanUpHarness() throws IOException {
+    if (restTestHarness != null) {
+      restTestHarness.close();
+    }
     restTestHarness = null;
   }
 
@@ -53,7 +54,7 @@
     restTestHarness = new RestTestHarness(new RESTfulServerProvider() {
       @Override
       public String getBaseURL() {
-        return jetty.getBaseUrl().toString();
+        return jetty.getBaseUrl().toString() + "/" + DEFAULT_TEST_CORENAME;
       }
     });
   }
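
With this change the harness resolves REST calls against the default test core rather than the webapp root; for a hypothetical Jetty on port 8983:

    // before: requests went to  http://127.0.0.1:8983/solr/schema/fields
    // after:  requests go to    http://127.0.0.1:8983/solr/collection1/schema/fields
    //         (DEFAULT_TEST_CORENAME is "collection1")
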
diff --git a/solr/test-framework/src/java/org/apache/solr/util/RestTestHarness.java b/solr/test-framework/src/java/org/apache/solr/util/RestTestHarness.java
index 0924719..1fa5014 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/RestTestHarness.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/RestTestHarness.java
@@ -16,15 +16,7 @@
  * limitations under the License.
  */
 
-import java.io.IOException;
-import java.net.URLEncoder;
-import java.nio.charset.StandardCharsets;
-
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-
 import org.apache.http.HttpEntity;
-import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
@@ -32,16 +24,24 @@
 import org.apache.http.client.methods.HttpUriRequest;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.impl.HttpClientUtil;
 import org.apache.solr.common.params.ModifiableSolrParams;
 
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpressionException;
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
+
 /**
  * Facilitates testing Solr's REST API via a provided embedded Jetty
  */
-public class RestTestHarness extends BaseTestHarness {
+public class RestTestHarness extends BaseTestHarness implements Closeable {
   private RESTfulServerProvider serverProvider;
-  private HttpClient httpClient = HttpClientUtil.createClient(new
+  private CloseableHttpClient httpClient = HttpClientUtil.createClient(new
       ModifiableSolrParams());
   
   public RestTestHarness(RESTfulServerProvider serverProvider) {
@@ -51,6 +51,10 @@
   public String getBaseURL() {
     return serverProvider.getBaseURL();
   }
+
+  public String getAdminURL() {
+    return getBaseURL().replace("/collection1", "");
+  }
   
   /**
    * Validates an XML "query" response against an array of XPath test strings
@@ -96,6 +100,10 @@
     return getResponse(new HttpGet(getBaseURL() + request));
   }
 
+  public String adminQuery(String request) throws Exception {
+    return getResponse(new HttpGet(getAdminURL() + request));
+  }
+
   /**
    * Processes a PUT request using a URL path (with no context path) + optional query params,
    * e.g. "/schema/fields/newfield", PUTs the given content, and returns the response content.
@@ -151,17 +159,26 @@
     }
   }
 
-
+  public String checkAdminResponseStatus(String xml, String code) throws Exception {
+    try {
+      String response = adminQuery(xml);
+      String valid = validateXPath(response, "//int[@name='status']=" + code);
+      return (null == valid) ? null : response;
+    } catch (XPathExpressionException e) {
+      throw new RuntimeException("?!? static xpath has bug?", e);
+    }
+  }
+
   /**
    * Reloads the first core listed in the response to the core admin handler STATUS command
    */
   @Override
   public void reload() throws Exception {
     String coreName = (String)evaluateXPath
-        (query("/admin/cores?action=STATUS"),
+        (adminQuery("/admin/cores?action=STATUS"),
          "//lst[@name='status']/lst[1]/str[@name='name']",
          XPathConstants.STRING);
-    String xml = checkResponseStatus("/admin/cores?action=RELOAD&core=" + coreName, "0");
+    String xml = checkAdminResponseStatus("/admin/cores?action=RELOAD&core=" + coreName, "0");
     if (null != xml) {
       throw new RuntimeException("RELOAD failed:\n" + xml);
     }
@@ -195,4 +211,9 @@
       EntityUtils.consumeQuietly(entity);
     }
   }
+
+  @Override
+  public void close() throws IOException {
+    httpClient.close();
+  }
 }
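
Since the harness now owns a CloseableHttpClient, callers are expected to close() it, as cleanUpHarness() in RestTestBase does. Note that getAdminURL() strips the literal "/collection1" (the value of DEFAULT_TEST_CORENAME) rather than a configurable core name. A usage sketch with an assumed host and port:

    RestTestHarness harness = new RestTestHarness(new RESTfulServerProvider() {
      @Override
      public String getBaseURL() {
        return "http://127.0.0.1:8983/solr/collection1";  // hypothetical endpoint
      }
    });
    try {
      harness.query("/schema/fields");                    // core-level:  .../solr/collection1/schema/fields
      harness.adminQuery("/admin/cores?action=STATUS");   // admin-level: .../solr/admin/cores?...
    } finally {
      harness.close();                                    // closes the underlying CloseableHttpClient
    }
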
diff --git a/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java b/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java
index 92267a3..b3e575f 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java
@@ -39,7 +39,7 @@
 
 public class SSLTestConfig extends SSLConfig {
   public static File TEST_KEYSTORE = ExternalPaths.SERVER_HOME == null ? null
-      : new File(ExternalPaths.SERVER_HOME, "../etc/solrtest.keystore");
+      : new File(ExternalPaths.SERVER_HOME, "../etc/test/solrtest.keystore");
   
   private static String TEST_KEYSTORE_PATH = TEST_KEYSTORE != null
       && TEST_KEYSTORE.exists() ? TEST_KEYSTORE.getAbsolutePath() : null;
@@ -90,7 +90,7 @@
   
   private class SSLHttpClientConfigurer extends HttpClientConfigurer {
     @SuppressWarnings("deprecation")
-    protected void configure(DefaultHttpClient httpClient, SolrParams config) {
+    public void configure(DefaultHttpClient httpClient, SolrParams config) {
       super.configure(httpClient, config);
       SchemeRegistry registry = httpClient.getConnectionManager().getSchemeRegistry();
       // Make sure no tests cheat by using HTTP
diff --git a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
index 0a09d29..cf0836d 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
@@ -17,13 +17,17 @@
 
 package org.apache.solr.util;
 
+import com.google.common.collect.ImmutableList;
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.NamedList.NamedListEntry;
 import org.apache.solr.core.ConfigSolr;
-import org.apache.solr.core.ConfigSolrXmlOld;
 import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.CoreDescriptor;
+import org.apache.solr.core.CoresLocator;
+import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
@@ -42,6 +46,7 @@
 import java.io.IOException;
 import java.io.StringWriter;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 /**
@@ -57,7 +62,7 @@
  *
  */
 public class TestHarness extends BaseTestHarness {
-  String coreName;
+  public String coreName;
   protected volatile CoreContainer container;
   public UpdateRequestHandler updater;
  
@@ -78,11 +83,10 @@
   }
   
   /**
-   * Creates a SolrConfig object for the 
-   * {@link ConfigSolrXmlOld#DEFAULT_DEFAULT_CORE_NAME} core using {@link #createConfig(String,String,String)}
+   * Creates a SolrConfig object for the default test core using {@link #createConfig(String,String,String)}
    */
   public static SolrConfig createConfig(String solrHome, String confFile) {
-    return createConfig(solrHome, ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME, confFile);
+    return createConfig(solrHome, SolrTestCaseJ4.DEFAULT_TEST_CORENAME, confFile);
   }
 
   /**
@@ -116,7 +120,7 @@
   public TestHarness( String dataDirectory,
                       SolrConfig solrConfig,
                       IndexSchema indexSchema) {
-      this(ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME, dataDirectory, solrConfig, indexSchema);
+      this(SolrTestCaseJ4.DEFAULT_TEST_CORENAME, dataDirectory, solrConfig, indexSchema);
   }
 
   /**
@@ -126,21 +130,9 @@
    * @param indexSchema schema resource name
    */
   public TestHarness(String coreName, String dataDir, String solrConfig, String indexSchema) {
-    try {
-      if (coreName == null)
-        coreName = ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME;
-      this.coreName = coreName;
-
-      SolrResourceLoader loader = new SolrResourceLoader(SolrResourceLoader.locateSolrHome());
-      ConfigSolr config = getTestHarnessConfig(loader, coreName, dataDir, solrConfig, indexSchema);
-      container = new CoreContainer(loader, config);
-      container.load();
-
-      updater = new UpdateRequestHandler();
-      updater.init( null );
-    } catch (Exception e) {
-      throw new RuntimeException(e);
-    }
+    this(new TestConfigSolr(new SolrResourceLoader(SolrResourceLoader.locateSolrHome()),
+        coreName, dataDir, solrConfig, indexSchema));
+    this.coreName = (coreName == null) ? SolrTestCaseJ4.DEFAULT_TEST_CORENAME : coreName;
   }
 
   public TestHarness(String coreName, String dataDir, SolrConfig solrConfig, IndexSchema indexSchema) {
@@ -162,38 +154,81 @@
    * @param solrXml the text of a solrxml
    */
   public TestHarness(SolrResourceLoader loader, String solrXml) {
-    this(loader, ConfigSolr.fromString(loader, solrXml));
+    this(ConfigSolr.fromString(loader, solrXml));
   }
 
   /**
-   * Create a TestHarness using a specific resource loader and config
-   * @param loader the SolrResourceLoader to use
+   * Create a TestHarness using a specific config
    * @param config the ConfigSolr to use
    */
-  public TestHarness(SolrResourceLoader loader, ConfigSolr config) {
-    container = new CoreContainer(loader, config);
+  public TestHarness(ConfigSolr config) {
+    container = new CoreContainer(config);
     container.load();
     updater = new UpdateRequestHandler();
     updater.init(null);
   }
 
-  private static ConfigSolr getTestHarnessConfig(SolrResourceLoader loader, String coreName, String dataDir,
-                                                 String solrConfig, String schema) {
-    String solrxml = "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n"
-        + "<solr persistent=\"false\">\n"
-        + "  <cores adminPath=\"/admin/cores\" defaultCoreName=\""
-        + ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME
-        + "\""
-        + " host=\"${host:}\" hostPort=\"${hostPort:}\" hostContext=\"${hostContext:}\""
-        + " distribUpdateSoTimeout=\"30000\""
-        + " zkClientTimeout=\"${zkClientTimeout:30000}\" distribUpdateConnTimeout=\"30000\""
-        + ">\n"
-        + "    <core name=\"" + coreName + "\" config=\"" + solrConfig
-        + "\" schema=\"" + schema + "\" dataDir=\"" + dataDir
-        + "\" transient=\"false\" loadOnStartup=\"true\""
-        + " shard=\"${shard:shard1}\" collection=\"${collection:collection1}\" instanceDir=\"" + coreName + "/\" />\n"
-        + "  </cores>\n" + "</solr>";
-    return ConfigSolr.fromString(loader, solrxml);
+  public static class TestConfigSolr extends ConfigSolr {
+
+    final CoresLocator locator;
+
+    public TestConfigSolr(String coreName, String dataDir, String solrConfig, String schema) {
+      this(new SolrResourceLoader(SolrResourceLoader.locateSolrHome()), coreName, dataDir, solrConfig, schema);
+    }
+
+    public TestConfigSolr(SolrResourceLoader loader, String coreName, String dataDir, String solrConfig, String schema) {
+      super(loader);
+      locator = new TestCoresLocator(coreName, dataDir, solrConfig, schema);
+    }
+
+    @Override
+    public CoresLocator getCoresLocator() {
+      return locator;
+    }
+
+    @Override
+    public PluginInfo getShardHandlerFactoryPluginInfo() {
+      return null;
+    }
+
+    @Override
+    protected String getProperty(CfgProp key) {
+      switch (key) {
+        case SOLR_HOST: return System.getProperty("host");
+        case SOLR_HOSTPORT: return System.getProperty("hostPort", "");
+        case SOLR_HOSTCONTEXT: return System.getProperty("hostContext", "");
+        case SOLR_DISTRIBUPDATESOTIMEOUT: return "30000";
+        case SOLR_ZKCLIENTTIMEOUT: return System.getProperty("zkClientTimeout", "30000");
+        case SOLR_DISTRIBUPDATECONNTIMEOUT: return "30000";
+        case SOLR_SHARESCHEMA: return System.getProperty("shareSchema", "false");
+      }
+      return null;
+    }
+  }
+
+  public static class TestCoresLocator extends ReadOnlyCoresLocator {
+
+    final String coreName;
+    final String dataDir;
+    final String solrConfig;
+    final String schema;
+
+    public TestCoresLocator(String coreName, String dataDir, String solrConfig, String schema) {
+      this.coreName = coreName == null ? SolrTestCaseJ4.DEFAULT_TEST_CORENAME : coreName;
+      this.dataDir = dataDir;
+      this.schema = schema;
+      this.solrConfig = solrConfig;
+    }
+
+    @Override
+    public List<CoreDescriptor> discover(CoreContainer cc) {
+      return ImmutableList.of(new CoreDescriptor(cc, coreName, coreName,
+          CoreDescriptor.CORE_DATADIR, dataDir,
+          CoreDescriptor.CORE_CONFIG, solrConfig,
+          CoreDescriptor.CORE_SCHEMA, schema,
+          CoreDescriptor.CORE_COLLECTION, System.getProperty("collection", "collection1"),
+          CoreDescriptor.CORE_SHARD, System.getProperty("shard", "shard1")));
+    }
   }
   
   public CoreContainer getCoreContainer() {
@@ -289,7 +324,8 @@
    * @see LocalSolrQueryRequest
    */
   public String query(String handler, SolrQueryRequest req) throws Exception {
-    try (SolrCore core = getCoreInc()) {
+    try {
+      SolrCore core = req.getCore();
       SolrQueryResponse rsp = new SolrQueryResponse();
       SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
       core.execute(core.getRequestHandler(handler),req,rsp);
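
The net effect is that a TestHarness can now be built from any ConfigSolr, with TestConfigSolr and TestCoresLocator supplying the single-core test setup that was previously inlined as a solr.xml string. A construction sketch (paths are illustrative):

    SolrResourceLoader loader = new SolrResourceLoader(SolrResourceLoader.locateSolrHome());
    ConfigSolr config = new TestHarness.TestConfigSolr(loader, "collection1",
        "/path/to/dataDir", "solrconfig.xml", "schema.xml");
    TestHarness harness = new TestHarness(config);  // loads the CoreContainer immediately
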
diff --git a/solr/webapp/build.xml b/solr/webapp/build.xml
index 6fc3d71..ec8138e 100644
--- a/solr/webapp/build.xml
+++ b/solr/webapp/build.xml
@@ -40,7 +40,7 @@
   <target name="compile-core"/>
   <target name="compile-test"/>
 
-  <target name="dist"
+  <target name="server-war"
           description="Creates the Solr WAR Distribution file."
           depends="test, init-dist, dist-core, dist-solrj, lucene-jars-to-solr">
     <build-manifest title="Apache Solr Search Server"
diff --git a/solr/webapp/web/admin.html b/solr/webapp/web/admin.html
index 1543ae9..81fcbc5 100644
--- a/solr/webapp/web/admin.html
+++ b/solr/webapp/web/admin.html
@@ -138,9 +138,9 @@
                     
           <li class="documentation"><a href="http://lucene.apache.org/solr/"><span>Documentation</span></a></li>
           <li class="issues"><a href="http://issues.apache.org/jira/browse/SOLR"><span>Issue Tracker</span></a></li>
-          <li class="irc"><a href="http://webchat.freenode.net/?channels=#solr"><span>IRC Channel</span></a></li>
-          <li class="mailinglist"><a href="http://wiki.apache.org/solr/UsingMailingLists"><span>Community forum</span></a></li>
-          <li class="wiki-query-syntax"><a href="http://wiki.apache.org/solr/SolrQuerySyntax"><span>Solr Query Syntax</span></a></li>
+          <li class="irc"><a href="https://wiki.apache.org/solr/IRCChannels"><span>IRC Channel</span></a></li>
+          <li class="mailinglist"><a href="http://lucene.apache.org/solr/resources.html#community"><span>Community forum</span></a></li>
+          <li class="wiki-query-syntax"><a href="https://cwiki.apache.org/confluence/display/solr/Query+Syntax+and+Parsing"><span>Solr Query Syntax</span></a></li>
                     
         </ul>